Posted to commits@hive.apache.org by ha...@apache.org on 2013/09/20 02:33:01 UTC

svn commit: r1524874 [2/9] - in /hive/branches/vectorization: ./ cli/src/java/org/apache/hadoop/hive/cli/ cli/src/test/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ common/s...

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java Fri Sep 20 00:32:55 2013
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -63,7 +64,6 @@ import org.apache.hive.hcatalog.data.sch
 import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
 import org.apache.hive.hcatalog.mapreduce.FosterStorageHandler;
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.HCatStorageHandler;
 import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.PartInfo;
@@ -371,7 +371,7 @@ public class HCatUtil {
    * @return storageHandler instance
    * @throws IOException
    */
-  public static HCatStorageHandler getStorageHandler(Configuration conf, StorerInfo storerInfo) throws IOException {
+  public static HiveStorageHandler getStorageHandler(Configuration conf, StorerInfo storerInfo) throws IOException {
     return getStorageHandler(conf,
       storerInfo.getStorageHandlerClass(),
       storerInfo.getSerdeClass(),
@@ -379,7 +379,7 @@ public class HCatUtil {
       storerInfo.getOfClass());
   }
 
-  public static HCatStorageHandler getStorageHandler(Configuration conf, PartInfo partitionInfo) throws IOException {
+  public static HiveStorageHandler getStorageHandler(Configuration conf, PartInfo partitionInfo) throws IOException {
     return HCatUtil.getStorageHandler(
       conf,
       partitionInfo.getStorageHandlerClassName(),
@@ -400,7 +400,7 @@ public class HCatUtil {
    * @return storageHandler instance
    * @throws IOException
    */
-  public static HCatStorageHandler getStorageHandler(Configuration conf,
+  public static HiveStorageHandler getStorageHandler(Configuration conf,
                              String storageHandler,
                              String serDe,
                              String inputFormat,
@@ -420,10 +420,10 @@ public class HCatUtil {
     }
 
     try {
-      Class<? extends HCatStorageHandler> handlerClass =
-        (Class<? extends HCatStorageHandler>) Class
+      Class<? extends HiveStorageHandler> handlerClass =
+        (Class<? extends HiveStorageHandler>) Class
           .forName(storageHandler, true, JavaUtils.getClassLoader());
-      return (HCatStorageHandler) ReflectionUtils.newInstance(
+      return (HiveStorageHandler) ReflectionUtils.newInstance(
         handlerClass, conf);
     } catch (ClassNotFoundException e) {
       throw new IOException("Error in loading storage handler."
@@ -444,8 +444,8 @@ public class HCatUtil {
   }
 
   public static Map<String, String>
-  getInputJobProperties(HCatStorageHandler storageHandler,
-              InputJobInfo inputJobInfo) {
+  getInputJobProperties(HiveStorageHandler storageHandler,
+      InputJobInfo inputJobInfo) {
     TableDesc tableDesc = new TableDesc(storageHandler.getSerDeClass(),
       storageHandler.getInputFormatClass(),
       storageHandler.getOutputFormatClass(),
@@ -454,6 +454,9 @@ public class HCatUtil {
       tableDesc.setJobProperties(new HashMap<String, String>());
     }
 
+    Properties mytableProperties = tableDesc.getProperties();
+    mytableProperties.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME,inputJobInfo.getDatabaseName()+ "." + inputJobInfo.getTableName());
+
     Map<String, String> jobProperties = new HashMap<String, String>();
     try {
       tableDesc.getJobProperties().put(
@@ -474,7 +477,7 @@ public class HCatUtil {
   @InterfaceAudience.Private
   @InterfaceStability.Evolving
   public static void
-  configureOutputStorageHandler(HCatStorageHandler storageHandler,
+  configureOutputStorageHandler(HiveStorageHandler storageHandler,
                   Configuration conf,
                   OutputJobInfo outputJobInfo) {
     //TODO replace IgnoreKeyTextOutputFormat with a
@@ -489,6 +492,11 @@ public class HCatUtil {
       tableDesc.getJobProperties().put(el.getKey(), el.getValue());
     }
 
+    Properties mytableProperties = tableDesc.getProperties();
+    mytableProperties.setProperty(
+        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME,
+        outputJobInfo.getDatabaseName()+ "." + outputJobInfo.getTableName());
+
     Map<String, String> jobProperties = new HashMap<String, String>();
     try {
       tableDesc.getJobProperties().put(
@@ -498,6 +506,18 @@ public class HCatUtil {
       storageHandler.configureOutputJobProperties(tableDesc,
         jobProperties);
 
+      Map<String, String> tableJobProperties = tableDesc.getJobProperties();
+      if (tableJobProperties != null) {
+        if (tableJobProperties.containsKey(HCatConstants.HCAT_KEY_OUTPUT_INFO)) {
+          String jobString = tableJobProperties.get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
+          if (jobString != null) {
+            if  (!jobProperties.containsKey(HCatConstants.HCAT_KEY_OUTPUT_INFO)) {
+              jobProperties.put(HCatConstants.HCAT_KEY_OUTPUT_INFO,
+                  tableJobProperties.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
+            }
+          }
+        }
+      }
       for (Map.Entry<String, String> el : jobProperties.entrySet()) {
         conf.set(el.getKey(), el.getValue());
       }
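
[Note, not part of the commit: the HCatUtil changes above replace the HCatalog-specific HCatStorageHandler base type with Hive's own HiveStorageHandler interface, and additionally stamp the fully qualified table name ("db.table") into the TableDesc properties under hive_metastoreConstants.META_TABLE_NAME so storage handlers can identify which table they are configuring. A minimal sketch of the reflection pattern getStorageHandler() relies on; class and method names below are illustrative, not from this commit:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
    import org.apache.hadoop.util.ReflectionUtils;

    public class StorageHandlerLoaderSketch {
      // Resolve the handler class by name, then let ReflectionUtils construct
      // it. newInstance() also calls setConf(conf) on the new object, since
      // HiveStorageHandler extends Configurable.
      public static HiveStorageHandler load(Configuration conf, String className)
          throws IOException {
        try {
          Class<? extends HiveStorageHandler> clazz =
              Class.forName(className).asSubclass(HiveStorageHandler.class);
          return ReflectionUtils.newInstance(clazz, conf);
        } catch (ClassNotFoundException e) {
          throw new IOException("Error in loading storage handler " + className, e);
        }
      }
    }
]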

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java Fri Sep 20 00:32:55 2013
@@ -21,6 +21,7 @@ package org.apache.hive.hcatalog.mapredu
 
 import java.io.IOException;
 
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -37,7 +38,7 @@ import org.apache.hive.hcatalog.data.HCa
  */
 class DefaultRecordWriterContainer extends RecordWriterContainer {
 
-  private final HCatStorageHandler storageHandler;
+  private final HiveStorageHandler storageHandler;
   private final SerDe serDe;
   private final OutputJobInfo jobInfo;
   private final ObjectInspector hcatRecordOI;

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java Fri Sep 20 00:32:55 2013
@@ -35,6 +35,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -75,7 +76,7 @@ class FileOutputCommitterContainer exten
 
   private Map<String, Map<String, String>> partitionsDiscoveredByPath;
   private Map<String, JobContext> contextDiscoveredByPath;
-  private final HCatStorageHandler cachedStorageHandler;
+  private final HiveStorageHandler cachedStorageHandler;
 
   HarOutputCommitterPostProcessor harProcessor = new HarOutputCommitterPostProcessor();
 

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java Fri Sep 20 00:32:55 2013
@@ -24,6 +24,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -87,7 +88,7 @@ class FileOutputFormatContainer extends 
     OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil
       .deserialize(jobInfoString);
     StorerInfo storeInfo = jobInfo.getTableInfo().getStorerInfo();
-    HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(
+    HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(
       context.getConfiguration(), storeInfo);
     Class<? extends SerDe> serde = storageHandler.getSerDeClass();
     SerDe sd = (SerDe) ReflectionUtils.newInstance(serde,

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java Fri Sep 20 00:32:55 2013
@@ -27,6 +27,7 @@ import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -51,7 +52,7 @@ import org.apache.hive.hcatalog.data.HCa
  */
 class FileRecordWriterContainer extends RecordWriterContainer {
 
-  private final HCatStorageHandler storageHandler;
+  private final HiveStorageHandler storageHandler;
   private final SerDe serDe;
   private final ObjectInspector objectInspector;
 
@@ -125,7 +126,7 @@ class FileRecordWriterContainer extends 
   /**
    * @return the storagehandler
    */
-  public HCatStorageHandler getStorageHandler() {
+  public HiveStorageHandler getStorageHandler() {
     return storageHandler;
   }
 

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java Fri Sep 20 00:32:55 2013
@@ -25,11 +25,13 @@ import org.apache.hadoop.hive.common.Fil
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.ql.io.RCFile;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatUtil;
@@ -44,7 +46,7 @@ import java.util.Map;
  *  artifacts of tables which don't define a SerDe. This StorageHandler assumes
  *  the supplied storage artifacts are for a file-based storage system.
  */
-public class FosterStorageHandler extends HCatStorageHandler {
+public class FosterStorageHandler extends DefaultStorageHandler {
 
   public Configuration conf;
   /** The directory under which data is initially written for a partitioned table */
@@ -92,6 +94,11 @@ public class FosterStorageHandler extend
   }
 
   @Override
+  public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
+    //do nothing currently
+  }
+
+  @Override
   public void configureInputJobProperties(TableDesc tableDesc,
                       Map<String, String> jobProperties) {
 
@@ -161,7 +168,11 @@ public class FosterStorageHandler extend
 
   }
 
-  @Override
+  public void configureTableJobProperties(TableDesc tableDesc,
+      Map<String, String> jobProperties) {
+    return;
+  }
+
   OutputFormatContainer getOutputFormatContainer(
     org.apache.hadoop.mapred.OutputFormat outputFormat) {
     return new FileOutputFormatContainer(outputFormat);
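
[Note, not part of the commit: with HCatStorageHandler removed, FosterStorageHandler above now extends Hive's DefaultStorageHandler and overrides only the hooks it needs; everything else inherits the defaults. A hypothetical handler written the same way, assuming the HiveStorageHandler hook signatures shown in the diff above:

    import java.util.Map;
    import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
    import org.apache.hadoop.hive.ql.plan.TableDesc;
    import org.apache.hadoop.mapred.JobConf;

    // Illustrative only: inherit the input/output format and SerDe defaults
    // from DefaultStorageHandler and override just the configuration hooks.
    public class NoOpStorageHandler extends DefaultStorageHandler {
      @Override
      public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
        // nothing to configure
      }

      @Override
      public void configureInputJobProperties(TableDesc tableDesc,
          Map<String, String> jobProperties) {
        // nothing to configure
      }
    }
]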

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java Fri Sep 20 00:32:55 2013
@@ -29,6 +29,7 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
@@ -116,7 +117,7 @@ public abstract class HCatBaseInputForma
       return splits;
     }
 
-    HCatStorageHandler storageHandler;
+    HiveStorageHandler storageHandler;
     JobConf jobConf;
     //For each matching partition, call getSplits on the underlying InputFormat
     for (PartInfo partitionInfo : partitionInfoList) {
@@ -185,7 +186,7 @@ public abstract class HCatBaseInputForma
     JobContext jobContext = taskContext;
     Configuration conf = jobContext.getConfiguration();
 
-    HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(
+    HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(
       conf, partitionInfo);
 
     JobConf jobConf = HCatUtil.getJobConfFromContext(jobContext);

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java Fri Sep 20 00:32:55 2013
@@ -25,6 +25,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.JobContext;
@@ -78,12 +79,21 @@ public abstract class HCatBaseOutputForm
    * @return the output format instance
    * @throws IOException
    */
-  protected OutputFormat<WritableComparable<?>, HCatRecord> getOutputFormat(JobContext context) throws IOException {
+  protected OutputFormat<WritableComparable<?>, HCatRecord> getOutputFormat(JobContext context) 
+    throws IOException {
     OutputJobInfo jobInfo = getJobInfo(context);
-    HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(context.getConfiguration(), jobInfo.getTableInfo().getStorerInfo());
+    HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(context.getConfiguration(), 
+        jobInfo.getTableInfo().getStorerInfo());
     //why do we need this?
     configureOutputStorageHandler(context);
-    return storageHandler.getOutputFormatContainer(ReflectionUtils.newInstance(storageHandler.getOutputFormatClass(), context.getConfiguration()));
+    if (storageHandler instanceof FosterStorageHandler) {
+      return new FileOutputFormatContainer(ReflectionUtils.newInstance(
+          storageHandler.getOutputFormatClass(),context.getConfiguration()));
+    }
+    else { 
+      return new DefaultOutputFormatContainer(ReflectionUtils.newInstance(
+          storageHandler.getOutputFormatClass(),context.getConfiguration()));
+    }
   }
 
   /**
@@ -134,7 +144,7 @@ public abstract class HCatBaseOutputForm
     Configuration conf = jobContext.getConfiguration();
     try {
       OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(conf.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
-      HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, jobInfo.getTableInfo().getStorerInfo());
+      HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(jobContext.getConfiguration(),jobInfo.getTableInfo().getStorerInfo());
 
       Map<String, String> partitionValues = jobInfo.getPartitionValues();
       String location = jobInfo.getLocation();

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatMapRedUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatMapRedUtil.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatMapRedUtil.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatMapRedUtil.java Fri Sep 20 00:32:55 2013
@@ -27,6 +27,7 @@ import org.apache.hadoop.mapred.JobConte
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TaskAttemptContext;
 import org.apache.hadoop.mapred.TaskAttemptID;
+import org.apache.hadoop.mapreduce.JobID;
 
 public class HCatMapRedUtil {
 
@@ -43,7 +44,9 @@ public class HCatMapRedUtil {
   public static TaskAttemptContext createTaskAttemptContext(JobConf conf, TaskAttemptID id, Progressable progressable) {
     return ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, id, (Reporter) progressable);
   }
-
+  public static org.apache.hadoop.mapreduce.TaskAttemptID createTaskAttemptID(JobID jobId, boolean isMap, int taskId, int id) {
+    return ShimLoader.getHadoopShims().newTaskAttemptID(jobId, isMap, taskId, id);
+  }
   public static org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapreduce.JobContext context) {
     return createJobContext((JobConf)context.getConfiguration(),
                 context.getJobID(),
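
[Note, not part of the commit: the new createTaskAttemptID() helper above routes through the Hadoop shims so the same code can build a mapreduce TaskAttemptID against both Hadoop 1 and Hadoop 2. A hedged usage sketch; the job id values are made up:

    import org.apache.hadoop.mapreduce.JobID;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hive.hcatalog.mapreduce.HCatMapRedUtil;

    public class TaskAttemptIdSketch {
      public static void main(String[] args) {
        JobID jobId = new JobID("20130920", 1);  // illustrative identifier and id
        TaskAttemptID attempt =
            HCatMapRedUtil.createTaskAttemptID(jobId, true /* map task */, 0, 0);
        System.out.println(attempt);  // e.g. attempt_20130920_0001_m_000000_0
      }
    }
]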

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java Fri Sep 20 00:32:55 2013
@@ -29,6 +29,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -170,7 +171,7 @@ public class HCatOutputFormat extends HC
         partitionCols.add(schema.getName());
       }
 
-      HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
+      HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
 
       //Serialize the output info into the configuration
       outputJobInfo.setTableInfo(HCatTableInfo.valueOf(table.getTTable()));

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatRecordReader.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatRecordReader.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatRecordReader.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatRecordReader.java Fri Sep 20 00:32:55 2013
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.io.Writable;
@@ -58,7 +59,7 @@ class HCatRecordReader extends RecordRea
   private org.apache.hadoop.mapred.RecordReader<WritableComparable, Writable> baseRecordReader;
 
   /** The storage handler used */
-  private final HCatStorageHandler storageHandler;
+  private final HiveStorageHandler storageHandler;
 
   private Deserializer deserializer;
 
@@ -70,7 +71,7 @@ class HCatRecordReader extends RecordRea
   /**
    * Instantiates a new hcat record reader.
    */
-  public HCatRecordReader(HCatStorageHandler storageHandler,
+  public HCatRecordReader(HiveStorageHandler storageHandler,
               Map<String, String> valuesNotInDataCols) {
     this.storageHandler = storageHandler;
     this.valuesNotInDataCols = valuesNotInDataCols;
@@ -106,7 +107,7 @@ class HCatRecordReader extends RecordRea
   }
 
   private org.apache.hadoop.mapred.RecordReader createBaseRecordReader(HCatSplit hcatSplit,
-                                     HCatStorageHandler storageHandler, TaskAttemptContext taskContext) throws IOException {
+                                     HiveStorageHandler storageHandler, TaskAttemptContext taskContext) throws IOException {
 
     JobConf jobConf = HCatUtil.getJobConfFromContext(taskContext);
     HCatUtil.copyJobPropertiesToJobConf(hcatSplit.getPartitionInfo().getJobProperties(), jobConf);
@@ -116,7 +117,7 @@ class HCatRecordReader extends RecordRea
       InternalUtil.createReporter(taskContext));
   }
 
-  private void createDeserializer(HCatSplit hcatSplit, HCatStorageHandler storageHandler,
+  private void createDeserializer(HCatSplit hcatSplit, HiveStorageHandler storageHandler,
                   TaskAttemptContext taskContext) throws IOException {
 
     deserializer = ReflectionUtils.newInstance(storageHandler.getSerDeClass(),

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java Fri Sep 20 00:32:55 2013
@@ -27,6 +27,7 @@ import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -154,7 +155,7 @@ class InitializeInput {
     StorerInfo storerInfo = InternalUtil.extractStorerInfo(sd, parameters);
 
     Properties hcatProperties = new Properties();
-    HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
+    HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
 
     // copy the properties from storageHandler to jobProperties
     Map<String, String> jobProperties = HCatUtil.getInputJobProperties(storageHandler, inputJobInfo);

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/PartInfo.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/PartInfo.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/PartInfo.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/PartInfo.java Fri Sep 20 00:32:55 2013
@@ -22,6 +22,7 @@ import java.io.Serializable;
 import java.util.Map;
 import java.util.Properties;
 
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 
 /** The Class used to serialize the partition information read from the metadata server that maps to a partition. */
@@ -63,7 +64,7 @@ public class PartInfo implements Seriali
    * @param jobProperties the job properties
    * @param tableInfo the table information
    */
-  public PartInfo(HCatSchema partitionSchema, HCatStorageHandler storageHandler,
+  public PartInfo(HCatSchema partitionSchema, HiveStorageHandler storageHandler,
           String location, Properties hcatProperties,
           Map<String, String> jobProperties, HCatTableInfo tableInfo) {
     this.partitionSchema = partitionSchema;

Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/security/StorageDelegationAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/security/StorageDelegationAuthorizationProvider.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/security/StorageDelegationAuthorizationProvider.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/security/StorageDelegationAuthorizationProvider.java Fri Sep 20 00:32:55 2013
@@ -37,7 +37,6 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProviderBase;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hive.hcatalog.mapreduce.HCatStorageHandler;
 
 /**
  * A HiveAuthorizationProvider which delegates the authorization requests to 
@@ -84,8 +83,8 @@ public class StorageDelegationAuthorizat
     HiveStorageHandler handler = table.getStorageHandler();
 
     if (handler != null) {
-      if (handler instanceof HCatStorageHandler) {
-        return ((HCatStorageHandler) handler).getAuthorizationProvider();
+      if (handler instanceof HiveStorageHandler) {
+        return ((HiveStorageHandler) handler).getAuthorizationProvider();
       } else {
         String authProviderClass = authProviders.get(handler.getClass().getCanonicalName());
 

Modified: hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java Fri Sep 20 00:32:55 2013
@@ -57,7 +57,7 @@ import org.slf4j.LoggerFactory;
 public class TestPermsGrp extends TestCase {
 
   private boolean isServerRunning = false;
-  private static final int msPort = 20101;
+  private int msPort;
   private HiveConf hcatConf;
   private Warehouse clientWH;
   private HiveMetaStoreClient msc;
@@ -75,6 +75,7 @@ public class TestPermsGrp extends TestCa
       return;
     }
 
+    msPort = MetaStoreUtils.findFreePort();
     MetaStoreUtils.startMetaStore(msPort, ShimLoader.getHadoopThriftAuthBridge());
 
     isServerRunning = true;
@@ -87,6 +88,7 @@ public class TestPermsGrp extends TestCa
     hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://127.0.0.1:" + msPort);
     hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
+    hcatConf.setIntVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 120);
 
     hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
     hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");

Modified: hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java Fri Sep 20 00:32:55 2013
@@ -70,7 +70,7 @@ public class TestHCatPartitionPublish {
   private static FileSystem fs = null;
   private static MiniMRCluster mrCluster = null;
   private static boolean isServerRunning = false;
-  private static final int msPort = 20101;
+  private static int msPort;
   private static HiveConf hcatConf;
   private static HiveMetaStoreClient msc;
   private static SecurityManager securityManager;
@@ -94,6 +94,8 @@ public class TestHCatPartitionPublish {
       return;
     }
 
+    msPort = MetaStoreUtils.findFreePort();
+
     MetaStoreUtils.startMetaStore(msPort, ShimLoader
         .getHadoopThriftAuthBridge());
     isServerRunning = true;
@@ -106,6 +108,7 @@ public class TestHCatPartitionPublish {
         + msPort);
     hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
+    hcatConf.setIntVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 120);
     hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
         HCatSemanticAnalyzer.class.getName());
     hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
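
[Note, not part of the commit: both TestPermsGrp and TestHCatPartitionPublish above (and their org.apache.hive.hcatalog copies further down) switch from the hard-coded metastore port 20101 to one chosen at runtime, so concurrent test runs on the same host cannot collide, and they raise the client socket timeout. The pattern, condensed into a hedged helper sketch:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.MetaStoreUtils;
    import org.apache.hadoop.hive.shims.ShimLoader;

    public class MetastoreTestSetupSketch {
      // Find a free port, start the metastore on it, then point the client
      // config at that port with a generous socket timeout.
      static HiveConf startLocalMetastore() throws Exception {
        int msPort = MetaStoreUtils.findFreePort();
        MetaStoreUtils.startMetaStore(msPort, ShimLoader.getHadoopThriftAuthBridge());
        HiveConf conf = new HiveConf(MetastoreTestSetupSketch.class);
        conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://127.0.0.1:" + msPort);
        conf.setIntVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 120);
        return conf;
      }
    }
]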

Modified: hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java Fri Sep 20 00:32:55 2013
@@ -27,8 +27,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 
-import junit.framework.TestCase;
-
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -51,32 +50,40 @@ import org.apache.hcatalog.data.schema.H
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.data.Tuple;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 
 /**
  * @deprecated Use/modify {@link org.apache.hive.hcatalog.mapreduce.TestSequenceFileReadWrite} instead
  */
-public class TestSequenceFileReadWrite extends TestCase {
-  private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
-      "/build/test/data/" + TestSequenceFileReadWrite.class.getCanonicalName();
-  private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
-  private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
-
-  private static Driver driver;
-  private static PigServer server;
-  private static String[] input;
-  private static HiveConf hiveConf;
+public class TestSequenceFileReadWrite {
 
-  public void Initialize() throws Exception {
+  private File dataDir;
+  private String warehouseDir;
+  private String inputFileName;
+  private Driver driver;
+  private PigServer server;
+  private String[] input;
+  private HiveConf hiveConf;
+
+  @Before
+  public void setup() throws Exception {
+    dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator + 
+        TestSequenceFileReadWrite.class.getCanonicalName() + "-" + System.currentTimeMillis());
     hiveConf = new HiveConf(this.getClass());
+    warehouseDir = new File(dataDir, "warehouse").getAbsolutePath();
+    inputFileName = new File(dataDir, "input.data").getAbsolutePath();
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-    hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
+    hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, warehouseDir);
     driver = new Driver(hiveConf);
     SessionState.start(new CliSessionState(hiveConf));
 
-    new File(TEST_WAREHOUSE_DIR).mkdirs();
+    if(!(new File(warehouseDir).mkdirs())) {
+      throw new RuntimeException("Could not create " + warehouseDir);
+    }
 
     int numRows = 3;
     input = new String[numRows];
@@ -85,13 +92,19 @@ public class TestSequenceFileReadWrite e
       String col2 = "b" + i;
       input[i] = i + "," + col1 + "," + col2;
     }
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+    HcatTestUtils.createTestDataFile(inputFileName, input);
     server = new PigServer(ExecType.LOCAL);
   }
 
+  @After
+  public void teardown() throws IOException {
+    if(dataDir != null) {
+      FileUtils.deleteDirectory(dataDir);
+    }
+  }
+  
   @Test
   public void testSequenceTableWriteRead() throws Exception {
-    Initialize();
     String createTable = "CREATE TABLE demo_table(a0 int, a1 String, a2 String) STORED AS SEQUENCEFILE";
     driver.run("drop table demo_table");
     int retCode1 = driver.run(createTable).getResponseCode();
@@ -99,7 +112,7 @@ public class TestSequenceFileReadWrite e
 
     server.setBatchOn();
     server.registerQuery("A = load '"
-        + INPUT_FILE_NAME
+        + inputFileName
         + "' using PigStorage(',') as (a0:int,a1:chararray,a2:chararray);");
     server.registerQuery("store A into 'demo_table' using org.apache.hcatalog.pig.HCatStorer();");
     server.executeBatch();
@@ -120,7 +133,6 @@ public class TestSequenceFileReadWrite e
 
   @Test
   public void testTextTableWriteRead() throws Exception {
-    Initialize();
     String createTable = "CREATE TABLE demo_table_1(a0 int, a1 String, a2 String) STORED AS TEXTFILE";
     driver.run("drop table demo_table_1");
     int retCode1 = driver.run(createTable).getResponseCode();
@@ -128,7 +140,7 @@ public class TestSequenceFileReadWrite e
 
     server.setBatchOn();
     server.registerQuery("A = load '"
-        + INPUT_FILE_NAME
+        + inputFileName
         + "' using PigStorage(',') as (a0:int,a1:chararray,a2:chararray);");
     server.registerQuery("store A into 'demo_table_1' using org.apache.hcatalog.pig.HCatStorer();");
     server.executeBatch();
@@ -149,7 +161,6 @@ public class TestSequenceFileReadWrite e
 
   @Test
   public void testSequenceTableWriteReadMR() throws Exception {
-    Initialize();
     String createTable = "CREATE TABLE demo_table_2(a0 int, a1 String, a2 String) STORED AS SEQUENCEFILE";
     driver.run("drop table demo_table_2");
     int retCode1 = driver.run(createTable).getResponseCode();
@@ -165,7 +176,7 @@ public class TestSequenceFileReadWrite e
     job.setOutputKeyClass(NullWritable.class);
     job.setOutputValueClass(DefaultHCatRecord.class);
     job.setInputFormatClass(TextInputFormat.class);
-    TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
+    TextInputFormat.setInputPaths(job, inputFileName);
 
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
         MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_2", null));
@@ -196,7 +207,6 @@ public class TestSequenceFileReadWrite e
 
   @Test
   public void testTextTableWriteReadMR() throws Exception {
-    Initialize();
     String createTable = "CREATE TABLE demo_table_3(a0 int, a1 String, a2 String) STORED AS TEXTFILE";
     driver.run("drop table demo_table_3");
     int retCode1 = driver.run(createTable).getResponseCode();
@@ -213,7 +223,7 @@ public class TestSequenceFileReadWrite e
     job.setOutputValueClass(DefaultHCatRecord.class);
     job.setInputFormatClass(TextInputFormat.class);
     job.setNumReduceTasks(0);
-    TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
+    TextInputFormat.setInputPaths(job, inputFileName);
 
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
         MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_3", null));
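
[Note, not part of the commit: the TestSequenceFileReadWrite rewrite above (mirrored below for the org.apache.hive.hcatalog copy) moves from a JUnit 3 TestCase with a manually invoked Initialize() to JUnit 4 @Before/@After methods and a per-run temp directory. The resulting lifecycle, reduced to a skeleton with a placeholder test body:

    import java.io.File;
    import java.io.IOException;
    import org.apache.commons.io.FileUtils;
    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    public class TempDirLifecycleSketch {
      private File dataDir;

      @Before
      public void setup() {
        // unique per run, so repeated or parallel runs cannot interfere
        dataDir = new File(System.getProperty("java.io.tmpdir"),
            TempDirLifecycleSketch.class.getCanonicalName() + "-"
            + System.currentTimeMillis());
        if (!dataDir.mkdirs()) {
          throw new RuntimeException("Could not create " + dataDir);
        }
      }

      @After
      public void teardown() throws IOException {
        if (dataDir != null) {
          FileUtils.deleteDirectory(dataDir);  // clean up whether the test passed or failed
        }
      }

      @Test
      public void testSomething() {
        // each test starts with a fresh, empty dataDir
      }
    }
]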

Modified: hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java Fri Sep 20 00:32:55 2013
@@ -55,7 +55,7 @@ import org.slf4j.LoggerFactory;
 public class TestPermsGrp extends TestCase {
 
   private boolean isServerRunning = false;
-  private static final int msPort = 20101;
+  private int msPort;
   private HiveConf hcatConf;
   private Warehouse clientWH;
   private HiveMetaStoreClient msc;
@@ -72,7 +72,9 @@ public class TestPermsGrp extends TestCa
     if (isServerRunning) {
       return;
     }
-
+    
+    
+    msPort = MetaStoreUtils.findFreePort();
     MetaStoreUtils.startMetaStore(msPort, ShimLoader.getHadoopThriftAuthBridge());
 
     isServerRunning = true;
@@ -85,6 +87,7 @@ public class TestPermsGrp extends TestCa
     hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://127.0.0.1:" + msPort);
     hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
+    hcatConf.setIntVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 120);
 
     hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
     hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -97,7 +100,6 @@ public class TestPermsGrp extends TestCa
     System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
   }
 
-
   public void testCustomPerms() throws Exception {
 
     String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;

Modified: hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java Fri Sep 20 00:32:55 2013
@@ -70,7 +70,7 @@ public class TestHCatPartitionPublish {
   private static FileSystem fs = null;
   private static MiniMRCluster mrCluster = null;
   private static boolean isServerRunning = false;
-  private static final int msPort = 20101;
+  private static int msPort;
   private static HiveConf hcatConf;
   private static HiveMetaStoreClient msc;
   private static SecurityManager securityManager;
@@ -98,6 +98,8 @@ public class TestHCatPartitionPublish {
       return;
     }
 
+    msPort = MetaStoreUtils.findFreePort();
+
     MetaStoreUtils.startMetaStore(msPort, ShimLoader
         .getHadoopThriftAuthBridge());
     Thread.sleep(10000);
@@ -111,6 +113,7 @@ public class TestHCatPartitionPublish {
         + msPort);
     hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
+    hcatConf.setIntVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 120);
     hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
         HCatSemanticAnalyzer.class.getName());
     hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");

Modified: hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java Fri Sep 20 00:32:55 2013
@@ -27,8 +27,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 
-import junit.framework.TestCase;
-
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -51,29 +50,38 @@ import org.apache.hive.hcatalog.data.sch
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.data.Tuple;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 
-public class TestSequenceFileReadWrite extends TestCase {
-  private static final String TEST_DATA_DIR =
-      "/tmp/build/test/data/" + TestSequenceFileReadWrite.class.getCanonicalName();
-  private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
-  private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
-
-  private static Driver driver;
-  private static PigServer server;
-  private static String[] input;
-  private static HiveConf hiveConf;
+public class TestSequenceFileReadWrite {
 
-  public void Initialize() throws Exception {
+  private File dataDir;
+  private String warehouseDir;
+  private String inputFileName;
+  private Driver driver;
+  private PigServer server;
+  private String[] input;
+  private HiveConf hiveConf;
+
+  @Before
+  public void setup() throws Exception {
+    dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator + 
+        TestSequenceFileReadWrite.class.getCanonicalName() + "-" + System.currentTimeMillis());
+    hiveConf = new HiveConf(this.getClass());
+    warehouseDir = new File(dataDir, "warehouse").getAbsolutePath();
+    inputFileName = new File(dataDir, "input.data").getAbsolutePath();
     hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-    hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
+    hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, warehouseDir);
     driver = new Driver(hiveConf);
     SessionState.start(new CliSessionState(hiveConf));
 
-    new File(TEST_WAREHOUSE_DIR).mkdirs();
+    if(!(new File(warehouseDir).mkdirs())) {
+      throw new RuntimeException("Could not create " + warehouseDir);
+    }
 
     int numRows = 3;
     input = new String[numRows];
@@ -82,13 +90,18 @@ public class TestSequenceFileReadWrite e
       String col2 = "b" + i;
       input[i] = i + "," + col1 + "," + col2;
     }
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+    HcatTestUtils.createTestDataFile(inputFileName, input);
     server = new PigServer(ExecType.LOCAL);
   }
+  @After
+  public void teardown() throws IOException {
+    if(dataDir != null) {
+      FileUtils.deleteDirectory(dataDir);
+    }
+  }
 
   @Test
   public void testSequenceTableWriteRead() throws Exception {
-    Initialize();
     String createTable = "CREATE TABLE demo_table(a0 int, a1 String, a2 String) STORED AS SEQUENCEFILE";
     driver.run("drop table demo_table");
     int retCode1 = driver.run(createTable).getResponseCode();
@@ -96,7 +109,7 @@ public class TestSequenceFileReadWrite e
 
     server.setBatchOn();
     server.registerQuery("A = load '"
-        + INPUT_FILE_NAME
+        + inputFileName
         + "' using PigStorage(',') as (a0:int,a1:chararray,a2:chararray);");
     server.registerQuery("store A into 'demo_table' using org.apache.hive.hcatalog.pig.HCatStorer();");
     server.executeBatch();
@@ -117,7 +130,6 @@ public class TestSequenceFileReadWrite e
 
   @Test
   public void testTextTableWriteRead() throws Exception {
-    Initialize();
     String createTable = "CREATE TABLE demo_table_1(a0 int, a1 String, a2 String) STORED AS TEXTFILE";
     driver.run("drop table demo_table_1");
     int retCode1 = driver.run(createTable).getResponseCode();
@@ -125,7 +137,7 @@ public class TestSequenceFileReadWrite e
 
     server.setBatchOn();
     server.registerQuery("A = load '"
-        + INPUT_FILE_NAME
+        + inputFileName
         + "' using PigStorage(',') as (a0:int,a1:chararray,a2:chararray);");
     server.registerQuery("store A into 'demo_table_1' using org.apache.hive.hcatalog.pig.HCatStorer();");
     server.executeBatch();
@@ -146,7 +158,6 @@ public class TestSequenceFileReadWrite e
 
   @Test
   public void testSequenceTableWriteReadMR() throws Exception {
-    Initialize();
     String createTable = "CREATE TABLE demo_table_2(a0 int, a1 String, a2 String) STORED AS SEQUENCEFILE";
     driver.run("drop table demo_table_2");
     int retCode1 = driver.run(createTable).getResponseCode();
@@ -162,7 +173,7 @@ public class TestSequenceFileReadWrite e
     job.setOutputKeyClass(NullWritable.class);
     job.setOutputValueClass(DefaultHCatRecord.class);
     job.setInputFormatClass(TextInputFormat.class);
-    TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
+    TextInputFormat.setInputPaths(job, inputFileName);
 
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
         MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_2", null));
@@ -193,7 +204,6 @@ public class TestSequenceFileReadWrite e
 
   @Test
   public void testTextTableWriteReadMR() throws Exception {
-    Initialize();
     String createTable = "CREATE TABLE demo_table_3(a0 int, a1 String, a2 String) STORED AS TEXTFILE";
     driver.run("drop table demo_table_3");
     int retCode1 = driver.run(createTable).getResponseCode();
@@ -210,7 +220,7 @@ public class TestSequenceFileReadWrite e
     job.setOutputValueClass(DefaultHCatRecord.class);
     job.setInputFormatClass(TextInputFormat.class);
     job.setNumReduceTasks(0);
-    TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
+    TextInputFormat.setInputPaths(job, inputFileName);
 
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
         MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_3", null));

Modified: hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java (original)
+++ hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java Fri Sep 20 00:32:55 2013
@@ -18,6 +18,11 @@
  */
 package org.apache.hcatalog.pig;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
 import java.io.IOException;
 import java.io.RandomAccessFile;
@@ -29,8 +34,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
-import junit.framework.TestCase;
-
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -48,13 +52,16 @@ import org.apache.pig.data.DataType;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * @deprecated Use/modify {@link org.apache.hive.hcatalog.pig.TestHCatLoader} instead
  */
-public class TestHCatLoader extends TestCase {
-  private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
-    "/build/test/data/" + TestHCatLoader.class.getCanonicalName();
+public class TestHCatLoader {
+  private static final String TEST_DATA_DIR = System.getProperty("java.io.tmpdir") + File.separator
+      + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis();
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
   private static final String BASIC_FILE_NAME = TEST_DATA_DIR + "/basic.input.data";
   private static final String COMPLEX_FILE_NAME = TEST_DATA_DIR + "/complex.input.data";
@@ -63,13 +70,9 @@ public class TestHCatLoader extends Test
   private static final String COMPLEX_TABLE = "junit_unparted_complex";
   private static final String PARTITIONED_TABLE = "junit_parted_basic";
   private static final String SPECIFIC_SIZE_TABLE = "junit_specific_size";
-  private static Driver driver;
-
-  private static int guardTestCount = 6; // ugh, instantiate using introspection in guardedSetupBeforeClass
-  private static boolean setupHasRun = false;
 
-
-  private static Map<Integer, Pair<Integer, String>> basicInputData;
+  private Driver driver;
+  private Map<Integer, Pair<Integer, String>> basicInputData;
 
   protected String storageFormat() {
     return "RCFILE tblproperties('hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver'," +
@@ -97,18 +100,16 @@ public class TestHCatLoader extends Test
     createTable(tablename, schema, null);
   }
 
-  protected void guardedSetUpBeforeClass() throws Exception {
-    if (!setupHasRun) {
-      setupHasRun = true;
-    } else {
-      return;
-    }
+  @Before
+  public void setup() throws Exception {
 
     File f = new File(TEST_WAREHOUSE_DIR);
     if (f.exists()) {
       FileUtil.fullyDelete(f);
     }
-    new File(TEST_WAREHOUSE_DIR).mkdirs();
+    if(!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
+      throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
+    }
 
     HiveConf hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -118,8 +119,6 @@ public class TestHCatLoader extends Test
     driver = new Driver(hiveConf);
     SessionState.start(new CliSessionState(hiveConf));
 
-    cleanup();
-
     createTable(BASIC_TABLE, "a int, b string");
     createTable(COMPLEX_TABLE,
       "name string, studentid int, "
@@ -172,29 +171,16 @@ public class TestHCatLoader extends Test
 
   }
 
-  private void cleanup() throws IOException, CommandNeedRetryException {
-    dropTable(BASIC_TABLE);
-    dropTable(COMPLEX_TABLE);
-    dropTable(PARTITIONED_TABLE);
-    dropTable(SPECIFIC_SIZE_TABLE);
-  }
-
-  protected void guardedTearDownAfterClass() throws Exception {
-    guardTestCount--;
-    if (guardTestCount > 0) {
-      return;
+  @After
+  public void tearDown() throws Exception {
+    try {
+      dropTable(BASIC_TABLE);
+      dropTable(COMPLEX_TABLE);
+      dropTable(PARTITIONED_TABLE);
+      dropTable(SPECIFIC_SIZE_TABLE);
+    } finally {
+      FileUtils.deleteDirectory(new File(TEST_DATA_DIR));
     }
-    cleanup();
-  }
-
-  @Override
-  protected void setUp() throws Exception {
-    guardedSetUpBeforeClass();
-  }
-
-  @Override
-  protected void tearDown() throws Exception {
-    guardedTearDownAfterClass();
   }
 
+  @Test
  public void testSchemaLoadBasic() throws IOException {
@@ -213,6 +199,7 @@ public class TestHCatLoader extends Test
 
   }
 
+  @Test
   public void testReadDataBasic() throws IOException {
     PigServer server = new PigServer(ExecType.LOCAL);
 
@@ -230,7 +217,7 @@ public class TestHCatLoader extends Test
     }
     assertEquals(basicInputData.size(), numTuplesRead);
   }
-
+  @Test
   public void testSchemaLoadComplex() throws IOException {
 
     PigServer server = new PigServer(ExecType.LOCAL);
@@ -287,7 +274,7 @@ public class TestHCatLoader extends Test
     }
 
   }
-
+  @Test
   public void testReadPartitionedBasic() throws IOException, CommandNeedRetryException {
     PigServer server = new PigServer(ExecType.LOCAL);
 
@@ -350,7 +337,7 @@ public class TestHCatLoader extends Test
     }
     assertEquals(6, count2);
   }
-
+  @Test
   public void testProjectionsBasic() throws IOException {
 
     PigServer server = new PigServer(ExecType.LOCAL);
@@ -395,21 +382,21 @@ public class TestHCatLoader extends Test
     }
     assertEquals(basicInputData.size(), numTuplesRead);
   }
-
+  @Test
   public void testGetInputBytes() throws Exception {
     File file = new File(TEST_WAREHOUSE_DIR + "/" + SPECIFIC_SIZE_TABLE + "/part-m-00000");
     file.deleteOnExit();
     RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw");
     randomAccessFile.setLength(2L * 1024 * 1024 * 1024);
-
+    randomAccessFile.close();
     Job job = new Job();
     HCatLoader hCatLoader = new HCatLoader();
-    hCatLoader.setUDFContextSignature(this.getName());
+    hCatLoader.setUDFContextSignature("testGetInputBytes");
     hCatLoader.setLocation(SPECIFIC_SIZE_TABLE, job);
     ResourceStatistics statistics = hCatLoader.getStatistics(file.getAbsolutePath(), job);
     assertEquals(2048, (long) statistics.getmBytes());
   }
-
+  @Test
   public void testConvertBooleanToInt() throws Exception {
     String tbl = "test_convert_boolean_to_int";
     String inputFileName = TEST_DATA_DIR + "/testConvertBooleanToInt/data.txt";

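For context, the change above (and the matching change in the org.apache.hive.hcatalog copy further down) is a JUnit 3 to JUnit 4 migration: the guardedSetUpBeforeClass/guardedTearDownAfterClass counters are replaced by per-test @Before/@After methods, and every test method must now carry @Test because the class no longer extends TestCase. A minimal sketch of the pattern; class and field names here are illustrative, not from the patch:

import static org.junit.Assert.assertEquals;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class ExampleMigratedTest {
  private Object resource;          // stands in for Driver, tables, etc.

  @Before                           // runs before every @Test method
  public void setup() {
    resource = new Object();
  }

  @After                            // runs after every @Test, even on assertion failure
  public void tearDown() {
    resource = null;                // per-test cleanup replaces the static guard counters
  }

  @Test                             // required: without "extends TestCase",
  public void testSomething() {     // un-annotated public test methods never run
    assertEquals(4, 2 + 2);
  }
}
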
Modified: hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java (original)
+++ hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java Fri Sep 20 00:32:55 2013
@@ -210,7 +210,7 @@ public class TestHCatLoaderComplexSchema
       while (it.hasNext()) {
         Tuple input = data.get(i++);
         Tuple output = it.next();
-        Assert.assertEquals(input.toString(), output.toString());
+        compareTuples(input, output);
         LOG.info("tuple : {} ", output);
       }
       Schema dumpedXSchema = server.dumpSchema("X");
@@ -224,6 +224,23 @@ public class TestHCatLoaderComplexSchema
       dropTable(tablename);
     }
   }
+  private void compareTuples(Tuple t1, Tuple t2) throws ExecException {
+    Assert.assertEquals("Tuple Sizes don't match", t1.size(), t2.size());
+    for (int i = 0; i < t1.size(); i++) {
+      Object f1 = t1.get(i);
+      Object f2 = t2.get(i);
+      Assert.assertNotNull("left", f1);
+      Assert.assertNotNull("right", f2);
+      String msg = "right: " + f1 + ", left: " + f2;
+      Assert.assertEquals(msg, noOrder(f1.toString()), noOrder(f2.toString()));
+    }
+  }
+
+  private String noOrder(String s) {
+    char[] chars = s.toCharArray();
+    Arrays.sort(chars);
+    return new String(chars);
+  }
 
   private String compareIgnoreFiledNames(Schema expected, Schema got) throws FrontendException {
     if (expected == null || got == null) {

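The compareTuples/noOrder helpers above relax the old toString() equality: sorting the characters of both renderings makes the comparison insensitive to the order in which map or bag entries happen to serialize. A rough illustration (the map renderings are made up):

import java.util.Arrays;

public class NoOrderDemo {
  // Same normalization as the noOrder() helper in the patch.
  static String noOrder(String s) {
    char[] chars = s.toCharArray();
    Arrays.sort(chars);
    return new String(chars);
  }

  public static void main(String[] args) {
    // Two renderings of the same map entries in different orders...
    String left = "[a#1,b#2]";
    String right = "[b#2,a#1]";
    // ...normalize to the same character multiset:
    System.out.println(noOrder(left).equals(noOrder(right)));   // true
  }
}

The trade-off: character sorting is coarse, so distinct values can collide (e.g. "ab" and "ba" normalize identically); it buys order-insensitivity at the cost of some precision.
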
Modified: hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java (original)
+++ hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java Fri Sep 20 00:32:55 2013
@@ -25,6 +25,7 @@ import java.util.Iterator;
 
 import junit.framework.TestCase;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
@@ -35,6 +36,7 @@ import org.apache.hadoop.hive.ql.session
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.RecordWriter;
@@ -55,8 +57,8 @@ import org.apache.pig.data.Tuple;
 
 public class TestE2EScenarios extends TestCase {
 
-  private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
-    "/build/test/data/" + TestHCatLoader.class.getCanonicalName();
+  private static final String TEST_DATA_DIR = System.getProperty("java.io.tmpdir") + File.separator
+      + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis();
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
 
   private static final String TEXTFILE_LOCN = TEST_DATA_DIR + "/textfile";
@@ -74,7 +76,9 @@ public class TestE2EScenarios extends Te
     if (f.exists()) {
       FileUtil.fullyDelete(f);
     }
-    new File(TEST_WAREHOUSE_DIR).mkdirs();
+    if (!new File(TEST_WAREHOUSE_DIR).mkdirs()) {
+      throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
+    }
 
     HiveConf hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -88,9 +92,13 @@ public class TestE2EScenarios extends Te
 
   @Override
   protected void tearDown() throws Exception {
-    dropTable("inpy");
-    dropTable("rc5318");
-    dropTable("orc5318");
+    try {
+      dropTable("inpy");
+      dropTable("rc5318");
+      dropTable("orc5318");
+    } finally {
+      FileUtils.deleteDirectory(new File(TEST_DATA_DIR));
+    }
   }
 
   private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
@@ -191,9 +199,9 @@ public class TestE2EScenarios extends Te
 
   private TaskAttemptContext createTaskAttemptContext(Configuration tconf) {
     Configuration conf = (tconf == null) ? (new Configuration()) : tconf;
-    TaskAttemptID taskId = new TaskAttemptID();
+    TaskAttemptID taskId = HCatMapRedUtil.createTaskAttemptID(new JobID("200908190029", 1), false, 1, 1);
     conf.setInt("mapred.task.partition", taskId.getId());
-    conf.set("mapred.task.id", "attempt__0000_r_000000_" + taskId.getId());
+    conf.set("mapred.task.id", taskId.toString());
     TaskAttemptContext rtaskContext = HCatMapRedUtil.createTaskAttemptContext(conf , taskId);
     return rtaskContext;
   }

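The createTaskAttemptContext() fix above replaces a hand-built id string of the form "attempt__0000_r_000000_N", which lacks a job identifier and does not round-trip through TaskAttemptID parsing, with a fully-formed attempt id. A small sketch of what the new code produces, assuming the org.apache.hadoop.mapred API of this era:

import org.apache.hadoop.mapred.TaskAttemptID;

public class TaskAttemptIdDemo {
  public static void main(String[] args) {
    // Mirrors HCatMapRedUtil.createTaskAttemptID(new JobID("200908190029", 1), false, 1, 1):
    // (jtIdentifier, jobId, isMap, taskId, attemptId)
    TaskAttemptID id = new TaskAttemptID("200908190029", 1, false, 1, 1);
    System.out.println(id);   // attempt_200908190029_0001_r_000001_1
    // A well-formed id parses back cleanly:
    System.out.println(TaskAttemptID.forName(id.toString()).equals(id));   // true
  }
}
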
Modified: hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java (original)
+++ hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java Fri Sep 20 00:32:55 2013
@@ -18,6 +18,11 @@
  */
 package org.apache.hive.hcatalog.pig;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
 import java.io.IOException;
 import java.io.RandomAccessFile;
@@ -29,8 +34,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
-import junit.framework.TestCase;
-
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -48,10 +52,13 @@ import org.apache.pig.data.DataType;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
-
-public class TestHCatLoader extends TestCase {
-  private static final String TEST_DATA_DIR =
-    "/tmp/build/test/data/" + TestHCatLoader.class.getCanonicalName();
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestHCatLoader {
+  private static final String TEST_DATA_DIR = System.getProperty("java.io.tmpdir") + File.separator
+      + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis();
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
   private static final String BASIC_FILE_NAME = TEST_DATA_DIR + "/basic.input.data";
   private static final String COMPLEX_FILE_NAME = TEST_DATA_DIR + "/complex.input.data";
@@ -60,13 +67,9 @@ public class TestHCatLoader extends Test
   private static final String COMPLEX_TABLE = "junit_unparted_complex";
   private static final String PARTITIONED_TABLE = "junit_parted_basic";
   private static final String SPECIFIC_SIZE_TABLE = "junit_specific_size";
-  private static Driver driver;
-
-  private static int guardTestCount = 6; // ugh, instantiate using introspection in guardedSetupBeforeClass
-  private static boolean setupHasRun = false;
 
-
-  private static Map<Integer, Pair<Integer, String>> basicInputData;
+  private Driver driver;
+  private Map<Integer, Pair<Integer, String>> basicInputData;
 
   protected String storageFormat() {
     return "RCFILE tblproperties('hcat.isd'='org.apache.hive.hcatalog.rcfile.RCFileInputDriver'," +
@@ -94,18 +97,16 @@ public class TestHCatLoader extends Test
     createTable(tablename, schema, null);
   }
 
-  protected void guardedSetUpBeforeClass() throws Exception {
-    if (!setupHasRun) {
-      setupHasRun = true;
-    } else {
-      return;
-    }
+  @Before
+  public void setup() throws Exception {
 
     File f = new File(TEST_WAREHOUSE_DIR);
     if (f.exists()) {
       FileUtil.fullyDelete(f);
     }
-    new File(TEST_WAREHOUSE_DIR).mkdirs();
+    if (!new File(TEST_WAREHOUSE_DIR).mkdirs()) {
+      throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
+    }
 
     HiveConf hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -115,8 +116,6 @@ public class TestHCatLoader extends Test
     driver = new Driver(hiveConf);
     SessionState.start(new CliSessionState(hiveConf));
 
-    cleanup();
-
     createTable(BASIC_TABLE, "a int, b string");
     createTable(COMPLEX_TABLE,
       "name string, studentid int, "
@@ -169,31 +168,19 @@ public class TestHCatLoader extends Test
 
   }
 
-  private void cleanup() throws IOException, CommandNeedRetryException {
-    dropTable(BASIC_TABLE);
-    dropTable(COMPLEX_TABLE);
-    dropTable(PARTITIONED_TABLE);
-    dropTable(SPECIFIC_SIZE_TABLE);
-  }
-
-  protected void guardedTearDownAfterClass() throws Exception {
-    guardTestCount--;
-    if (guardTestCount > 0) {
-      return;
+  @After
+  public void tearDown() throws Exception {
+    try {
+      dropTable(BASIC_TABLE);
+      dropTable(COMPLEX_TABLE);
+      dropTable(PARTITIONED_TABLE);
+      dropTable(SPECIFIC_SIZE_TABLE);
+    } finally {
+      FileUtils.deleteDirectory(new File(TEST_DATA_DIR));
     }
-    cleanup();
-  }
-
-  @Override
-  protected void setUp() throws Exception {
-    guardedSetUpBeforeClass();
-  }
-
-  @Override
-  protected void tearDown() throws Exception {
-    guardedTearDownAfterClass();
   }
 
+  @Test
   public void testSchemaLoadBasic() throws IOException {
 
     PigServer server = new PigServer(ExecType.LOCAL);
@@ -210,6 +197,7 @@ public class TestHCatLoader extends Test
 
   }
 
+  @Test
   public void testReadDataBasic() throws IOException {
     PigServer server = new PigServer(ExecType.LOCAL);
 
@@ -228,6 +216,7 @@ public class TestHCatLoader extends Test
     assertEquals(basicInputData.size(), numTuplesRead);
   }
 
+  @Test
   public void testSchemaLoadComplex() throws IOException {
 
     PigServer server = new PigServer(ExecType.LOCAL);
@@ -285,6 +274,7 @@ public class TestHCatLoader extends Test
 
   }
 
+  @Test
   public void testReadPartitionedBasic() throws IOException, CommandNeedRetryException {
     PigServer server = new PigServer(ExecType.LOCAL);
 
@@ -348,6 +338,7 @@ public class TestHCatLoader extends Test
     assertEquals(6, count2);
   }
 
+  @Test
   public void testProjectionsBasic() throws IOException {
 
     PigServer server = new PigServer(ExecType.LOCAL);
@@ -393,20 +384,22 @@ public class TestHCatLoader extends Test
     assertEquals(basicInputData.size(), numTuplesRead);
   }
 
+  @Test
   public void testGetInputBytes() throws Exception {
     File file = new File(TEST_WAREHOUSE_DIR + "/" + SPECIFIC_SIZE_TABLE + "/part-m-00000");
     file.deleteOnExit();
     RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw");
     randomAccessFile.setLength(2L * 1024 * 1024 * 1024);
-
+    randomAccessFile.close();
     Job job = new Job();
     HCatLoader hCatLoader = new HCatLoader();
-    hCatLoader.setUDFContextSignature(this.getName());
+    hCatLoader.setUDFContextSignature("testGetInputBytes");
     hCatLoader.setLocation(SPECIFIC_SIZE_TABLE, job);
     ResourceStatistics statistics = hCatLoader.getStatistics(file.getAbsolutePath(), job);
     assertEquals(2048, (long) statistics.getmBytes());
   }
 
+  @Test
   public void testConvertBooleanToInt() throws Exception {
     String tbl = "test_convert_boolean_to_int";
     String inputFileName = TEST_DATA_DIR + "/testConvertBooleanToInt/data.txt";

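As in the org.apache.hcatalog copy above, TEST_DATA_DIR now lives under java.io.tmpdir with a timestamp suffix instead of under the source tree, so repeated runs get fresh directories and tearDown can delete everything. A sketch of the pattern next to the standard JDK alternative; names here are illustrative:

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

public class TempDirDemo {
  public static void main(String[] args) throws IOException {
    // The pattern used in the patch: tmpdir + class name + timestamp.
    String byTimestamp = System.getProperty("java.io.tmpdir") + File.separator
        + TempDirDemo.class.getCanonicalName() + "-" + System.currentTimeMillis();

    // A collision-free alternative (JDK 7+), not what the patch uses:
    File byNio = Files.createTempDirectory(TempDirDemo.class.getSimpleName()).toFile();

    System.out.println(byTimestamp);
    System.out.println(byNio);
  }
}
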
Modified: hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java (original)
+++ hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java Fri Sep 20 00:32:55 2013
@@ -211,7 +211,7 @@ public class TestHCatLoaderComplexSchema
       while (it.hasNext()) {
         Tuple input = data.get(i++);
         Tuple output = it.next();
-        Assert.assertEquals(input.toString(), output.toString());
+        compareTuples(input, output);
         LOG.info("tuple : {} ", output);
       }
       Schema dumpedXSchema = server.dumpSchema("X");
@@ -225,6 +225,24 @@ public class TestHCatLoaderComplexSchema
       dropTable(tablename);
     }
   }
+
+  private void compareTuples(Tuple t1, Tuple t2) throws ExecException {
+    Assert.assertEquals("Tuple Sizes don't match", t1.size(), t2.size());
+    for (int i = 0; i < t1.size(); i++) {
+      Object f1 = t1.get(i);
+      Object f2 = t2.get(i);
+      Assert.assertNotNull("left", f1);
+      Assert.assertNotNull("right", f2);
+      String msg = "right: " + f1 + ", left: " + f2;
+      Assert.assertEquals(msg, noOrder(f1.toString()), noOrder(f2.toString()));
+    }
+  }
+
+  private String noOrder(String s) {
+    char[] chars = s.toCharArray();
+    Arrays.sort(chars);
+    return new String(chars);
+  }
 
   private String compareIgnoreFiledNames(Schema expected, Schema got) throws FrontendException {
     if (expected == null || got == null) {

Modified: hive/branches/vectorization/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf (original)
+++ hive/branches/vectorization/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf Fri Sep 20 00:32:55 2013
@@ -377,7 +377,37 @@ $cfg = 
 
     },
 
-
+    {
+     # test that uses the "add jar" command in a hive script; the jar must be
+     # shipped to the templeton controller job using "files". Test #9 is the
+     # positive case (jar shipped); #10 is the negative case (jar not shipped).
+     'num' => 9,
+     'method' => 'POST',
+     'url' => ':TEMPLETON_URL:/templeton/v1/hive',
+     'post_options' => ['user.name=:UNAME:','execute=add jar piggybank.jar', 'files=:INPDIR_HDFS:/piggybank.jar',],
+     'json_field_substr_match' => { 'id' => '\d+'},
+                                #results
+     'status_code' => 200,
+     'check_job_created' => 1,
+     'check_job_complete' => 'SUCCESS', 
+     'check_job_exit_value' => 0,
+     'check_call_back' => 1,
+    },
+    {
+     # negative test case for "add jar" when the jar is not shipped to the
+     # templeton controller job using "files"
+     'num' => 10,
+     'method' => 'POST',
+     'url' => ':TEMPLETON_URL:/templeton/v1/hive',
+     'post_options' => ['user.name=:UNAME:','execute=add jar piggybank.jar',],
+     'json_field_substr_match' => { 'id' => '\d+'},
+                                #results
+     'status_code' => 200,
+     'check_job_created' => 1,
+     'check_job_complete' => 'SUCCESS', 
+     'check_job_exit_value' => 1,
+     'check_call_back' => 1,
+    },
 
    ]
   },

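Tests #9 and #10 above drive Templeton's REST endpoint: an HTTP POST of form fields to /templeton/v1/hive, where 'files' ships the jar into the controller job's working directory so that 'add jar piggybank.jar' can resolve it. A minimal sketch of the equivalent request; the host, user, and HDFS path stand in for the :TEMPLETON_URL:, :UNAME:, and :INPDIR_HDFS: substitutions and are not real values:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class TempletonAddJarDemo {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:50111/templeton/v1/hive");   // placeholder host/port
    String body = "user.name=" + URLEncoder.encode("hcatuser", "UTF-8")
        + "&execute=" + URLEncoder.encode("add jar piggybank.jar", "UTF-8")
        + "&files=" + URLEncoder.encode("hdfs:///input/piggybank.jar", "UTF-8");

    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
    OutputStream out = conn.getOutputStream();
    try {
      out.write(body.getBytes("UTF-8"));
    } finally {
      out.close();
    }
    // The conf above expects HTTP 200 and a JSON body whose "id" matches \d+.
    System.out.println(conn.getResponseCode());
  }
}
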
Modified: hive/branches/vectorization/hcatalog/storage-handlers/hbase/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/storage-handlers/hbase/pom.xml?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/storage-handlers/hbase/pom.xml (original)
+++ hive/branches/vectorization/hcatalog/storage-handlers/hbase/pom.xml Fri Sep 20 00:32:55 2013
@@ -54,6 +54,12 @@
       <scope>compile</scope>
     </dependency>
     <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hcatalog-pig-adapter</artifactId>
+      <version>${hcatalog.version}</version>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
       <version>${zookeeper.version}</version>
@@ -68,6 +74,12 @@
 
     <!-- test scope -->
     <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hcatalog-pig-adapter</artifactId>
+      <version>${hcatalog.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
       <version>${commons-io.version}</version>

Modified: hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java (original)
+++ hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java Fri Sep 20 00:32:55 2013
@@ -47,6 +47,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.serde2.SerDe;
@@ -66,7 +67,6 @@ import org.apache.hive.hcatalog.mapreduc
 import org.apache.hive.hcatalog.mapreduce.HCatTableInfo;
 import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
-import org.apache.hive.hcatalog.mapreduce.HCatStorageHandler;
 import org.apache.thrift.TBase;
 import org.apache.zookeeper.ZooKeeper;
 
@@ -78,7 +78,7 @@ import com.google.common.util.concurrent
  * tables through HCatalog. The implementation is very similar to the
  * HiveHBaseStorageHandler, with more details to suit HCatalog.
  */
-public class HBaseHCatStorageHandler extends HCatStorageHandler implements HiveMetaHook, Configurable {
+public class HBaseHCatStorageHandler extends DefaultStorageHandler implements HiveMetaHook, Configurable {
 
   public final static String DEFAULT_PREFIX = "default.";
   private final static String PROPERTY_INT_OUTPUT_LOCATION = "hcat.hbase.mapreduce.intermediateOutputLocation";
@@ -87,6 +87,7 @@ public class HBaseHCatStorageHandler ext
   private Configuration jobConf;
   private HBaseAdmin admin;
 
+  @Deprecated
   @Override
   public void configureInputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
     // Populate jobProperties with input table name, table columns, RM snapshot,
@@ -135,6 +136,7 @@ public class HBaseHCatStorageHandler ext
     }
   }
 
+  @Deprecated
   @Override
   public void configureOutputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
     // Populate jobProperties with output table name, hbase-default.xml, hbase-site.xml, OutputJobInfo
@@ -198,6 +200,7 @@ public class HBaseHCatStorageHandler ext
   * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler#
   * getAuthorizationProvider()
   */
+  @Deprecated
   @Override
   public HiveAuthorizationProvider getAuthorizationProvider()
     throws HiveException {
@@ -215,6 +218,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #commitCreateTable(org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Deprecated
   @Override
   public void commitCreateTable(Table table) throws MetaException {
   }
@@ -229,6 +233,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #commitDropTable(org.apache.hadoop.hive.metastore.api.Table, boolean)
    */
+  @Deprecated
   @Override
   public void commitDropTable(Table tbl, boolean deleteData)
     throws MetaException {
@@ -244,6 +249,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #preCreateTable(org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Deprecated
   @Override
   public void preCreateTable(Table tbl) throws MetaException {
     boolean isExternal = MetaStoreUtils.isExternalTable(tbl);
@@ -262,7 +268,7 @@ public class HBaseHCatStorageHandler ext
       if (hbaseColumnsMapping == null) {
         throw new MetaException(
           "No hbase.columns.mapping defined in table"
-            + " properties.");
+              + " properties.");
       }
 
       List<String> hbaseColumnFamilies = new ArrayList<String>();
@@ -346,6 +352,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #preDropTable(org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Deprecated
   @Override
   public void preDropTable(Table table) throws MetaException {
   }
@@ -358,6 +365,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #rollbackCreateTable(org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Deprecated
   @Override
   public void rollbackCreateTable(Table table) throws MetaException {
     checkDeleteTable(table);
@@ -371,6 +379,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #rollbackDropTable(org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Deprecated
   @Override
   public void rollbackDropTable(Table table) throws MetaException {
   }
@@ -380,6 +389,7 @@ public class HBaseHCatStorageHandler ext
    *
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler#getMetaHook()
    */
+  @Deprecated
   @Override
   public HiveMetaHook getMetaHook() {
     return this;
@@ -432,6 +442,7 @@ public class HBaseHCatStorageHandler ext
     return qualifiedName;
   }
 
+  @Deprecated
   @Override
   public Class<? extends InputFormat> getInputFormatClass() {
     return HBaseInputFormat.class;
@@ -450,6 +461,7 @@ public class HBaseHCatStorageHandler ext
   * @see
   * org.apache.hive.hcatalog.storagehandler.HCatStorageHandler#getSerDeClass()
   */
+  @Deprecated
   @Override
   public Class<? extends SerDe> getSerDeClass()
     throws UnsupportedOperationException {
@@ -460,6 +472,7 @@ public class HBaseHCatStorageHandler ext
     return jobConf;
   }
 
+  @Deprecated
   @Override
   public Configuration getConf() {
 
@@ -469,6 +482,7 @@ public class HBaseHCatStorageHandler ext
     return hbaseConf;
   }
 
+  @Deprecated
   @Override
   public void setConf(Configuration conf) {
     //setConf is called both during DDL operations and  mapred read/write jobs.

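With HCatStorageHandler gone, the handler above now extends Hive's own org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler, overriding only what it needs and keeping HiveMetaHook for the DDL callbacks. A minimal sketch of that base class's contract, with illustrative format and serde choices (this is not the HBase handler's configuration):

import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;

// Smallest useful storage handler: DefaultStorageHandler supplies no-op
// job configuration, so a subclass only has to name its formats and serde.
public class MinimalStorageHandler extends DefaultStorageHandler {
  @Override
  public Class<? extends InputFormat> getInputFormatClass() {
    return TextInputFormat.class;
  }

  @Override
  public Class<? extends OutputFormat> getOutputFormatClass() {
    return HiveIgnoreKeyTextOutputFormat.class;
  }

  @Override
  public Class<? extends SerDe> getSerDeClass() {
    return LazySimpleSerDe.class;
  }
}
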
Modified: hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java (original)
+++ hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java Fri Sep 20 00:32:55 2013
@@ -36,6 +36,7 @@ public interface RevisionManager {
   /**
    * Initialize the revision manager.
    */
+  @Deprecated
   public void initialize(Configuration conf);
 
   /**
@@ -43,6 +44,7 @@ public interface RevisionManager {
    *
    * @throws IOException
    */
+  @Deprecated
   public void open() throws IOException;
 
   /**
@@ -50,6 +52,7 @@ public interface RevisionManager {
    *
    * @throws IOException
    */
+  @Deprecated
   public void close() throws IOException;
 
   /**
@@ -57,12 +60,14 @@ public interface RevisionManager {
    * @param table the hbase table name
    * @param columnFamilies the column families in the table
    */
+  @Deprecated
   public void createTable(String table, List<String> columnFamilies) throws IOException;
 
   /**
    * Remove table data from revision manager for a dropped table.
    * @param table the hbase table name
    */
+  @Deprecated
   public void dropTable(String table) throws IOException;
 
   /**
@@ -73,6 +78,7 @@ public interface RevisionManager {
    * @return a new Transaction
    * @throws IOException
    */
+  @Deprecated
   public Transaction beginWriteTransaction(String table, List<String> families)
     throws IOException;
 
@@ -85,6 +91,7 @@ public interface RevisionManager {
    * @return a new Transaction
    * @throws IOException
    */
+  @Deprecated
   public Transaction beginWriteTransaction(String table,
                        List<String> families, long keepAlive) throws IOException;
 
@@ -94,6 +101,7 @@ public interface RevisionManager {
    * @param transaction
    * @throws IOException
    */
+  @Deprecated
   public void commitWriteTransaction(Transaction transaction)
     throws IOException;
 
@@ -103,6 +111,7 @@ public interface RevisionManager {
    * @param transaction
    * @throws IOException
    */
+  @Deprecated
   public void abortWriteTransaction(Transaction transaction)
     throws IOException;
 
@@ -114,8 +123,9 @@ public interface RevisionManager {
    * @return a list of aborted WriteTransactions
    * @throws java.io.IOException
    */
+  @Deprecated
   public List<FamilyRevision> getAbortedWriteTransactions(String table,
-                              String columnFamily) throws IOException;
+      String columnFamily) throws IOException;
 
   /**
    * Create the latest snapshot of the table.
@@ -124,6 +134,7 @@ public interface RevisionManager {
    * @return a new snapshot
    * @throws IOException
    */
+  @Deprecated
   public TableSnapshot createSnapshot(String tableName) throws IOException;
 
   /**
@@ -134,6 +145,7 @@ public interface RevisionManager {
    * @return a new snapshot
    * @throws IOException
    */
+  @Deprecated
   public TableSnapshot createSnapshot(String tableName, long revision)
     throws IOException;
 
@@ -143,6 +155,7 @@ public interface RevisionManager {
    * @param transaction
    * @throws IOException
    */
+  @Deprecated
   public void keepAlive(Transaction transaction) throws IOException;
 
 }

Modified: hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java (original)
+++ hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java Fri Sep 20 00:32:55 2013
@@ -172,8 +172,12 @@ public abstract class SkeletonHBaseTest 
     protected int usageCount = 0;
 
     public Context(String handle) {
-      testDir = new File(TEST_DIR + "/test_" + handle + "_" + Math.abs(new Random().nextLong()) + "/").getPath();
-      System.out.println("Cluster work directory: " + testDir);
+      try {
+        testDir = new File(TEST_DIR + "/test_" + handle + "_" + Math.abs(new Random().nextLong()) + "/").getCanonicalPath();
+        System.out.println("Cluster work directory: " + testDir);
+      } catch (IOException e) {
+        throw new IllegalStateException("Failed to generate testDir", e);
+      }
     }
 
     public void start() {