Posted to commits@hive.apache.org by ha...@apache.org on 2014/11/22 18:39:26 UTC

svn commit: r1641092 - in /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql: exec/ exec/persistence/ io/ metadata/ parse/ plan/

Author: hashutosh
Date: Sat Nov 22 17:39:26 2014
New Revision: 1641092

URL: http://svn.apache.org/r1641092
Log:
HIVE-8910 : Refactoring of PassThroughOutputFormat (Navis via Sushanth Sowmyan)
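
In short: the old implementation stashed the real output format's class name in a ThreadLocal and the jobconf key hive.passthrough.storagehandler.of, then reconstructed the format later through setConf(); the refactored code instantiates whatever OutputFormat the table declares and wraps the instance directly when it is not already a HiveOutputFormat. A minimal sketch of the pattern (illustrative only; the real helper is the getHiveOutputFormat() added to HiveFileFormatUtils below, and `wrap` is a hypothetical name):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
    import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
    import org.apache.hadoop.mapred.OutputFormat;
    import org.apache.hadoop.util.ReflectionUtils;

    static HiveOutputFormat<?, ?> wrap(Configuration conf,
        Class<? extends OutputFormat> outputClass) {
      OutputFormat<?, ?> of = ReflectionUtils.newInstance(outputClass, conf);
      return of instanceof HiveOutputFormat
          ? (HiveOutputFormat<?, ?>) of          // already a Hive format
          : new HivePassThroughOutputFormat(of); // adapt anything else
    }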

Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java Sat Nov 22 17:39:26 2014
@@ -44,7 +44,6 @@ import org.apache.hadoop.hive.ql.io.Hive
 import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.HivePartitioner;
-import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
 import org.apache.hadoop.hive.ql.io.RecordUpdater;
 import org.apache.hadoop.hive.ql.io.StatsProvidingRecordWriter;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -329,7 +328,7 @@ public class FileSinkOperator extends Te
       taskId = Utilities.getTaskId(hconf);
       initializeSpecPath();
       fs = specPath.getFileSystem(hconf);
-      hiveOutputFormat = conf.getTableInfo().getOutputFileFormatClass().newInstance();
+      hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(hconf, conf.getTableInfo());
       isCompressed = conf.getCompressed();
       parent = Utilities.toTempPath(conf.getDirName());
       statsCollectRawDataSize = conf.isStatsCollectRawDataSize();
@@ -338,6 +337,11 @@ public class FileSinkOperator extends Te
       serializer.initialize(null, conf.getTableInfo().getProperties());
       outputClass = serializer.getSerializedClass();
 
+      if (isLogInfoEnabled) {
+        LOG.info("Using serializer : " + serializer + " and formatter : " + hiveOutputFormat +
+            (isCompressed ? " with compression" : ""));
+      }
+
       // Timeout is chosen to make sure that even if one iteration takes more than
       // half of the script.timeout but less than script.timeout, we will still
       // be able to report progress.
@@ -1046,26 +1050,13 @@ public class FileSinkOperator extends Te
 
   public void checkOutputSpecs(FileSystem ignored, JobConf job) throws IOException {
     if (hiveOutputFormat == null) {
+      Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), job);
       try {
-        if (getConf().getTableInfo().getJobProperties() != null) {
-             //Setting only for Storage Handler
-             if (getConf().getTableInfo().getJobProperties().get(HivePassThroughOutputFormat.HIVE_PASSTHROUGH_STORAGEHANDLER_OF_JOBCONFKEY) != null) {
-                 job.set(HivePassThroughOutputFormat.HIVE_PASSTHROUGH_STORAGEHANDLER_OF_JOBCONFKEY,getConf().getTableInfo().getJobProperties().get(HivePassThroughOutputFormat.HIVE_PASSTHROUGH_STORAGEHANDLER_OF_JOBCONFKEY));
-                 hiveOutputFormat = ReflectionUtils.newInstance(conf.getTableInfo().getOutputFileFormatClass(),job);
-           }
-          else {
-                 hiveOutputFormat = conf.getTableInfo().getOutputFileFormatClass().newInstance();
-          }
-        }
-        else {
-              hiveOutputFormat = conf.getTableInfo().getOutputFileFormatClass().newInstance();
-        }
+        hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(job, getConf().getTableInfo());
       } catch (Exception ex) {
         throw new IOException(ex);
       }
     }
-    Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), job);
-
     if (conf.getTableInfo().isNonNative()) {
      //check the output specs only if it is a storage handler (native tables' output formats do
      //not set the job's output properties correctly)
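
One behavioral detail in the hunk above: Utilities.copyTableJobPropertiesToConf() now runs before the output format is built (and only when it has not been built yet), so a storage handler's job properties are already present in the JobConf when ReflectionUtils instantiates the class against it. Condensed control flow, as a sketch rather than the full method:

    public void checkOutputSpecs(FileSystem ignored, JobConf job) throws IOException {
      if (hiveOutputFormat == null) {
        // copy the table's job properties first, so the reflective
        // construction inside getHiveOutputFormat() sees them
        Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), job);
        try {
          hiveOutputFormat =
              HiveFileFormatUtils.getHiveOutputFormat(job, getConf().getTableInfo());
        } catch (Exception ex) {
          throw new IOException(ex);
        }
      }
      // ... the non-native output-spec check continues as in the diff
    }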

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Sat Nov 22 17:39:26 2014
@@ -1841,7 +1841,7 @@ public final class Utilities {
       Serializer serializer = (Serializer) tableInfo.getDeserializerClass().newInstance();
       serializer.initialize(null, tableInfo.getProperties());
       outputClass = serializer.getSerializedClass();
-      hiveOutputFormat = conf.getTableInfo().getOutputFileFormatClass().newInstance();
+      hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(hconf, conf.getTableInfo());
     } catch (SerDeException e) {
       throw new HiveException(e);
     } catch (InstantiationException e) {
@@ -3309,7 +3309,7 @@ public final class Utilities {
 
   @SuppressWarnings({"rawtypes", "unchecked"})
   private static Path createEmptyFile(Path hiveScratchDir,
-      Class<? extends HiveOutputFormat> outFileFormat, JobConf job,
+      HiveOutputFormat outFileFormat, JobConf job,
       int sequenceNumber, Properties props, boolean dummyRow)
           throws IOException, InstantiationException, IllegalAccessException {
 
@@ -3325,7 +3325,7 @@ public final class Utilities {
     String newFile = newDir + Path.SEPARATOR + "emptyFile";
     Path newFilePath = new Path(newFile);
 
-    RecordWriter recWriter = outFileFormat.newInstance().getHiveRecordWriter(job, newFilePath,
+    RecordWriter recWriter = outFileFormat.getHiveRecordWriter(job, newFilePath,
         Text.class, false, props, null);
     if (dummyRow) {
       // empty files are omitted at CombineHiveInputFormat.
@@ -3341,28 +3341,29 @@ public final class Utilities {
   @SuppressWarnings("rawtypes")
   private static Path createDummyFileForEmptyPartition(Path path, JobConf job, MapWork work,
       Path hiveScratchDir, String alias, int sequenceNumber)
-          throws IOException, InstantiationException, IllegalAccessException {
+          throws Exception {
 
     String strPath = path.toString();
 
     // The input file does not exist; replace it with an empty file
     PartitionDesc partDesc = work.getPathToPartitionInfo().get(strPath);
-    boolean nonNative = partDesc.getTableDesc().isNonNative();
-    boolean oneRow = partDesc.getInputFileFormatClass() == OneNullRowInputFormat.class;
-    Properties props = SerDeUtils.createOverlayedProperties(
-        partDesc.getTableDesc().getProperties(), partDesc.getProperties());
-    Class<? extends HiveOutputFormat> outFileFormat = partDesc.getOutputFileFormatClass();
-
-    if (nonNative) {
+    if (partDesc.getTableDesc().isNonNative()) {
       // if this isn't a hive table we can't create an empty file for it.
       return path;
     }
 
+    Properties props = SerDeUtils.createOverlayedProperties(
+        partDesc.getTableDesc().getProperties(), partDesc.getProperties());
+    HiveOutputFormat outFileFormat = HiveFileFormatUtils.getHiveOutputFormat(job, partDesc);
+
+    boolean oneRow = partDesc.getInputFileFormatClass() == OneNullRowInputFormat.class;
+
     Path newPath = createEmptyFile(hiveScratchDir, outFileFormat, job,
         sequenceNumber, props, oneRow);
 
-
-    LOG.info("Changed input file to " + newPath);
+    if (LOG.isInfoEnabled()) {
+      LOG.info("Changed input file " + strPath + " to empty file " + newPath);
+    }
 
     // update the work
     String strNewPath = newPath.toString();
@@ -3384,23 +3385,23 @@ public final class Utilities {
   @SuppressWarnings("rawtypes")
   private static Path createDummyFileForEmptyTable(JobConf job, MapWork work,
       Path hiveScratchDir, String alias, int sequenceNumber)
-          throws IOException, InstantiationException, IllegalAccessException {
+          throws Exception {
 
     TableDesc tableDesc = work.getAliasToPartnInfo().get(alias).getTableDesc();
-    Properties props = tableDesc.getProperties();
-    boolean nonNative = tableDesc.isNonNative();
-    Class<? extends HiveOutputFormat> outFileFormat = tableDesc.getOutputFileFormatClass();
-
-    if (nonNative) {
+    if (tableDesc.isNonNative()) {
       // if this isn't a hive table we can't create an empty file for it.
       return null;
     }
 
+    Properties props = tableDesc.getProperties();
+    HiveOutputFormat outFileFormat = HiveFileFormatUtils.getHiveOutputFormat(job, tableDesc);
+
     Path newPath = createEmptyFile(hiveScratchDir, outFileFormat, job,
         sequenceNumber, props, false);
 
-
-    LOG.info("Changed input file to " + newPath.toString());
+    if (LOG.isInfoEnabled()) {
+      LOG.info("Changed input file for alias " + alias + " to " + newPath);
+    }
 
     // update the work
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java Sat Nov 22 17:39:26 2014
@@ -523,7 +523,7 @@ public class RowContainer<ROW extends Li
       tmpFile.deleteOnExit();
 
       // rFile = new RandomAccessFile(tmpFile, "rw");
-      HiveOutputFormat<?, ?> hiveOutputFormat = tblDesc.getOutputFileFormatClass().newInstance();
+      HiveOutputFormat<?, ?> hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(jc, tblDesc);
       tempOutPath = new Path(tmpFile.toString());
       JobConf localJc = getLocalFSJobConfClone(jc);
       rw = HiveFileFormatUtils.getRecordWriter(this.jobCloneUsingLocalFs,

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java Sat Nov 22 17:39:26 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.io;
 
 import java.io.IOException;
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -27,7 +26,9 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -35,7 +36,6 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -67,23 +67,16 @@ public final class HiveFileFormatUtils {
 
   static {
     outputFormatSubstituteMap =
-        new HashMap<Class<? extends OutputFormat>, Class<? extends HiveOutputFormat>>();
+        new ConcurrentHashMap<Class<?>, Class<? extends OutputFormat>>();
     HiveFileFormatUtils.registerOutputFormatSubstitute(
         IgnoreKeyTextOutputFormat.class, HiveIgnoreKeyTextOutputFormat.class);
     HiveFileFormatUtils.registerOutputFormatSubstitute(
         SequenceFileOutputFormat.class, HiveSequenceFileOutputFormat.class);
   }
 
-  private static ThreadLocal<String> tRealOutputFormat = new ThreadLocal<String>() {
-    @Override
-    protected String initialValue() {
-      return null;
-    }
-  };
-
   @SuppressWarnings("unchecked")
-  private static Map<Class<? extends OutputFormat>, Class<? extends HiveOutputFormat>>
-  outputFormatSubstituteMap;
+  private static Map<Class<?>, Class<? extends OutputFormat>>
+    outputFormatSubstituteMap;
 
   /**
    * register a substitute.
@@ -93,8 +86,7 @@ public final class HiveFileFormatUtils {
    * @param substitute
    */
   @SuppressWarnings("unchecked")
-  public static synchronized void registerOutputFormatSubstitute(
-      Class<? extends OutputFormat> origin,
+  public static void registerOutputFormatSubstitute(Class<?> origin,
       Class<? extends HiveOutputFormat> substitute) {
     outputFormatSubstituteMap.put(origin, substitute);
   }
@@ -103,44 +95,19 @@ public final class HiveFileFormatUtils {
    * get a OutputFormat's substitute HiveOutputFormat.
    */
   @SuppressWarnings("unchecked")
-  public static synchronized Class<? extends HiveOutputFormat> getOutputFormatSubstitute(
-      Class<?> origin, boolean storagehandlerflag) {
-    if (HiveOutputFormat.class.isAssignableFrom(origin)) {
-      return (Class<? extends HiveOutputFormat>) origin;
-    }
-    Class<? extends HiveOutputFormat> result = outputFormatSubstituteMap
-        .get(origin);
-    if ((storagehandlerflag == true) && (result == null || result == HivePassThroughOutputFormat.class)) {
-      HiveFileFormatUtils.setRealOutputFormatClassName(origin.getName());
-      result = HivePassThroughOutputFormat.class;
+  public static Class<? extends OutputFormat> getOutputFormatSubstitute(
+      Class<?> origin) {
+    if (origin == null || HiveOutputFormat.class.isAssignableFrom(origin)) {
+      return (Class<? extends OutputFormat>) origin;  // hive native
+    }
+    Class<? extends OutputFormat> substitute = outputFormatSubstituteMap.get(origin);
+    if (substitute != null) {
+      return substitute;  // substituted
     }
-    return result;
-  }
-
-  /**
-   * get a RealOutputFormatClassName corresponding to the HivePassThroughOutputFormat
-   */
-  @SuppressWarnings("unchecked")
-  public static String getRealOutputFormatClassName()
-  {
-    return tRealOutputFormat.get();
+    return (Class<? extends OutputFormat>) origin;
   }
 
   /**
-   * set a RealOutputFormatClassName corresponding to the HivePassThroughOutputFormat
-   */
-  public static void setRealOutputFormatClassName(
-      String destination) {
-    if (destination != null){
-      tRealOutputFormat.set(destination);
-    }
-    else {
-      return;
-    }
-  }
-
-
-  /**
    * get the final output path of a given FileOutputFormat.
    *
    * @param parent
@@ -279,39 +246,34 @@ public final class HiveFileFormatUtils {
   }
 
   public static RecordWriter getRecordWriter(JobConf jc,
-      HiveOutputFormat<?, ?> hiveOutputFormat,
-      final Class<? extends Writable> valueClass, boolean isCompressed,
+      OutputFormat<?, ?> outputFormat,
+      Class<? extends Writable> valueClass, boolean isCompressed,
       Properties tableProp, Path outPath, Reporter reporter
       ) throws IOException, HiveException {
-    if (hiveOutputFormat != null) {
-      return hiveOutputFormat.getHiveRecordWriter(jc, outPath, valueClass,
-          isCompressed, tableProp, reporter);
+    if (!(outputFormat instanceof HiveOutputFormat)) {
+      outputFormat = new HivePassThroughOutputFormat(outputFormat);
     }
-    return null;
+    return ((HiveOutputFormat)outputFormat).getHiveRecordWriter(
+        jc, outPath, valueClass, isCompressed, tableProp, reporter);
   }
 
-  private static HiveOutputFormat<?, ?> getHiveOutputFormat(JobConf jc, TableDesc tableInfo)
+  public static HiveOutputFormat<?, ?> getHiveOutputFormat(Configuration conf, TableDesc tableDesc)
       throws HiveException {
-    boolean storagehandlerofhivepassthru = false;
-    HiveOutputFormat<?, ?> hiveOutputFormat;
-    try {
-      if (tableInfo.getJobProperties() != null) {
-        if (tableInfo.getJobProperties().get(
-            HivePassThroughOutputFormat.HIVE_PASSTHROUGH_STORAGEHANDLER_OF_JOBCONFKEY) != null) {
-          jc.set(HivePassThroughOutputFormat.HIVE_PASSTHROUGH_STORAGEHANDLER_OF_JOBCONFKEY,
-              tableInfo.getJobProperties()
-                  .get(HivePassThroughOutputFormat.HIVE_PASSTHROUGH_STORAGEHANDLER_OF_JOBCONFKEY));
-          storagehandlerofhivepassthru = true;
-        }
-      }
-      if (storagehandlerofhivepassthru) {
-        return ReflectionUtils.newInstance(tableInfo.getOutputFileFormatClass(), jc);
-      } else {
-        return tableInfo.getOutputFileFormatClass().newInstance();
-      }
-    } catch (Exception e) {
-      throw new HiveException(e);
+    return getHiveOutputFormat(conf, tableDesc.getOutputFileFormatClass());
+  }
+
+  public static HiveOutputFormat<?, ?> getHiveOutputFormat(Configuration conf, PartitionDesc partDesc)
+      throws HiveException {
+    return getHiveOutputFormat(conf, partDesc.getOutputFileFormatClass());
+  }
+
+  private static HiveOutputFormat<?, ?> getHiveOutputFormat(
+      Configuration conf, Class<? extends OutputFormat> outputClass) throws HiveException {
+    OutputFormat<?, ?> outputFormat = ReflectionUtils.newInstance(outputClass, conf);
+    if (!(outputFormat instanceof HiveOutputFormat)) {
+      outputFormat = new HivePassThroughOutputFormat(outputFormat);
     }
+    return (HiveOutputFormat<?, ?>) outputFormat;
   }
 
   public static RecordUpdater getAcidRecordUpdater(JobConf jc, TableDesc tableInfo, int bucket,
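
To summarize the new substitution contract: a null class, or one that already implements HiveOutputFormat, is returned unchanged; a class registered in the map (e.g. IgnoreKeyTextOutputFormat) yields its Hive substitute; anything else is returned as-is and only gets wrapped in HivePassThroughOutputFormat when an instance is actually built. Because the map is now a ConcurrentHashMap, registerOutputFormatSubstitute() can drop its `synchronized` keyword; note that ConcurrentHashMap rejects null keys, so a null origin must never be registered. A usage sketch under those assumptions (CustomOutputFormat is hypothetical; jobConf and tableDesc are assumed in scope):

    // registered legacy class: mapped to its Hive substitute
    Class<? extends OutputFormat> a =
        HiveFileFormatUtils.getOutputFormatSubstitute(IgnoreKeyTextOutputFormat.class);
    assert a == HiveIgnoreKeyTextOutputFormat.class;

    // unregistered storage-handler format: returned as-is ...
    Class<? extends OutputFormat> b =
        HiveFileFormatUtils.getOutputFormatSubstitute(CustomOutputFormat.class);
    assert b == CustomOutputFormat.class;

    // ... and wrapped only at instantiation time
    HiveOutputFormat<?, ?> usable =
        HiveFileFormatUtils.getHiveOutputFormat(jobConf, tableDesc);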

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java Sat Nov 22 17:39:26 2014
@@ -21,80 +21,35 @@ package org.apache.hadoop.hive.ql.io;
 import java.io.IOException;
 import java.util.Properties;
 
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapred.RecordWriter;
 import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.ReflectionUtils;
 
 /**
 *  This pass-through class wraps OutputFormat implementations so that OutputFormats not derived from
 *  HiveOutputFormat get through the checker
  */
+public class HivePassThroughOutputFormat<K, V> implements HiveOutputFormat<K, V>{
 
-public class HivePassThroughOutputFormat<K, V> implements Configurable, HiveOutputFormat<K, V>{
+  private final OutputFormat<?, ?> actualOutputFormat;
 
-  private OutputFormat<? super WritableComparable<?>, ? super Writable> actualOutputFormat;
-  private String actualOutputFormatClass = "";
-  private Configuration conf;
-  private boolean initialized;
-  public static final String HIVE_PASSTHROUGH_OF_CLASSNAME =
-                                  "org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat";
-
-  public static final String HIVE_PASSTHROUGH_STORAGEHANDLER_OF_JOBCONFKEY =
-                                 "hive.passthrough.storagehandler.of";
-
-  public HivePassThroughOutputFormat() {
-    //construct this class through ReflectionUtils from FileSinkOperator
-    this.actualOutputFormat = null;
-    this.initialized = false;
-  }
-
-  private void createActualOF() throws IOException {
-    Class<? extends OutputFormat> cls;
-    try {
-      int e;
-      if (actualOutputFormatClass != null)
-       {
-        cls =
-           (Class<? extends OutputFormat>) Class.forName(actualOutputFormatClass, true,
-                Utilities.getSessionSpecifiedClassLoader());
-      } else {
-        throw new RuntimeException("Null pointer detected in actualOutputFormatClass");
-      }
-    } catch (ClassNotFoundException e) {
-      throw new IOException(e);
-    }
-    OutputFormat<? super WritableComparable<?>, ? super Writable> actualOF =
-         ReflectionUtils.newInstance(cls, this.getConf());
-    this.actualOutputFormat = actualOF;
+  public HivePassThroughOutputFormat(OutputFormat<?, ?> outputFormat) {
+    actualOutputFormat = outputFormat;
   }
 
   @Override
   public void checkOutputSpecs(FileSystem ignored, JobConf job) throws IOException {
-    if (this.initialized == false) {
-      createActualOF();
-      this.initialized = true;
-    }
-   this.actualOutputFormat.checkOutputSpecs(ignored, job);
+    actualOutputFormat.checkOutputSpecs(ignored, job);
   }
 
   @Override
   public org.apache.hadoop.mapred.RecordWriter<K, V> getRecordWriter(FileSystem ignored,
        JobConf job, String name, Progressable progress) throws IOException {
-    if (this.initialized == false) {
-      createActualOF();
-      this.initialized = true;
-    }
-    return (RecordWriter<K, V>) this.actualOutputFormat.getRecordWriter(ignored,
+    return (RecordWriter<K, V>) actualOutputFormat.getRecordWriter(ignored,
                  job, name, progress);
   }
 
@@ -102,31 +57,12 @@ public class HivePassThroughOutputFormat
   public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getHiveRecordWriter(
       JobConf jc, Path finalOutPath, Class<? extends Writable> valueClass, boolean isCompressed,
       Properties tableProperties, Progressable progress) throws IOException {
-    if (this.initialized == false) {
-      createActualOF();
-    }
-    if (this.actualOutputFormat instanceof HiveOutputFormat) {
-      return ((HiveOutputFormat<K, V>) this.actualOutputFormat).getHiveRecordWriter(jc,
+    if (actualOutputFormat instanceof HiveOutputFormat) {
+      return ((HiveOutputFormat<K, V>) actualOutputFormat).getHiveRecordWriter(jc,
            finalOutPath, valueClass, isCompressed, tableProperties, progress);
     }
-    else {
-      FileSystem fs = finalOutPath.getFileSystem(jc);
-      HivePassThroughRecordWriter hivepassthroughrecordwriter = new HivePassThroughRecordWriter(
-              this.actualOutputFormat.getRecordWriter(fs, jc, null, progress));
-      return hivepassthroughrecordwriter;
-    }
-  }
-
-  @Override
-  public Configuration getConf() {
-    return conf;
-  }
-
-  @Override
-  public void setConf(Configuration config) {
-    if (config.get(HIVE_PASSTHROUGH_STORAGEHANDLER_OF_JOBCONFKEY) != null) {
-      actualOutputFormatClass = config.get(HIVE_PASSTHROUGH_STORAGEHANDLER_OF_JOBCONFKEY);
-    }
-    this.conf = config;
+    FileSystem fs = finalOutPath.getFileSystem(jc);
+    RecordWriter<?, ?> recordWriter = actualOutputFormat.getRecordWriter(fs, jc, null, progress);
+    return new HivePassThroughRecordWriter(recordWriter);
   }
 }
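
After this rewrite the wrapper is a plain immutable adapter: the real OutputFormat arrives through the constructor, and the Configurable plumbing, the lazy createActualOF(), and both jobconf-key constants are gone. Minimal usage sketch (ThirdPartyOutputFormat is a hypothetical format that does not implement HiveOutputFormat):

    OutputFormat<?, ?> thirdParty =
        ReflectionUtils.newInstance(ThirdPartyOutputFormat.class, job);
    HiveOutputFormat<?, ?> wrapped = new HivePassThroughOutputFormat(thirdParty);
    // getHiveRecordWriter() delegates directly when the wrapped format is a
    // HiveOutputFormat; otherwise it adapts getRecordWriter() through
    // HivePassThroughRecordWriter, as in the hunk above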

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java Sat Nov 22 17:39:26 2014
@@ -32,7 +32,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.ProtectMode;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -43,9 +42,9 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
-import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.transport.TMemoryBuffer;
@@ -70,7 +69,7 @@ public class Partition implements Serial
    * These fields are cached. The information comes from tPartition.
    */
   private Deserializer deserializer;
-  private Class<? extends HiveOutputFormat> outputFormatClass;
+  private Class<? extends OutputFormat> outputFormatClass;
   private Class<? extends InputFormat> inputFormatClass;
 
   /**
@@ -186,16 +185,14 @@ public class Partition implements Serial
       return;
     }
 
-    String partName = "";
     if (table.isPartitioned()) {
       try {
-        partName = Warehouse.makePartName(table.getPartCols(), tPartition.getValues());
+        String partName = Warehouse.makePartName(table.getPartCols(), tPartition.getValues());
         if (tPartition.getSd().getLocation() == null) {
           // set default if location is not set and this is a physical
           // table partition (not a view partition)
           if (table.getDataLocation() != null) {
-            Path partPath = new Path(
-              table.getDataLocation().toString(), partName);
+            Path partPath = new Path(table.getDataLocation(), partName);
             tPartition.getSd().setLocation(partPath.toString());
           }
         }
@@ -287,18 +284,16 @@ public class Partition implements Serial
   public void setOutputFormatClass(Class<? extends HiveOutputFormat> outputFormatClass) {
     this.outputFormatClass = outputFormatClass;
     tPartition.getSd().setOutputFormat(HiveFileFormatUtils
-        .getOutputFormatSubstitute(outputFormatClass, false).toString());
+        .getOutputFormatSubstitute(outputFormatClass).toString());
   }
 
   final public Class<? extends InputFormat> getInputFormatClass()
       throws HiveException {
     if (inputFormatClass == null) {
-      String clsName = null;
-      if (tPartition != null && tPartition.getSd() != null) {
-        clsName = tPartition.getSd().getInputFormat();
-      }
+      // sd can be null for views
+      String clsName = tPartition.getSd() == null ? null : tPartition.getSd().getInputFormat();
       if (clsName == null) {
-        clsName = org.apache.hadoop.mapred.SequenceFileInputFormat.class.getName();
+        return inputFormatClass = table.getInputFormatClass();
       }
       try {
         inputFormatClass = ((Class<? extends InputFormat>) Class.forName(clsName, true,
@@ -310,25 +305,18 @@ public class Partition implements Serial
     return inputFormatClass;
   }
 
-  final public Class<? extends HiveOutputFormat> getOutputFormatClass()
+  final public Class<? extends OutputFormat> getOutputFormatClass()
       throws HiveException {
     if (outputFormatClass == null) {
-      String clsName = null;
-      if (tPartition != null && tPartition.getSd() != null) {
-        clsName = tPartition.getSd().getOutputFormat();
-      }
+      // sd can be null for views
+      String clsName = tPartition.getSd() == null ? null : tPartition.getSd().getOutputFormat();
       if (clsName == null) {
-        clsName = HiveSequenceFileOutputFormat.class.getName();
+        return outputFormatClass = table.getOutputFormatClass();
       }
       try {
-        Class<?> c = (Class.forName(clsName, true,
-            Utilities.getSessionSpecifiedClassLoader()));
+        Class<?> c = Class.forName(clsName, true, Utilities.getSessionSpecifiedClassLoader());
         // Replace FileOutputFormat for backward compatibility
-        if (!HiveOutputFormat.class.isAssignableFrom(c)) {
-          outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c,false);
-        } else {
-          outputFormatClass = (Class<? extends HiveOutputFormat>)c;
-        }
+        outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c);
       } catch (ClassNotFoundException e) {
         throw new HiveException("Class not found: " + clsName, e);
       }
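
Behavioral note on the two getters above: when the partition's storage descriptor is null (view partitions) or names no format, the partition now inherits the owning table's input/output format classes instead of hard-coding SequenceFile defaults. Illustrative extract:

    // sd can be null for views; fall back to the table's format
    String clsName = tPartition.getSd() == null ? null : tPartition.getSd().getOutputFormat();
    if (clsName == null) {
      return outputFormatClass = table.getOutputFormatClass();
    }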

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Sat Nov 22 17:39:26 2014
@@ -49,8 +49,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
-import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
-import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
 import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat;
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -64,6 +62,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 
 /**
@@ -85,10 +84,11 @@ public class Table implements Serializab
    * These fields are all cached fields.  The information comes from tTable.
    */
   private Deserializer deserializer;
-  private Class<? extends HiveOutputFormat> outputFormatClass;
+  private Class<? extends OutputFormat> outputFormatClass;
   private Class<? extends InputFormat> inputFormatClass;
   private Path path;
-  private HiveStorageHandler storageHandler;
+
+  private transient HiveStorageHandler storageHandler;
 
   /**
    * Used only for serialization.
@@ -222,7 +222,7 @@ public class Table implements Serializab
     tTable.getSd().setInputFormat(inputFormatClass.getName());
   }
 
-  public void setOutputFormatClass(Class<? extends HiveOutputFormat> outputFormatClass) {
+  public void setOutputFormatClass(Class<? extends OutputFormat> outputFormatClass) {
     this.outputFormatClass = outputFormatClass;
     tTable.getSd().setOutputFormat(outputFormatClass.getName());
   }
@@ -279,7 +279,7 @@ public class Table implements Serializab
   }
 
   public HiveStorageHandler getStorageHandler() {
-    if (storageHandler != null) {
+    if (storageHandler != null || !isNonNative()) {
       return storageHandler;
     }
     try {
@@ -313,9 +313,7 @@ public class Table implements Serializab
     return inputFormatClass;
   }
 
-  final public Class<? extends HiveOutputFormat> getOutputFormatClass() {
-    // Replace FileOutputFormat for backward compatibility
-    boolean storagehandler = false;
+  final public Class<? extends OutputFormat> getOutputFormatClass() {
     if (outputFormatClass == null) {
       try {
         String className = tTable.getSd().getOutputFormat();
@@ -326,34 +324,10 @@ public class Table implements Serializab
           }
           c = getStorageHandler().getOutputFormatClass();
         } else {
-            // if HivePassThroughOutputFormat
-            if (className.equals(
-                 HivePassThroughOutputFormat.HIVE_PASSTHROUGH_OF_CLASSNAME)) {
-              if (getStorageHandler() != null) {
-                // get the storage handler real output format class
-                c = getStorageHandler().getOutputFormatClass();
-              }
-              else {
-                //should not happen
-                return null;
-              }
-            }
-            else {
-              c = Class.forName(className, true,
-                  Utilities.getSessionSpecifiedClassLoader());
-            }
-        }
-        if (!HiveOutputFormat.class.isAssignableFrom(c)) {
-          if (getStorageHandler() != null) {
-            storagehandler = true;
-          }
-          else {
-            storagehandler = false;
-          }
-          outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c,storagehandler);
-        } else {
-          outputFormatClass = (Class<? extends HiveOutputFormat>)c;
+          c = Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader());
         }
+        // Replace FileOutputFormat for backward compatibility
+        outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c);
       } catch (ClassNotFoundException e) {
         throw new RuntimeException(e);
       }
@@ -708,8 +682,7 @@ public class Table implements Serializab
     }
     try {
       Class<?> origin = Class.forName(name, true, Utilities.getSessionSpecifiedClassLoader());
-      setOutputFormatClass(HiveFileFormatUtils
-          .getOutputFormatSubstitute(origin,false));
+      setOutputFormatClass(HiveFileFormatUtils.getOutputFormatSubstitute(origin));
     } catch (ClassNotFoundException e) {
       throw new HiveException("Class not found: " + name, e);
     }
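
Two smaller changes in Table are worth calling out: the cached storageHandler is now transient, so it is not dragged through plan serialization, and getStorageHandler() short-circuits for native tables instead of consulting table properties. Sketch of the new guard:

    // inside getStorageHandler(): native tables never have a storage handler,
    // so return the cached value (null for native tables) without a lookup
    if (storageHandler != null || !isNonNative()) {
      return storageHandler;
    }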

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java Sat Nov 22 17:39:26 2014
@@ -36,7 +36,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -51,7 +50,6 @@ import org.apache.hadoop.hive.ql.exec.Ut
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
-import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -63,6 +61,7 @@ import org.apache.hadoop.hive.ql.plan.Lo
 import org.apache.hadoop.hive.ql.plan.MoveWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.mapred.OutputFormat;
 
 /**
  * ImportSemanticAnalyzer.
@@ -462,8 +461,8 @@ public class ImportSemanticAnalyzer exte
        */
       try {
         Class<?> origin = Class.forName(importedofc, true, Utilities.getSessionSpecifiedClassLoader());
-        Class<? extends HiveOutputFormat> replaced = HiveFileFormatUtils
-            .getOutputFormatSubstitute(origin,false);
+        Class<? extends OutputFormat> replaced = HiveFileFormatUtils
+            .getOutputFormatSubstitute(origin);
         if (replaced == null) {
           throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE
             .getMsg());

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Sat Nov 22 17:39:26 2014
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.parse;
 
 import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS;
 
-import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableList.Builder;
@@ -216,6 +215,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.OutputFormat;
 import org.eigenbase.rel.AggregateCall;
 import org.eigenbase.rel.AggregateRelBase;
 import org.eigenbase.rel.Aggregation;
@@ -1735,7 +1735,8 @@ public class SemanticAnalyzer extends Ba
           }
 
           Class<?> outputFormatClass = ts.tableHandle.getOutputFormatClass();
-          if (!HiveOutputFormat.class.isAssignableFrom(outputFormatClass)) {
+          if (!ts.tableHandle.isNonNative() &&
+              !HiveOutputFormat.class.isAssignableFrom(outputFormatClass)) {
             throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE
                 .getMsg(ast, "The class is " + outputFormatClass.toString()));
           }
@@ -12449,7 +12450,7 @@ public class SemanticAnalyzer extends Ba
     return tableIsTransactional != null && tableIsTransactional.equalsIgnoreCase("true");
   }
 
-  private boolean isAcidOutputFormat(Class<? extends HiveOutputFormat> of) {
+  private boolean isAcidOutputFormat(Class<? extends OutputFormat> of) {
     Class<?>[] interfaces = of.getInterfaces();
     for (Class<?> iface : interfaces) {
       if (iface.equals(AcidOutputFormat.class)) {
@@ -12467,7 +12468,7 @@ public class SemanticAnalyzer extends Ba
             AcidUtils.Operation.INSERT);
   }
 
-  private AcidUtils.Operation getAcidType(Class<? extends HiveOutputFormat> of) {
+  private AcidUtils.Operation getAcidType(Class<? extends OutputFormat> of) {
     if (SessionState.get() == null || !SessionState.get().getTxnMgr().supportsAcid()) {
       return AcidUtils.Operation.NOT_ACID;
     } else if (isAcidOutputFormat(of)) {
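
The SemanticAnalyzer hunk above is the user-visible half of the refactoring: at analysis time a non-native table's OutputFormat no longer has to implement HiveOutputFormat, since it will be wrapped at write time, and only native tables keep the strict check. Restated for emphasis:

    // strict HiveOutputFormat validation now applies to native tables only
    if (!ts.tableHandle.isNonNative() &&
        !HiveOutputFormat.class.isAssignableFrom(outputFormatClass)) {
      throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE
          .getMsg(ast, "The class is " + outputFormatClass.toString()));
    }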

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java Sat Nov 22 17:39:26 2014
@@ -26,7 +26,6 @@ import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
@@ -40,6 +39,7 @@ import org.apache.hadoop.hive.ql.parse.P
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.mapred.OutputFormat;
 
 /**
  * CreateTableDesc.
@@ -420,14 +420,14 @@ public class CreateTableDesc extends DDL
       try {
         Class<?> origin = Class.forName(this.getOutputFormat(), true,
           Utilities.getSessionSpecifiedClassLoader());
-        Class<? extends HiveOutputFormat> replaced = HiveFileFormatUtils
-          .getOutputFormatSubstitute(origin,false);
-        if (replaced == null) {
+        Class<? extends OutputFormat> replaced = HiveFileFormatUtils
+          .getOutputFormatSubstitute(origin);
+        if (!HiveOutputFormat.class.isAssignableFrom(replaced)) {
           throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE
             .getMsg());
         }
       } catch (ClassNotFoundException e) {
-        throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE.getMsg());
+        throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg(), e);
       }
     }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java Sat Nov 22 17:39:26 2014
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
@@ -58,7 +59,7 @@ public class PartitionDesc implements Se
   private TableDesc tableDesc;
   private LinkedHashMap<String, String> partSpec;
   private Class<? extends InputFormat> inputFileFormatClass;
-  private Class<? extends HiveOutputFormat> outputFileFormatClass;
+  private Class<? extends OutputFormat> outputFileFormatClass;
   private Properties properties;
 
   private String baseFileName;
@@ -148,7 +149,7 @@ public class PartitionDesc implements Se
     }
   }
 
-  public Class<? extends HiveOutputFormat> getOutputFileFormatClass() {
+  public Class<? extends OutputFormat> getOutputFileFormatClass() {
     if (outputFileFormatClass == null && tableDesc != null) {
       setOutputFileFormatClass(tableDesc.getOutputFileFormatClass());
     }
@@ -156,8 +157,8 @@ public class PartitionDesc implements Se
   }
 
   public void setOutputFileFormatClass(final Class<?> outputFileFormatClass) {
-    Class<? extends HiveOutputFormat> outputClass = outputFileFormatClass == null ? null :
-      HiveFileFormatUtils.getOutputFormatSubstitute(outputFileFormatClass,false);
+    Class<? extends OutputFormat> outputClass = outputFileFormatClass == null ? null :
+      HiveFileFormatUtils.getOutputFormatSubstitute(outputFileFormatClass);
     if (outputClass != null) {
       this.outputFileFormatClass = (Class<? extends HiveOutputFormat>) 
         CLASS_INTERNER.intern(outputClass);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Sat Nov 22 17:39:26 2014
@@ -42,9 +42,7 @@ import org.apache.hadoop.hive.ql.exec.Ta
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
-import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
-import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
@@ -827,13 +825,6 @@ public final class PlanUtils {
                     "using configureTableJobProperties",e);
                 storageHandler.configureTableJobProperties(tableDesc, jobProperties);
             }
-            if (tableDesc.getOutputFileFormatClass().getName()
-                     == HivePassThroughOutputFormat.HIVE_PASSTHROUGH_OF_CLASSNAME) {
-             // get the real output format when we register this for the table
-             jobProperties.put(
-                 HivePassThroughOutputFormat.HIVE_PASSTHROUGH_STORAGEHANDLER_OF_JOBCONFKEY,
-                 HiveFileFormatUtils.getRealOutputFormatClassName());
-           }
         }
         // Job properties are only relevant for non-native tables, so
         // for native tables, leave it null to avoid cluttering up

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java?rev=1641092&r1=1641091&r2=1641092&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java Sat Nov 22 17:39:26 2014
@@ -28,12 +28,11 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
-import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
-import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.OutputFormat;
 
 /**
  * TableDesc.
@@ -42,7 +41,7 @@ import org.apache.hadoop.mapred.InputFor
 public class TableDesc implements Serializable, Cloneable {
   private static final long serialVersionUID = 1L;
   private Class<? extends InputFormat> inputFileFormatClass;
-  private Class<? extends HiveOutputFormat> outputFileFormatClass;
+  private Class<? extends OutputFormat> outputFileFormatClass;
   private java.util.Properties properties;
   private Map<String, String> jobProperties;
 
@@ -59,7 +58,7 @@ public class TableDesc implements Serial
       final Class<?> outputFormatClass, final Properties properties) {
     this.inputFileFormatClass = inputFormatClass;
     outputFileFormatClass = HiveFileFormatUtils
-        .getOutputFormatSubstitute(outputFormatClass, false);
+        .getOutputFormatSubstitute(outputFormatClass);
     this.properties = properties;
   }
 
@@ -94,13 +93,13 @@ public class TableDesc implements Serial
     this.inputFileFormatClass = inputFileFormatClass;
   }
 
-  public Class<? extends HiveOutputFormat> getOutputFileFormatClass() {
+  public Class<? extends OutputFormat> getOutputFileFormatClass() {
     return outputFileFormatClass;
   }
 
-  public void setOutputFileFormatClass(final Class<?> outputFileFormatClass) {
+  public void setOutputFileFormatClass(Class<?> outputFileFormatClass) {
     this.outputFileFormatClass = HiveFileFormatUtils
-        .getOutputFormatSubstitute(outputFileFormatClass, false);
+        .getOutputFormatSubstitute(outputFileFormatClass);
   }
 
   @Explain(displayName = "properties", normalExplain = false)
@@ -142,12 +141,7 @@ public class TableDesc implements Serial
 
   @Explain(displayName = "output format")
   public String getOutputFileFormatClassName() {
-    if (getOutputFileFormatClass().getName() == HivePassThroughOutputFormat.HIVE_PASSTHROUGH_OF_CLASSNAME) {
-      return HiveFileFormatUtils.getRealOutputFormatClassName();
-    }
-    else {
-      return getOutputFileFormatClass().getName();
-    }
+    return getOutputFileFormatClass().getName();
   }
 
   public boolean isNonNative() {