You are viewing a plain text version of this content. The canonical link for it is: http://svn.apache.org/r1585325
Posted to commits@hive.apache.org by rh...@apache.org on 2014/04/06 20:31:01 UTC
svn commit: r1585325 -
/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
Author: rhbutani
Date: Sun Apr 6 18:31:01 2014
New Revision: 1585325
URL: http://svn.apache.org/r1585325
Log:
HIVE-6848 importing into an existing table fails (Harish Butani via Ashutosh Chauhan)
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java?rev=1585325&r1=1585324&r2=1585325&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java Sun Apr 6 18:31:01 2014
@@ -23,6 +23,7 @@ import org.apache.commons.lang.ObjectUti
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.Warehouse;
@@ -35,6 +36,8 @@ import org.apache.hadoop.hive.ql.exec.Ta
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
import org.apache.hadoop.hive.ql.metadata.Table;
@@ -437,6 +440,22 @@ public class ImportSemanticAnalyzer exte
String importedifc = tableDesc.getInputFormat();
String existingofc = table.getOutputFormatClass().getName();
String importedofc = tableDesc.getOutputFormat();
+ /*
+ * substitute OutputFormat name based on HiveFileFormatUtils.outputFormatSubstituteMap
+ */
+ try {
+ Class<?> origin = Class.forName(importedofc, true, JavaUtils.getClassLoader());
+ Class<? extends HiveOutputFormat> replaced = HiveFileFormatUtils
+ .getOutputFormatSubstitute(origin,false);
+ if (replaced == null) {
+ throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE
+ .getMsg());
+ }
+ importedofc = replaced.getCanonicalName();
+ } catch(Exception e) {
+ throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE
+ .getMsg());
+ }
if ((!existingifc.equals(importedifc))
|| (!existingofc.equals(importedofc))) {
throw new SemanticException(
@@ -454,6 +473,11 @@ public class ImportSemanticAnalyzer exte
.getSerdeParam(serdeConstants.SERIALIZATION_FORMAT);
String importedSerdeFormat = tableDesc.getSerdeProps().get(
serdeConstants.SERIALIZATION_FORMAT);
+ /*
+ * If Imported SerdeFormat is null, then set it to "1" just as
+ * metadata.Table.getEmptyTable
+ */
+ importedSerdeFormat = importedSerdeFormat == null ? "1" : importedSerdeFormat;
if (!ObjectUtils.equals(existingSerdeFormat, importedSerdeFormat)) {
throw new SemanticException(
ErrorMsg.INCOMPATIBLE_SCHEMA