Posted to hcatalog-commits@incubator.apache.org by ga...@apache.org on 2012/04/05 05:52:32 UTC

svn commit: r1309656 - in /incubator/hcatalog/branches/branch-0.4: CHANGES.txt src/java/org/apache/hcatalog/mapreduce/InitializeInput.java

Author: gates
Date: Thu Apr  5 05:52:31 2012
New Revision: 1309656

URL: http://svn.apache.org/viewvc?rev=1309656&view=rev
Log:
HCATALOG-320 Remove filtering of table properties which are not prefixed with "hcat."

Modified:
    incubator/hcatalog/branches/branch-0.4/CHANGES.txt
    incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java

Modified: incubator/hcatalog/branches/branch-0.4/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/CHANGES.txt?rev=1309656&r1=1309655&r2=1309656&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/CHANGES.txt (original)
+++ incubator/hcatalog/branches/branch-0.4/CHANGES.txt Thu Apr  5 05:52:31 2012
@@ -96,6 +96,8 @@ Release 0.4.0 - Unreleased
   OPTIMIZATIONS
 
   BUG FIXES
+  HCAT-320 Remove filtering of table properties which are not prefixed with "hcat." (rohini via gates)
+
   HCAT-354 hive-hbase-handler jar not getting put in hcat package (gates)
 
   HCAT-348 HCatUtil::getHiveConf(Configuration) ignores passed-in conf while creating HiveConf (gates)

Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java?rev=1309656&r1=1309655&r2=1309656&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java Thu Apr  5 05:52:31 2012
@@ -27,30 +27,13 @@ import java.util.Properties;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
-import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
-import org.apache.hadoop.hive.ql.metadata.HiveUtils;
-import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.serde.Constants;
-import org.apache.hadoop.hive.serde2.Deserializer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
-
 import org.apache.hcatalog.common.ErrorType;
 import org.apache.hcatalog.common.HCatConstants;
 import org.apache.hcatalog.common.HCatException;
@@ -67,8 +50,6 @@ public class InitializeInput {
   
   private static final Log LOG = LogFactory.getLog(InitializeInput.class);
 
-  /** The prefix for keys used for storage handler arguments */
-  static final String HCAT_KEY_PREFIX = "hcat.";
   private static HiveConf hiveConf;
 
   private static HiveMetaStoreClient createHiveMetaClient(Configuration conf) throws Exception {
@@ -190,9 +171,7 @@ public class InitializeInput {
                                                             inputJobInfo);
 
     for (String key : parameters.keySet()){
-      if (key.startsWith(HCAT_KEY_PREFIX)){
         hcatProperties.put(key, parameters.get(key));
-      }
     }
     // FIXME 
     // Bloating partinfo with inputJobInfo is not good
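
For context, the second hunk above removes the HCAT_KEY_PREFIX check, so InitializeInput now copies every table parameter into the HCat properties instead of only keys starting with "hcat.". The following is a minimal, self-contained sketch of the old and new copy behavior; the class name, helper methods, and sample parameter keys are illustrative only and not part of the HCatalog API.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;

public class PropertyCopySketch {

    // Old behavior: only keys prefixed with "hcat." were copied.
    static Properties copyHcatOnly(Map<String, String> tableParameters) {
        Properties hcatProperties = new Properties();
        for (Map.Entry<String, String> e : tableParameters.entrySet()) {
            if (e.getKey().startsWith("hcat.")) {
                hcatProperties.put(e.getKey(), e.getValue());
            }
        }
        return hcatProperties;
    }

    // New behavior after HCATALOG-320: every table parameter is copied.
    static Properties copyAll(Map<String, String> tableParameters) {
        Properties hcatProperties = new Properties();
        for (Map.Entry<String, String> e : tableParameters.entrySet()) {
            hcatProperties.put(e.getKey(), e.getValue());
        }
        return hcatProperties;
    }

    public static void main(String[] args) {
        // Hypothetical table parameters, mixing an "hcat."-prefixed key
        // with a storage-handler key that lacks the prefix.
        Map<String, String> params = new LinkedHashMap<String, String>();
        params.put("hcat.isd", "org.example.SampleInputDriver");
        params.put("hbase.columns.mapping", ":key,cf1:val");

        System.out.println("old: " + copyHcatOnly(params)); // drops hbase.columns.mapping
        System.out.println("new: " + copyAll(params));      // keeps every parameter
    }
}

With the old filter, non-prefixed properties such as the sample "hbase.columns.mapping" key above never reached the job's HCat properties; the new loop forwards all table parameters unchanged.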