You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2014/10/29 19:13:44 UTC
svn commit: r1635243 - in /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql:
exec/Utilities.java io/HiveInputFormat.java io/orc/OrcInputFormat.java
Author: gunther
Date: Wed Oct 29 18:13:44 2014
New Revision: 1635243
URL: http://svn.apache.org/r1635243
Log:
HIVE-8637: In insert into X select from Y, table properties from X are clobbering those from Y (Alan Gates via Gunther Hagleitner)
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1635243&r1=1635242&r2=1635243&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Wed Oct 29 18:13:44 2014
@@ -2354,6 +2354,32 @@ public final class Utilities {
}
}
+ /**
+ * Copies the storage handler properties configured for a table descriptor to a runtime job
+ * configuration. This differs from {@link #copyTableJobPropertiesToConf(org.apache.hadoop.hive.ql.plan.TableDesc, org.apache.hadoop.mapred.JobConf)}
+ * in that it does not allow parameters already set in the job to override the values from the
+ * table. This is important for setting the config up for reading,
+ * as the job may already have values in it from another table.
+ * @param tbl
+ * @param job
+ */
+ public static void copyTablePropertiesToConf(TableDesc tbl, JobConf job) {
+ Properties tblProperties = tbl.getProperties();
+ for(String name: tblProperties.stringPropertyNames()) {
+ String val = (String) tblProperties.get(name);
+ if (val != null) {
+ job.set(name, StringEscapeUtils.escapeJava(val));
+ }
+ }
+ Map<String, String> jobProperties = tbl.getJobProperties();
+ if (jobProperties == null) {
+ return;
+ }
+ for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
+ job.set(entry.getKey(), entry.getValue());
+ }
+ }
+
private static final Object INPUT_SUMMARY_LOCK = new Object();
/**
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java?rev=1635243&r1=1635242&r2=1635243&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java Wed Oct 29 18:13:44 2014
@@ -275,7 +275,7 @@ public class HiveInputFormat<K extends W
InputFormat inputFormat, Class<? extends InputFormat> inputFormatClass, int splits,
TableDesc table, List<InputSplit> result) throws IOException {
- Utilities.copyTableJobPropertiesToConf(table, conf);
+ Utilities.copyTablePropertiesToConf(table, conf);
if (tableScan != null) {
pushFilters(conf, tableScan);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java?rev=1635243&r1=1635242&r2=1635243&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java Wed Oct 29 18:13:44 2014
@@ -388,6 +388,7 @@ public class OrcInputFormat implements
ConfVars.HIVE_ORC_INCLUDE_FILE_FOOTER_IN_SPLITS);
numBuckets =
Math.max(conf.getInt(hive_metastoreConstants.BUCKET_COUNT, 0), 0);
+ LOG.debug("Number of buckets specified by conf file is " + numBuckets);
int cacheStripeDetailsSize = HiveConf.getIntVar(conf,
ConfVars.HIVE_ORC_CACHE_STRIPE_DETAILS_SIZE);
int numThreads = HiveConf.getIntVar(conf,