You are viewing a plain text version of this content. The canonical (HTML) version is available via the mailing-list archive's permalink.
Posted to commits@hive.apache.org by gu...@apache.org on 2014/08/28 00:30:01 UTC
svn commit: r1620988 - in /hive/trunk: itests/src/test/resources/
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/exec/tez/
Author: gunther
Date: Wed Aug 27 22:30:01 2014
New Revision: 1620988
URL: http://svn.apache.org/r1620988
Log:
HIVE-7701: Upgrading tez to 0.4.1 causes metadata only query to fail. (Gunther Hagleitner, reviewed by Vikram Dixit K)
Modified:
hive/trunk/itests/src/test/resources/testconfiguration.properties
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
Modified: hive/trunk/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/trunk/itests/src/test/resources/testconfiguration.properties?rev=1620988&r1=1620987&r2=1620988&view=diff
==============================================================================
--- hive/trunk/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/trunk/itests/src/test/resources/testconfiguration.properties Wed Aug 27 22:30:01 2014
@@ -86,6 +86,7 @@ minitez.query.files.shared=alter_merge_2
mapreduce2.q,\
merge1.q,\
merge2.q,\
+ metadataonly1.q,\
metadata_only_queries.q,\
optimize_nullscan.q,\
orc_analyze.q,\
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1620988&r1=1620987&r2=1620988&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Wed Aug 27 22:30:01 2014
@@ -111,6 +111,7 @@ import org.apache.hadoop.hive.ql.exec.mr
import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
import org.apache.hadoop.hive.ql.exec.mr.ExecReducer;
import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
+import org.apache.hadoop.hive.ql.exec.tez.DagUtils;
import org.apache.hadoop.hive.ql.exec.tez.TezTask;
import org.apache.hadoop.hive.ql.io.ContentSummaryInputFormat;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
@@ -3074,7 +3075,7 @@ public final class Utilities {
* so we don't want to depend on scratch dir and context.
*/
public static List<Path> getInputPathsTez(JobConf job, MapWork work) throws Exception {
- String scratchDir = HiveConf.getVar(job, HiveConf.ConfVars.SCRATCHDIR);
+ String scratchDir = job.get(DagUtils.TEZ_TMP_DIR_KEY);
// we usually don't want to create dummy files for tez, however the metadata only
// optimization relies on it.
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java?rev=1620988&r1=1620987&r2=1620988&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java Wed Aug 27 22:30:01 2014
@@ -124,6 +124,7 @@ import com.google.protobuf.ByteString;
*/
public class DagUtils {
+ public static final String TEZ_TMP_DIR_KEY = "_hive_tez_tmp_dir";
private static final Log LOG = LogFactory.getLog(DagUtils.class.getName());
private static final String TEZ_DIR = "_tez_scratch_dir";
private static DagUtils instance;
@@ -158,7 +159,7 @@ public class DagUtils {
* Creates the configuration object necessary to run a specific vertex from
* map work. This includes input formats, input processor, etc.
*/
- private JobConf initializeVertexConf(JobConf baseConf, MapWork mapWork) {
+ private JobConf initializeVertexConf(JobConf baseConf, Context context, MapWork mapWork) {
JobConf conf = new JobConf(baseConf);
if (mapWork.getNumMapTasks() != null) {
@@ -200,6 +201,7 @@ public class DagUtils {
inpFormat = CombineHiveInputFormat.class.getName();
}
+ conf.set(TEZ_TMP_DIR_KEY, context.getMRTmpPath().toUri().toString());
conf.set("mapred.mapper.class", ExecMapper.class.getName());
conf.set("mapred.input.format.class", inpFormat);
@@ -524,7 +526,7 @@ public class DagUtils {
/*
* Helper function to create JobConf for specific ReduceWork.
*/
- private JobConf initializeVertexConf(JobConf baseConf, ReduceWork reduceWork) {
+ private JobConf initializeVertexConf(JobConf baseConf, Context context, ReduceWork reduceWork) {
JobConf conf = new JobConf(baseConf);
conf.set("mapred.reducer.class", ExecReducer.class.getName());
@@ -896,14 +898,14 @@ public class DagUtils {
* @param work BaseWork will be used to populate the configuration object.
* @return JobConf new configuration object
*/
- public JobConf initializeVertexConf(JobConf conf, BaseWork work) {
+ public JobConf initializeVertexConf(JobConf conf, Context context, BaseWork work) {
// simply dispatch the call to the right method for the actual (sub-) type of
// BaseWork.
if (work instanceof MapWork) {
- return initializeVertexConf(conf, (MapWork)work);
+ return initializeVertexConf(conf, context, (MapWork)work);
} else if (work instanceof ReduceWork) {
- return initializeVertexConf(conf, (ReduceWork)work);
+ return initializeVertexConf(conf, context, (ReduceWork)work);
} else {
assert false;
return null;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java?rev=1620988&r1=1620987&r2=1620988&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java Wed Aug 27 22:30:01 2014
@@ -263,7 +263,7 @@ public class TezTask extends Task<TezWor
}
} else {
// Regular vertices
- JobConf wxConf = utils.initializeVertexConf(conf, w);
+ JobConf wxConf = utils.initializeVertexConf(conf, ctx, w);
Vertex wx = utils.createVertex(wxConf, w, scratchDir, appJarLr,
additionalLr, fs, ctx, !isFinal, work);
dag.addVertex(wx);