Posted to user@hive.apache.org by luo butter <bu...@gmail.com> on 2015/12/21 03:29:32 UTC

"java.lang.RuntimeException: Reduce operator initialization failed" when running hive on spark

hi,

When I ran Hive on Spark, it threw the exceptions below while processing a map-side join:

java.lang.RuntimeException: Reduce operator initialization failed
at org.apache.hadoop.hive.ql.exec.spark.SparkReduceRecordHandler.init(SparkReduceRecordHandler.java:224)
at org.apache.hadoop.hive.ql.exec.spark.HiveReduceFunction.call(HiveReduceFunction.java:46)
at org.apache.hadoop.hive.ql.exec.spark.HiveReduceFunction.call(HiveReduceFunction.java:28)
at org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$7$1.apply(JavaRDDLike.scala:186)
at org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$7$1.apply(JavaRDDLike.scala:186)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
at org.apache.spark.rdd.UnionRDD.compute(UnionRDD.scala:87)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
at org.apache.spark.rdd.UnionRDD.compute(UnionRDD.scala:87)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
at org.apache.spark.rdd.UnionRDD.compute(UnionRDD.scala:87)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:70)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
at org.apache.spark.scheduler.Task.run(Task.scala:70)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException: Error while trying to create table container
at org.apache.hadoop.hive.ql.exec.mr.ObjectCache.retrieve(ObjectCache.java:57)
at org.apache.hadoop.hive.ql.exec.mr.ObjectCache.retrieveAsync(ObjectCache.java:63)
at org.apache.hadoop.hive.ql.exec.MapJoinOperator.initializeOp(MapJoinOperator.java:166)
at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:362)
at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:481)
at org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:438)
at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:375)
at org.apache.hadoop.hive.ql.exec.spark.SparkReduceRecordHandler.init(SparkReduceRecordHandler.java:214)
... 25 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException: Error while trying to create table container
at org.apache.hadoop.hive.ql.exec.spark.HashTableLoader.load(HashTableLoader.java:120)
at org.apache.hadoop.hive.ql.exec.MapJoinOperator.loadHashTable(MapJoinOperator.java:288)
at org.apache.hadoop.hive.ql.exec.MapJoinOperator$1.call(MapJoinOperator.java:173)
at org.apache.hadoop.hive.ql.exec.MapJoinOperator$1.call(MapJoinOperator.java:169)
at org.apache.hadoop.hive.ql.exec.mr.ObjectCache.retrieve(ObjectCache.java:55)
... 32 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Error while trying to create table container
at org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe.load(MapJoinTableContainerSerDe.java:159)
at org.apache.hadoop.hive.ql.exec.spark.HashTableLoader.load(HashTableLoader.java:117)
... 36 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Error, not a directory: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10001/HashTable-Stage-1/MapJoin-t3-01--.hashtable
at org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe.load(MapJoinTableContainerSerDe.java:107)
... 37 more

AND

java.lang.RuntimeException: Hive Runtime Error while closing operators: Unexpected exception: null
at org.apache.hadoop.hive.ql.exec.spark.SparkReduceRecordHandler.close(SparkReduceRecordHandler.java:439)
at org.apache.hadoop.hive.ql.exec.spark.HiveReduceFunctionResultList.closeRecordProcessor(HiveReduceFunctionResultList.java:59)
at org.apache.hadoop.hive.ql.exec.spark.HiveBaseFunctionResultList$ResultIterator.hasNext(HiveBaseFunctionResultList.java:106)
at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:41)
at org.apache.spark.util.collection.ExternalSorter.insertAll(ExternalSorter.scala:216)
at org.apache.spark.shuffle.sort.SortShuffleWriter.write(SortShuffleWriter.scala:62)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:70)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
at org.apache.spark.scheduler.Task.run(Task.scala:70)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Unexpected exception: null
at org.apache.hadoop.hive.ql.exec.MapJoinOperator.process(MapJoinOperator.java:426)
at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837)
at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:88)
at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837)
at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.internalForward(CommonJoinOperator.java:644)
at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.genUniqueJoinObject(CommonJoinOperator.java:657)
at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.genUniqueJoinObject(CommonJoinOperator.java:660)
at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.checkAndGenObject(CommonJoinOperator.java:756)
at org.apache.hadoop.hive.ql.exec.JoinOperator.endGroup(JoinOperator.java:256)
at org.apache.hadoop.hive.ql.exec.spark.SparkReduceRecordHandler.close(SparkReduceRecordHandler.java:418)
... 12 more
Caused by: java.lang.NullPointerException
at org.apache.hadoop.hive.ql.exec.MapJoinOperator.getRefKey(MapJoinOperator.java:327)
at org.apache.hadoop.hive.ql.exec.MapJoinOperator.process(MapJoinOperator.java:339)
... 21 more


The Hive version is 1.2.1, and I used a map-join hint like "/*+mapjoin(some_tmp_table)*/".

When I fetched the YARN application log from HDFS and grepped it with

grep '/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/' 890.log | sort | uniq | grep 'HashTable-Stage-' | sort | uniq

I found the lines below:
15/12/18 15:19:56 INFO exec.HashTableSinkOperator: 2015-12-18 15:19:56 Dump the side-table for tag: 1 with group count: 2323 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-1660499375
15/12/18 15:19:56 INFO exec.HashTableSinkOperator: 2015-12-18 15:19:56 Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-1660499375 (0 bytes)
15/12/18 15:19:56 INFO exec.SparkHashTableSinkOperator: Temp URI for side table: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1
15/12/18 15:21:03 INFO exec.HashTableSinkOperator: 2015-12-18 15:21:03 Dump the side-table for tag: 1 with group count: 2323 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-1968183527
15/12/18 15:21:03 INFO exec.HashTableSinkOperator: 2015-12-18 15:21:03 Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-1968183527 (0 bytes)
15/12/18 15:21:03 INFO exec.SparkHashTableSinkOperator: Temp URI for side table: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1
15/12/18 15:24:46 INFO exec.HashTableSinkOperator: 2015-12-18 15:24:46 Dump the side-table for tag: 1 with group count: 1 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10002/HashTable-Stage-1/MapJoin-mapfile11--.hashtable/HASHTABLESINK_141-292566152
15/12/18 15:24:46 INFO exec.HashTableSinkOperator: 2015-12-18 15:24:46 Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10002/HashTable-Stage-1/MapJoin-mapfile11--.hashtable/HASHTABLESINK_141-292566152 (0 bytes)
15/12/18 15:24:46 INFO exec.SparkHashTableSinkOperator: Temp URI for side table: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10002/HashTable-Stage-1
15/12/18 15:25:36 INFO exec.HashTableSinkOperator: 2015-12-18 15:25:36 Dump the side-table for tag: 1 with group count: 2323 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-326970375
15/12/18 15:25:36 INFO exec.HashTableSinkOperator: 2015-12-18 15:25:36 Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-326970375 (0 bytes)
15/12/18 15:25:36 INFO exec.SparkHashTableSinkOperator: Temp URI for side table: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1
15/12/18 15:28:01 INFO exec.HashTableSinkOperator: 2015-12-18 15:28:01 Dump the side-table for tag: 1 with group count: 1 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10003/HashTable-Stage-1/MapJoin-mapfile21--.hashtable/HASHTABLESINK_144-37062448
15/12/18 15:28:01 INFO exec.HashTableSinkOperator: 2015-12-18 15:28:01 Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10003/HashTable-Stage-1/MapJoin-mapfile21--.hashtable/HASHTABLESINK_144-37062448 (0 bytes)
15/12/18 15:28:01 INFO exec.SparkHashTableSinkOperator: Temp URI for side table: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10003/HashTable-Stage-1
15/12/18 15:28:09 INFO exec.HashTableSinkOperator: 2015-12-18 15:28:09 Dump the side-table for tag: 1 with group count: 2323 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-985829059
15/12/18 15:28:09 INFO exec.HashTableSinkOperator: 2015-12-18 15:28:09 Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-985829059 (0 bytes)
15/12/18 15:28:09 INFO exec.SparkHashTableSinkOperator: Temp URI for side table: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1
15/12/18 15:41:24 INFO spark.HashTableLoader: Load back all hashtable files from tmp folder uri:hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10001/HashTable-Stage-1/MapJoin-t3-01--.hashtable
15/12/18 15:41:25 INFO spark.HashTableLoader: Load back all hashtable files from tmp folder uri:hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10001/HashTable-Stage-1/MapJoin-t3-01--.hashtable
2015-12-18 15:19:56     Dump the side-table for tag: 1 with group count: 2323 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-1660499375
2015-12-18 15:19:56     Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-1660499375 (0 bytes)
2015-12-18 15:21:03     Dump the side-table for tag: 1 with group count: 2323 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-1968183527
2015-12-18 15:21:03     Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-1968183527 (0 bytes)
2015-12-18 15:24:46     Dump the side-table for tag: 1 with group count: 1 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10002/HashTable-Stage-1/MapJoin-mapfile11--.hashtable/HASHTABLESINK_141-292566152
2015-12-18 15:24:46     Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10002/HashTable-Stage-1/MapJoin-mapfile11--.hashtable/HASHTABLESINK_141-292566152 (0 bytes)
2015-12-18 15:25:36     Dump the side-table for tag: 1 with group count: 2323 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-326970375
2015-12-18 15:25:36     Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-326970375 (0 bytes)
2015-12-18 15:28:01     Dump the side-table for tag: 1 with group count: 1 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10003/HashTable-Stage-1/MapJoin-mapfile21--.hashtable/HASHTABLESINK_144-37062448
2015-12-18 15:28:01     Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10003/HashTable-Stage-1/MapJoin-mapfile21--.hashtable/HASHTABLESINK_144-37062448 (0 bytes)
2015-12-18 15:28:09     Dump the side-table for tag: 1 with group count: 2323 into file: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-985829059
2015-12-18 15:28:09     Uploaded 1 File to: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10004/HashTable-Stage-1/MapJoin-t3-01--.hashtable/HASHTABLESINK_147-985829059 (0 bytes)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Error, not a directory: hdfs://{nameNode ip}/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10001/HashTable-Stage-1/MapJoin-t3-01--.hashtable

That means nothing was ever dumped to the path "/data/hadoop/hive/tmp/mqq/c1781e95-ac9b-4a61-87a4-b903bfc47962/hive_2015-12-18_15-19-08_370_592467001322890134-1/-mr-10001/HashTable-Stage-1/MapJoin-t3-01--.hashtable" during the map-side join: the sink operators only wrote hashtable files under -mr-10002, -mr-10003, and -mr-10004, yet the HashTableLoader tried to read from -mr-10001.
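For anyone trying to reproduce or work around this: the Hive settings below control whether the mapjoin hint is honored or joins are converted to map joins automatically. I have not verified that changing them avoids this particular failure, so treat this as a sketch rather than a fix:

set hive.ignore.mapjoin.hint=false; -- honor /*+mapjoin(...)*/ hints instead of ignoring them
set hive.auto.convert.join=true; -- let Hive decide map-join conversion automatically
set hive.auto.convert.join.noconditionaltask.size=10000000; -- small-table size threshold in bytes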


Any help will be appreciated!

Re: "java.lang.RuntimeException: Reduce operator initialization failed" when running hive on spark

Posted by Jone Zhang <jo...@gmail.com>.
I also encountered the same problem.

The error log in the Spark UI is as follows:

Job aborted due to stage failure: Task 465 in stage 12.0 failed 4 times, most recent failure: Lost task 465.3 in stage 12.0 (TID 6732, 10.148.147.52): java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: Error while processing row (tag=0) {"key":{"_col0":"TMAF_610_F_2179","_col1":"200402","_col2":"203901","_col3":"08_001_00;-1","_col4":"08","_col5":"001","_col6":"1001013133098807296","_col7":100,"_col8":"welfarecenter&&10451659"},"value":{"_col0":1}}
	at org.apache.hadoop.hive.ql.exec.spark.SparkReduceRecordHandler.processRow(SparkReduceRecordHandler.java:293)
	at org.apache.hadoop.hive.ql.exec.spark.HiveReduceFunctionResultList.processNextRecord(HiveReduceFunctionResultList.java:49)
	at org.apache.hadoop.hive.ql.exec.spark.HiveReduceFunctionResultList.processNextRecord(HiveReduceFunctionResultList.java:28)
	at org.apache.hadoop.hive.ql.exec.spark.HiveBaseFunctionResultList$ResultIterator.hasNext(HiveBaseFunctionResultList.java:95)
	at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:41)
	at org.apache.spark.util.collection.ExternalSorter.insertAll(ExternalSorter.scala:216)
	at org.apache.spark.shuffle.sort.SortShuffleWriter.write(SortShuffleWriter.scala:62)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:70)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
	at org.apache.spark.scheduler.Task.run(Task.scala:70)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Error while processing row (tag=0) {"key":{"_col0":"TMAF_610_F_2179","_col1":"200402","_col2":"203901","_col3":"08_001_00;-1","_col4":"08","_col5":"001","_col6":"1001013133098807296","_col7":100,"_col8":"welfarecenter&&10451659"},"value":{"_col0":1}}
	at org.apache.hadoop.hive.ql.exec.spark.SparkReduceRecordHandler.processKeyValues(SparkReduceRecordHandler.java:332)
	at org.apache.hadoop.hive.ql.exec.spark.SparkReduceRecordHandler.processRow(SparkReduceRecordHandler.java:281)
	... 13 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Unexpected exception: null
	at org.apache.hadoop.hive.ql.exec.MapJoinOperator.process(MapJoinOperator.java:426)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837)
	at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:88)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837)
	at org.apache.hadoop.hive.ql.exec.GroupByOperator.forward(GroupByOperator.java:1016)
	at org.apache.hadoop.hive.ql.exec.GroupByOperator.processAggr(GroupByOperator.java:821)
	at org.apache.hadoop.hive.ql.exec.GroupByOperator.processKey(GroupByOperator.java:695)
	at org.apache.hadoop.hive.ql.exec.GroupByOperator.process(GroupByOperator.java:761)
	at org.apache.hadoop.hive.ql.exec.spark.SparkReduceRecordHandler.processKeyValues(SparkReduceRecordHandler.java:323)
	... 14 more
Caused by: java.lang.NullPointerException
	at org.apache.hadoop.hive.ql.exec.MapJoinOperator.getRefKey(MapJoinOperator.java:327)
	at org.apache.hadoop.hive.ql.exec.MapJoinOperator.process(MapJoinOperator.java:339)
	... 22 more

Driver stacktrace:

