Posted to commits@hive.apache.org by na...@apache.org on 2010/11/24 07:44:05 UTC

svn commit: r1038500 - in /hive/trunk: CHANGES.txt common/src/java/org/apache/hadoop/hive/conf/HiveConf.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/test/results/clientpositive/auto_join25.q.out

Author: namit
Date: Wed Nov 24 06:44:04 2010
New Revision: 1038500

URL: http://svn.apache.org/viewvc?rev=1038500&view=rev
Log:
HIVE-1808 Wrong parameter names in HiveConf/hive-default.xml
for joins (Liyin Tang via namit)


Modified:
    hive/trunk/CHANGES.txt
    hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out

Modified: hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hive/trunk/CHANGES.txt?rev=1038500&r1=1038499&r2=1038500&view=diff
==============================================================================
--- hive/trunk/CHANGES.txt (original)
+++ hive/trunk/CHANGES.txt Wed Nov 24 06:44:04 2010
@@ -543,6 +543,9 @@ Trunk -  Unreleased
     HIVE-1809 Hive Comparison Operators are broken for NaN values
     (Paul Butler via Ning Zhang)
 
+    HIVE-1808 Wrong parameter names in HiveConf/hive-default.xml
+    for joins (Liyin Tang via namit)
+
   TESTS
 
     HIVE-1464. improve  test query performance

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1038500&r1=1038499&r2=1038500&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed Nov 24 06:44:04 2010
@@ -256,8 +256,8 @@ public class HiveConf extends Configurat
     HIVEMAXMAPJOINSIZE("hive.mapjoin.maxsize", 100000),
     HIVEHASHTABLETHRESHOLD("hive.hashtable.initialCapacity", 100000),
     HIVEHASHTABLELOADFACTOR("hive.hashtable.loadfactor", (float) 0.75),
-    HIVEHASHTABLEMAXMEMORYUSAGE("hive.hashtable.max.memory.usage", (float) 0.90),
-    HIVEHASHTABLESCALE("hive.hashtable.scale", (long)100000),
+    HIVEHASHTABLEMAXMEMORYUSAGE("hive.mapjoin.localtask.max.memory.usage", (float) 0.90),
+    HIVEHASHTABLESCALE("hive.mapjoin.check.memory.rows", (long)100000),
 
     HIVEDEBUGLOCALTASK("hive.debug.localtask",false),
 

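The hunk above only renames the configuration keys; the ConfVars enum entries and their defaults are unchanged. As a hedged illustration (not part of this commit), the renamed settings could be overridden and read back through HiveConf roughly as follows; the class name is made up for the sketch, and the instance getters are assumed from the Hive API of this era:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class MapJoinConfSketch {
      public static void main(String[] args) {
        // Sketch only: assumes hive-default.xml is on the classpath.
        HiveConf conf = new HiveConf(MapJoinConfSketch.class);

        // HiveConf extends Hadoop's Configuration, so the generic setters
        // work against the renamed keys introduced by this commit.
        conf.setFloat("hive.mapjoin.localtask.max.memory.usage", 0.90f);
        conf.setLong("hive.mapjoin.check.memory.rows", 100000L);

        // The ConfVars entries keep their enum names; only the key strings changed.
        float maxMemoryUsage = conf.getFloatVar(HiveConf.ConfVars.HIVEHASHTABLEMAXMEMORYUSAGE);
        long checkMemoryRows = conf.getLongVar(HiveConf.ConfVars.HIVEHASHTABLESCALE);

        System.out.println(maxMemoryUsage + " " + checkMemoryRows);
      }
    }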
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1038500&r1=1038499&r2=1038500&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Wed Nov 24 06:44:04 2010
@@ -837,10 +837,10 @@ public class Driver implements CommandPr
           if (backupTask != null) {
             errorMessage = "FAILED: Execution Error, return code " + exitVal + " from "
                 + tsk.getClass().getName();
-            console.printInfo(errorMessage);
+            console.printError(errorMessage);
 
             errorMessage = "ATTEMPT: Execute BackupTask: " + backupTask.getClass().getName();
-            console.printInfo(errorMessage);
+            console.printError(errorMessage);
 
             // add backup task to runnable
             if (DriverContext.isLaunchable(backupTask)) {

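The console field in Driver is a SessionState.LogHelper, so switching from printInfo to printError routes the backup-task messages to the session's error stream as well as the log, which appears to be why the FAILED/ATTEMPT lines now show up in the recorded test output below. A minimal sketch of that call pattern (the class and logger names here are illustrative, not from this commit):

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class BackupTaskMessageSketch {
      private static final Log LOG = LogFactory.getLog("BackupTaskMessageSketch");

      public static void main(String[] args) {
        // printError writes to the error stream (falling back to System.err when no
        // session is active) and logs the message; printInfo uses the info stream and
        // may be suppressed in silent mode.
        SessionState.LogHelper console = new SessionState.LogHelper(LOG);
        console.printError("FAILED: Execution Error, return code 2 from "
            + "org.apache.hadoop.hive.ql.exec.MapredLocalTask");
        console.printError("ATTEMPT: Execute BackupTask: "
            + "org.apache.hadoop.hive.ql.exec.MapRedTask");
      }
    }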
Modified: hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out?rev=1038500&r1=1038499&r2=1038500&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out Wed Nov 24 06:44:04 2010
@@ -13,6 +13,8 @@ PREHOOK: Input: default@srcpart@ds=2008-
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@dest1
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
+ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
 POSTHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
 INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value 
 where (src1.ds = '2008-04-08' or src1.ds = '2008-04-09' )and (src1.hr = '12' or src1.hr = '11')
@@ -28,11 +30,11 @@ POSTHOOK: Lineage: dest1.value SIMPLE [(
 PREHOOK: query: SELECT sum(hash(dest1.key,dest1.value)) FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-09-55_396_5931103673819276275/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-12_062_2731833788874193660/-mr-10000
 POSTHOOK: query: SELECT sum(hash(dest1.key,dest1.value)) FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-09-55_396_5931103673819276275/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-12_062_2731833788874193660/-mr-10000
 POSTHOOK: Lineage: dest1.key EXPRESSION [(srcpart)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
 407444119660
@@ -48,6 +50,10 @@ INSERT OVERWRITE TABLE dest_j2 SELECT sr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest_j2
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
+ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
+ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
 POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key)
 INSERT OVERWRITE TABLE dest_j2 SELECT src1.key, src3.value
 POSTHOOK: type: QUERY
@@ -60,11 +66,11 @@ POSTHOOK: Lineage: dest_j2.value SIMPLE 
 PREHOOK: query: SELECT sum(hash(dest_j2.key,dest_j2.value)) FROM dest_j2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j2
-PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-10-05_166_1509512648391049274/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-36_524_5308749215651001089/-mr-10000
 POSTHOOK: query: SELECT sum(hash(dest_j2.key,dest_j2.value)) FROM dest_j2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_j2
-POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-10-05_166_1509512648391049274/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-36_524_5308749215651001089/-mr-10000
 POSTHOOK: Lineage: dest1.key EXPRESSION [(srcpart)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dest_j2.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ]
@@ -84,6 +90,8 @@ INSERT OVERWRITE TABLE dest_j1 SELECT sr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest_j1
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
+ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
 POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key)
 INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value
 POSTHOOK: type: QUERY
@@ -98,11 +106,11 @@ POSTHOOK: Lineage: dest_j2.value SIMPLE 
 PREHOOK: query: SELECT sum(hash(dest_j1.key,dest_j1.value)) FROM dest_j1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-10-12_389_3216138172725864168/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-49_448_5881123257419888652/-mr-10000
 POSTHOOK: query: SELECT sum(hash(dest_j1.key,dest_j1.value)) FROM dest_j1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-10-12_389_3216138172725864168/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-49_448_5881123257419888652/-mr-10000
 POSTHOOK: Lineage: dest1.key EXPRESSION [(srcpart)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ]