You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by at...@apache.org on 2009/04/08 23:51:50 UTC

svn commit: r763418 - in /hadoop/hive/branches/branch-0.3: ./ common/src/java/org/apache/hadoop/hive/conf/ data/scripts/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/

Author: athusoo
Date: Wed Apr  8 21:51:50 2009
New Revision: 763418

URL: http://svn.apache.org/viewvc?rev=763418&view=rev
Log:
HIVE-399. Fix timeout problems caused by the bigdata test.
(Namit Jain via athusoo)


Modified:
    hadoop/hive/branches/branch-0.3/CHANGES.txt
    hadoop/hive/branches/branch-0.3/build-common.xml
    hadoop/hive/branches/branch-0.3/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hadoop/hive/branches/branch-0.3/data/scripts/dumpdata_script.py
    hadoop/hive/branches/branch-0.3/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
    hadoop/hive/branches/branch-0.3/ql/src/test/queries/clientpositive/groupby_bigdata.q
    hadoop/hive/branches/branch-0.3/ql/src/test/results/clientpositive/groupby_bigdata.q.out

Modified: hadoop/hive/branches/branch-0.3/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.3/CHANGES.txt?rev=763418&r1=763417&r2=763418&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.3/CHANGES.txt (original)
+++ hadoop/hive/branches/branch-0.3/CHANGES.txt Wed Apr  8 21:51:50 2009
@@ -423,3 +423,6 @@
 
     HIVE-283. Do case insensitive comparison of aliases in partition
     pruning. (athusoo)
+
+    HIVE-399. Fix timeout problems caused by the bigdata test.
+    (Namit Jain via athusoo)

Modified: hadoop/hive/branches/branch-0.3/build-common.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.3/build-common.xml?rev=763418&r1=763417&r2=763418&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.3/build-common.xml (original)
+++ hadoop/hive/branches/branch-0.3/build-common.xml Wed Apr  8 21:51:50 2009
@@ -59,7 +59,7 @@
   <property name="test.include" value="Test*"/>
   <property name="test.classpath.id" value="test.classpath"/>
   <property name="test.output" value="true"/>
-  <property name="test.timeout" value="1800000"/>
+  <property name="test.timeout" value="2700000"/>
   <property name="test.junit.output.format" value="xml"/>
   <property name="test.junit.output.usefile" value="true"/>
 

Modified: hadoop/hive/branches/branch-0.3/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.3/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=763418&r1=763417&r2=763418&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.3/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hadoop/hive/branches/branch-0.3/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed Apr  8 21:51:50 2009
@@ -121,8 +121,10 @@
     // HWI
     HIVEHWILISTENHOST("hive.hwi.listen.host","0.0.0.0"),
     HIVEHWILISTENPORT("hive.hwi.listen.port","9999"),
-    HIVEHWIWARFILE("hive.hwi.war.file",System.getenv("HIVE_HOME")+"/lib/hive_hwi.war");
+    HIVEHWIWARFILE("hive.hwi.war.file",System.getenv("HIVE_HOME")+"/lib/hive_hwi.war"),
 
+    // mapper/reducer memory in local mode
+    HIVEHADOOPMAXMEM("hive.mapred.local.mem", 0);
     
     
     public final String varname;

Modified: hadoop/hive/branches/branch-0.3/data/scripts/dumpdata_script.py
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.3/data/scripts/dumpdata_script.py?rev=763418&r1=763417&r2=763418&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.3/data/scripts/dumpdata_script.py (original)
+++ hadoop/hive/branches/branch-0.3/data/scripts/dumpdata_script.py Wed Apr  8 21:51:50 2009
@@ -1,11 +1,4 @@
-for i in xrange(100):
-   for j in xrange(10):
-      for k in xrange(42022):      
-         print 42000 * i + k
-
-
-for i in xrange(100):
-   for j in xrange(10):
-      for k in xrange(42022):      
-         print 5000000 + (42000 * i) + k
-
+for i in xrange(50):
+   for j in xrange(5):
+      for k in xrange(20022):      
+         print 20000 * i + k

Modified: hadoop/hive/branches/branch-0.3/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.3/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java?rev=763418&r1=763417&r2=763418&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.3/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java (original)
+++ hadoop/hive/branches/branch-0.3/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java Wed Apr  8 21:51:50 2009
@@ -21,6 +21,7 @@
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.Serializable;
+import java.util.Map;
 
 import org.apache.hadoop.mapred.JobConf;
 
@@ -59,7 +60,7 @@
       LOG.info("Generating plan file " + planFile.toString());
       FileOutputStream out = new FileOutputStream(planFile);
       Utilities.serializeMapRedWork(plan, out);
-    
+
       String cmdLine = hadoopExec + " jar " + auxJars + " " + hiveJar + " org.apache.hadoop.hive.ql.exec.ExecDriver -plan " + planFile.toString() + " " + hiveConfArgs;
       
       String files = ExecDriver.getRealFiles(conf);
@@ -68,8 +69,29 @@
       }
 
       LOG.info("Executing: " + cmdLine);
-      Process executor = Runtime.getRuntime().exec(cmdLine);
+      Process executor = null;
 
+      // The user can specify the hadoop memory
+      int hadoopMem = conf.getIntVar(HiveConf.ConfVars.HIVEHADOOPMAXMEM);
+
+      if (hadoopMem == 0) 
+        executor = Runtime.getRuntime().exec(cmdLine);
+      // user specified the memory - only applicable for local mode
+      else {
+        Map<String, String> variables = System.getenv();  
+        String[] env = new String[variables.size() + 1];
+        int pos = 0;
+        
+        for (Map.Entry<String, String> entry : variables.entrySet())  
+        {  
+          String name = entry.getKey();  
+          String value = entry.getValue();  
+          env[pos++] = name + "=" + value;  
+        }  
+        
+        env[pos] = new String("HADOOP_HEAPSIZE=" + hadoopMem);
+        executor = Runtime.getRuntime().exec(cmdLine, env);
+      }
 
       StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, System.out);
       StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, System.err);

Modified: hadoop/hive/branches/branch-0.3/ql/src/test/queries/clientpositive/groupby_bigdata.q
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.3/ql/src/test/queries/clientpositive/groupby_bigdata.q?rev=763418&r1=763417&r2=763418&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.3/ql/src/test/queries/clientpositive/groupby_bigdata.q (original)
+++ hadoop/hive/branches/branch-0.3/ql/src/test/queries/clientpositive/groupby_bigdata.q Wed Apr  8 21:51:50 2009
@@ -1,4 +1,5 @@
-set hive.map.aggr.hash.percentmemory = 0.4;
+set hive.map.aggr.hash.percentmemory = 0.3;
+set hive.mapred.local.mem = 256;
 
 select count(distinct subq.key) from
 (FROM src MAP src.key USING 'python ../data/scripts/dumpdata_script.py' AS key WHERE src.key = 10) subq;

Modified: hadoop/hive/branches/branch-0.3/ql/src/test/results/clientpositive/groupby_bigdata.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.3/ql/src/test/results/clientpositive/groupby_bigdata.q.out?rev=763418&r1=763417&r2=763418&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.3/ql/src/test/results/clientpositive/groupby_bigdata.q.out (original)
+++ hadoop/hive/branches/branch-0.3/ql/src/test/results/clientpositive/groupby_bigdata.q.out Wed Apr  8 21:51:50 2009
@@ -1 +1 @@
-8400044
+1000022