Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/05/15 21:21:41 UTC

svn commit: r406722 - /lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java

Author: cutting
Date: Mon May 15 12:21:40 2006
New Revision: 406722

URL: http://svn.apache.org/viewcvs?rev=406722&view=rev
Log:
Set number of reduce tasks to be the number of reduce nodes.  This
results in optimal sort performance.  Contributed by Owen.

Modified:
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java?rev=406722&r1=406721&r2=406722&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java Mon May 15 12:21:40 2006
@@ -69,7 +69,7 @@
     int num_maps = cluster.getTaskTrackers() * 
          jobConf.getInt("test.sort.maps_per_host", 10);
     int num_reduces = cluster.getTaskTrackers() * 
-        jobConf.getInt("test.sort.reduces_per_host", 10);
+        jobConf.getInt("test.sort.reduces_per_host", cluster.getMaxTasks());
     List otherArgs = new ArrayList();
     for(int i=0; i < args.length; ++i) {
       try {
@@ -109,7 +109,7 @@
         cluster.getTaskTrackers() +
         " nodes to sort from " + 
         jobConf.getInputPaths()[0] + " into " +
-        jobConf.getOutputPath() + ".");
+        jobConf.getOutputPath() + " with " + num_reduces + " reduces.");
     Date startTime = new Date();
     System.out.println("Job started: " + startTime);
     JobClient.runJob(jobConf);
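
For context, here is a minimal stand-alone sketch (not part of the commit) of how the
sizing logic above behaves after this change: when "test.sort.reduces_per_host" is not
set, the per-host reduce count falls back to the cluster's task capacity reported by
ClusterStatus.getMaxTasks(), and the total is scaled by the number of task trackers.
It uses only the old JobClient/ClusterStatus/JobConf API that appears in the diff; the
class name SortSizingSketch and the printed message are illustrative, not from Sort.java.

import java.io.IOException;

import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class SortSizingSketch {
  public static void main(String[] args) throws IOException {
    JobConf jobConf = new JobConf();

    // Ask the JobTracker for the current cluster shape.
    JobClient client = new JobClient(jobConf);
    ClusterStatus cluster = client.getClusterStatus();

    // Maps: unchanged by r406722 -- 10 per task tracker unless overridden.
    int numMaps = cluster.getTaskTrackers() *
        jobConf.getInt("test.sort.maps_per_host", 10);

    // Reduces: the new default per host is the cluster's reported task
    // capacity (cluster.getMaxTasks()) instead of the old fixed value of 10.
    int numReduces = cluster.getTaskTrackers() *
        jobConf.getInt("test.sort.reduces_per_host", cluster.getMaxTasks());

    jobConf.setNumMapTasks(numMaps);
    jobConf.setNumReduceTasks(numReduces);

    System.out.println("Would sort with " + numMaps + " maps and " +
        numReduces + " reduces on " + cluster.getTaskTrackers() +
        " task trackers.");
  }
}

To restore the old fixed behaviour, a user can still set the property explicitly,
e.g. jobConf.setInt("test.sort.reduces_per_host", 10), since the getInt() default
only applies when the key is absent from the configuration.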