Posted to mapreduce-commits@hadoop.apache.org by st...@apache.org on 2009/08/26 17:01:34 UTC

svn commit: r808036 [4/4] - in /hadoop/mapreduce/branches/MAPREDUCE-233: ./ .eclipse.templates/ conf/ ivy/ lib/ src/c++/ src/contrib/ src/contrib/capacity-scheduler/ src/contrib/data_join/ src/contrib/dynamic-scheduler/ src/contrib/eclipse-plugin/ src/...

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestNodeRefresh.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestNodeRefresh.java?rev=808036&r1=808035&r2=808036&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestNodeRefresh.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestNodeRefresh.java Wed Aug 26 15:01:29 2009
@@ -100,6 +100,7 @@
                    hostsSeen.size());
     } catch (IOException ioe) {
       stopCluster();
+      throw ioe;
     }
   }
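
The catch block above now rethrows the IOException after shutting the cluster down, so a failure during the refresh check is no longer swallowed by the cleanup. A condensed sketch of the resulting pattern, with the surrounding test body elided (the elided parts are not shown in this commit):

    try {
      // ... drive the node refresh and assert on hostsSeen ...
    } catch (IOException ioe) {
      stopCluster();  // tear down the mini cluster first
      throw ioe;      // then surface the original failure to JUnit
    }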
 

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestRecoveryManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestRecoveryManager.java?rev=808036&r1=808035&r2=808036&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestRecoveryManager.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestRecoveryManager.java Wed Aug 26 15:01:29 2009
@@ -79,10 +79,8 @@
     RunningJob rJob1 = (new JobClient(job1)).submitJob(job1);
     LOG.info("Submitted job " + rJob1.getID());
     
-    while (rJob1.mapProgress() < 0.5f) {
-      LOG.info("Waiting for job " + rJob1.getID() + " to be 50% done");
-      UtilsForTests.waitFor(100);
-    }
+    // wait for 50%
+    UtilsForTests.waitForJobHalfDone(rJob1);
     
     JobConf job2 = mr.createJobConf();
     
@@ -94,10 +92,8 @@
     RunningJob rJob2 = (new JobClient(job2)).submitJob(job2);
     LOG.info("Submitted job " + rJob2.getID());
     
-    while (rJob2.mapProgress() < 0.5f) {
-      LOG.info("Waiting for job " + rJob2.getID() + " to be 50% done");
-      UtilsForTests.waitFor(100);
-    }
+    // wait for 50%
+    UtilsForTests.waitForJobHalfDone(rJob2);
     
     // kill the jobtracker
     LOG.info("Stopping jobtracker");

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java?rev=808036&r1=808035&r2=808036&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java Wed Aug 26 15:01:29 2009
@@ -26,6 +26,8 @@
 import java.util.Set;
 import java.util.Map.Entry;
 
+import javax.security.auth.login.LoginException;
+
 import junit.extensions.TestSetup;
 import junit.framework.Test;
 import junit.framework.TestCase;
@@ -85,7 +87,7 @@
       org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobTracker {
   
     FakeJobTracker(JobConf conf, Clock clock, String[] tts) throws IOException,
-        InterruptedException {
+        InterruptedException, LoginException {
       super(conf, clock, tts);
     }
 

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java?rev=808036&r1=808035&r2=808036&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java Wed Aug 26 15:01:29 2009
@@ -196,16 +196,8 @@
       return;
     }
 
-    long PER_TASK_LIMIT = 1L; // Low enough to kill off sleepJob tasks.
-
-    Pattern taskOverLimitPattern =
-        Pattern.compile(String.format(taskOverLimitPatternString, String
-            .valueOf(PER_TASK_LIMIT*1024*1024L)));
-    Matcher mat = null;
-
     // Start cluster with proper configuration.
     JobConf fConf = new JobConf();
-
     // very small value, so that no task escapes to successful completion.
     fConf.set("mapred.tasktracker.taskmemorymanager.monitoring-interval",
         String.valueOf(300));
@@ -215,6 +207,51 @@
         JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY,
         2 * 1024);
     startCluster(fConf);
+    runJobExceedingMemoryLimit();
+  }
+  
+  /**
+   * Runs the over-limit task test using the old (deprecated) configuration
+   * values for the TaskTracker.
+   * 
+   * @throws Exception
+   */
+
+  public void testTaskMemoryMonitoringWithDeprecatedConfiguration () 
+    throws Exception {
+    
+    // Run the test only if memory management is enabled
+    if (!isProcfsBasedTreeAvailable()) {
+      return;
+    }
+    // Start cluster with proper configuration.
+    JobConf fConf = new JobConf();
+    // very small value, so that no task escapes to successful completion.
+    fConf.set("mapred.tasktracker.taskmemorymanager.monitoring-interval",
+        String.valueOf(300));
+    //set old values, max vm property per task and upper limit on the tasks
+    //vm
+    //setting the default maximum vmem property to 2 GB
+    fConf.setLong(JobConf.MAPRED_TASK_DEFAULT_MAXVMEM_PROPERTY,
+        (2L * 1024L * 1024L * 1024L));
+    fConf.setLong(JobConf.UPPER_LIMIT_ON_TASK_VMEM_PROPERTY, 
+        (3L * 1024L * 1024L * 1024L));
+    startCluster(fConf);
+    runJobExceedingMemoryLimit();
+  }
+
+  /**
+   * Runs a job which should be failed by the memory monitor.
+   * 
+   * @throws IOException
+   */
+  private void runJobExceedingMemoryLimit() throws IOException {
+    long PER_TASK_LIMIT = 1L; // Low enough to kill off sleepJob tasks.
+
+    Pattern taskOverLimitPattern =
+        Pattern.compile(String.format(taskOverLimitPatternString, String
+            .valueOf(PER_TASK_LIMIT*1024*1024L)));
+    Matcher mat = null;
 
     // Set up job.
     JobConf conf = new JobConf(miniMRCluster.createJobConf());
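
The hunks above move the per-task limit, the over-limit log pattern, and the job run into a private runJobExceedingMemoryLimit() helper, so the existing test and the new testTaskMemoryMonitoringWithDeprecatedConfiguration differ only in how the cluster is configured. A hedged sketch of the deprecated-configuration path, restating the property names and values from the hunks:

    JobConf fConf = new JobConf();
    // Poll the task memory manager frequently so no task finishes in time.
    fConf.set("mapred.tasktracker.taskmemorymanager.monitoring-interval",
        String.valueOf(300));
    // Deprecated per-task vmem limits: 2 GB default, 3 GB upper bound.
    fConf.setLong(JobConf.MAPRED_TASK_DEFAULT_MAXVMEM_PROPERTY,
        2L * 1024L * 1024L * 1024L);
    fConf.setLong(JobConf.UPPER_LIMIT_ON_TASK_VMEM_PROPERTY,
        3L * 1024L * 1024L * 1024L);
    startCluster(fConf);
    runJobExceedingMemoryLimit();  // shared helper extracted by this change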

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTrackerReservation.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTrackerReservation.java?rev=808036&r1=808035&r2=808036&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTrackerReservation.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/TestTrackerReservation.java Wed Aug 26 15:01:29 2009
@@ -20,6 +20,8 @@
 import java.io.IOException;
 import java.util.ArrayList;
 
+import javax.security.auth.login.LoginException;
+
 import org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobInProgress;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
@@ -39,7 +41,7 @@
       org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobTracker {
 
     FakeJobTracker(JobConf conf, Clock clock, String[] tts) throws IOException,
-        InterruptedException {
+        InterruptedException, LoginException {
       super(conf, clock, tts);
     }
 

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java?rev=808036&r1=808035&r2=808036&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java Wed Aug 26 15:01:29 2009
@@ -242,6 +242,7 @@
       if(System.currentTimeMillis() > timeout) {
         throw new IOException("Timeout waiting for job to get to 50% done");
       }
+      LOG.info("Waiting for job " + job.getID() + " to be 50% done");
       UtilsForTests.waitFor(100);
     }
   }
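
This hunk, together with the loops removed from TestRecoveryManager above, shows the 50% polling logic consolidated into a shared UtilsForTests helper. A hedged reconstruction of what the full method plausibly looks like, stitched from the removed loops and the visible context; the exact signature and timeout value are assumptions:

  static void waitForJobHalfDone(RunningJob job) throws IOException {
    long timeout = System.currentTimeMillis() + 60 * 1000;  // assumed bound
    while (job.mapProgress() < 0.5f) {
      if (System.currentTimeMillis() > timeout) {
        throw new IOException("Timeout waiting for job to get to 50% done");
      }
      LOG.info("Waiting for job " + job.getID() + " to be 50% done");
      UtilsForTests.waitFor(100);
    }
  }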

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java?rev=808036&r1=808035&r2=808036&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java Wed Aug 26 15:01:29 2009
@@ -18,22 +18,28 @@
 
 package org.apache.hadoop.mapreduce;
 
+import java.io.DataInput;
+import java.io.DataOutput;
 import java.io.BufferedReader;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.text.NumberFormat;
+import java.util.ArrayList;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Random;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -43,6 +49,7 @@
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.util.ReflectionUtils;
 
 /**
  * Utility methods used in various Job Control unit tests.
@@ -237,6 +244,99 @@
     }
   }
 
+  public static class IncomparableKey implements WritableComparable<Object> {
+    public void write(DataOutput out) { }
+    public void readFields(DataInput in) { }
+    public int compareTo(Object o) {
+      throw new RuntimeException("Should never see this.");
+    }
+  }
+
+  public static class FakeSplit extends InputSplit implements Writable {
+    public void write(DataOutput out) throws IOException { }
+    public void readFields(DataInput in) throws IOException { }
+    public long getLength() { return 0L; }
+    public String[] getLocations() { return new String[0]; }
+  }
+
+  public static class Fake_IF<K,V>
+    extends InputFormat<K, V> 
+    implements Configurable {
+
+    public Fake_IF() { }
+
+    public List<InputSplit> getSplits(JobContext context) {
+      List<InputSplit> ret = new ArrayList<InputSplit>(); 
+      ret.add(new FakeSplit());
+      return ret;
+    }
+    public static void setKeyClass(Configuration conf, Class<?> k) {
+      conf.setClass("test.fakeif.keyclass", k, WritableComparable.class);
+    }
+
+    public static void setValClass(Configuration job, Class<?> v) {
+      job.setClass("test.fakeif.valclass", v, Writable.class);
+    }
+
+    protected Class<? extends K> keyclass;
+    protected Class<? extends V> valclass;
+    Configuration conf = null;
+
+    @SuppressWarnings("unchecked")
+    public void setConf(Configuration conf) {
+      this.conf = conf;
+      keyclass = (Class<? extends K>) conf.getClass("test.fakeif.keyclass",
+          NullWritable.class, WritableComparable.class);
+      valclass = (Class<? extends V>) conf.getClass("test.fakeif.valclass",
+          NullWritable.class, WritableComparable.class);
+    }
+
+    public Configuration getConf() {
+      return conf;
+    }
+    
+    public RecordReader<K,V> createRecordReader(
+        InputSplit ignored, TaskAttemptContext context) {
+      return new RecordReader<K,V>() {
+        public boolean nextKeyValue() throws IOException { return false; }
+        public void initialize(InputSplit split, TaskAttemptContext context) 
+            throws IOException, InterruptedException {}
+        public K getCurrentKey() {
+        return null;
+        }
+        public V getCurrentValue() {
+          return null;
+        }
+        public void close() throws IOException { }
+        public float getProgress() throws IOException { return 0.0f; }
+      };
+    }
+  }
+  
+  public static class Fake_RR<K, V> extends RecordReader<K,V> {
+    private Class<? extends K> keyclass;
+    private Class<? extends V> valclass;
+    public boolean nextKeyValue() throws IOException { return false; }
+    @SuppressWarnings("unchecked")
+    public void initialize(InputSplit split, TaskAttemptContext context) 
+        throws IOException, InterruptedException {
+      Configuration conf = context.getConfiguration();
+      keyclass = (Class<? extends K>) conf.getClass("test.fakeif.keyclass",
+        NullWritable.class, WritableComparable.class);
+      valclass = (Class<? extends V>) conf.getClass("test.fakeif.valclass",
+        NullWritable.class, WritableComparable.class);
+      
+    }
+    public K getCurrentKey() {
+      return ReflectionUtils.newInstance(keyclass, null);
+    }
+    public V getCurrentValue() {
+      return ReflectionUtils.newInstance(valclass, null);
+    }
+    public void close() throws IOException { }
+    public float getProgress() throws IOException { return 0.0f; }
+  }
+
   public static Job createJob(Configuration conf, Path inDir, Path outDir, 
       int numInputFiles, int numReds) throws IOException {
     String input = "The quick brown fox\n" + "has many silly\n"
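
The new IncomparableKey, FakeSplit, Fake_IF and Fake_RR classes provide no-op splits and record readers for tests written against the new org.apache.hadoop.mapreduce API. A hypothetical wiring example, not part of this commit, showing how a test might plug them into a job:

    Job job = new Job(new Configuration());
    job.setInputFormatClass(MapReduceTestUtil.Fake_IF.class);
    // Tell Fake_IF/Fake_RR which key/value types to hand back via reflection.
    MapReduceTestUtil.Fake_IF.setKeyClass(job.getConfiguration(), NullWritable.class);
    MapReduceTestUtil.Fake_IF.setValClass(job.getConfiguration(), NullWritable.class);
    job.setNumReduceTasks(0);  // map-only; Fake_IF yields one empty split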

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestChild.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestChild.java?rev=808036&r1=808035&r2=808036&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestChild.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestChild.java Wed Aug 26 15:01:29 2009
@@ -29,6 +29,7 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.log4j.Level;
 
 public class TestChild extends HadoopTestCase {
   private static String TEST_ROOT_DIR =
@@ -68,6 +69,12 @@
                      mapJavaOpts, 
                      mapJavaOpts, MAP_OPTS_VAL);
       }
+      
+      Level logLevel = 
+        Level.toLevel(conf.get(JobConf.MAPRED_MAP_TASK_LOG_LEVEL, 
+                               Level.INFO.toString()));  
+      assertEquals(JobConf.MAPRED_MAP_TASK_LOG_LEVEL + "has value of " + 
+                   logLevel, logLevel, Level.OFF);
     }
   }
   
@@ -94,6 +101,12 @@
                      reduceJavaOpts, 
                      reduceJavaOpts, REDUCE_OPTS_VAL);
       }
+      
+      Level logLevel = 
+        Level.toLevel(conf.get(JobConf.MAPRED_REDUCE_TASK_LOG_LEVEL, 
+                               Level.INFO.toString()));  
+      assertEquals(JobConf.MAPRED_REDUCE_TASK_LOG_LEVEL + "has value of " + 
+                   logLevel, logLevel, Level.OFF);
     }
   }
   
@@ -108,6 +121,9 @@
       conf.set(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, REDUCE_OPTS_VAL);
     }
     
+    conf.set(JobConf.MAPRED_MAP_TASK_LOG_LEVEL, Level.OFF.toString());
+    conf.set(JobConf.MAPRED_REDUCE_TASK_LOG_LEVEL, Level.OFF.toString());
+    
     Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 
                 numMaps, numReds);
     job.setMapperClass(MyMapper.class);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLocal.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLocal.java?rev=808036&r1=808035&r2=808036&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLocal.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLocal.java Wed Aug 26 15:01:29 2009
@@ -161,6 +161,9 @@
     assertEquals("map out = combine in", mapOut, combineIn);
     assertEquals("combine out = reduce in", combineOut, reduceIn);
     assertTrue("combine in > combine out", combineIn > combineOut);
+    String group = "Random Group";
+    CounterGroup ctrGrp = ctrs.getGroup(group);
+    assertEquals(0, ctrGrp.size());
   }
 
   public void runMultiFileWordCount(Configuration  conf) throws Exception  {

Propchange: hadoop/mapreduce/branches/MAPREDUCE-233/src/webapps/job/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Aug 26 15:01:29 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/webapps/job:713112
 /hadoop/core/trunk/src/webapps/job:776175-785643
-/hadoop/mapreduce/trunk/src/webapps/job:804974-805826
+/hadoop/mapreduce/trunk/src/webapps/job:804974-807678