Posted to common-commits@hadoop.apache.org by am...@apache.org on 2011/10/18 16:45:51 UTC

svn commit: r1185694 [5/7] - in /hadoop/common/branches/branch-0.20-security: ./ src/contrib/ src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/ src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/emulators/ src/contrib/gridmix/sr...

Added: hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java?rev=1185694&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java Tue Oct 18 14:45:48 2011
@@ -0,0 +1,371 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.gridmix;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.UtilsForTests;
+import org.apache.hadoop.mapred.gridmix.GenerateData.DataStatistics;
+import org.apache.hadoop.mapred.gridmix.Statistics.ClusterStats;
+import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.tools.rumen.JobStory;
+import org.apache.hadoop.tools.rumen.JobStoryProducer;
+import org.junit.Test;
+
+/**
+ * Test {@link ExecutionSummarizer} and {@link ClusterSummarizer}.
+ */
+public class TestGridmixSummary {
+  
+  /**
+   * Test {@link DataStatistics}.
+   */
+  @Test
+  public void testDataStatistics() throws Exception {
+    // test data-statistics getters with compression enabled
+    DataStatistics stats = new DataStatistics(10, 2, true);
+    assertEquals("Data size mismatch", 10, stats.getDataSize());
+    assertEquals("Num files mismatch", 2, stats.getNumFiles());
+    assertTrue("Compression configuration mismatch", stats.isDataCompressed());
+    
+    // test data-statistics getters with compression disabled
+    stats = new DataStatistics(100, 5, false);
+    assertEquals("Data size mismatch", 100, stats.getDataSize());
+    assertEquals("Num files mismatch", 5, stats.getNumFiles());
+    assertFalse("Compression configuration mismatch", stats.isDataCompressed());
+    
+    // test publish data stats
+    Configuration conf = new Configuration();
+    Path rootTempDir = new Path(System.getProperty("test.build.data", "/tmp"));
+    Path testDir = new Path(rootTempDir, "testDataStatistics");
+    FileSystem fs = testDir.getFileSystem(conf);
+    fs.delete(testDir, true);
+    Path testInputDir = new Path(testDir, "test");
+    fs.mkdirs(testInputDir);
+    
+    // test with empty folder (compression = on)
+    CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true);
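+    // use a three-state Boolean: it stays null if publishDataStatistics()
+    // is never reached, so assertNotNull() below can distinguish "not run"
+    // from "ran and threw/succeeded"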
+    Boolean failed = null;
+    try {
+      GenerateData.publishDataStatistics(testInputDir, 1024L, conf);
+      failed = false;
+    } catch (RuntimeException e) {
+      failed = true;
+    }
+    assertNotNull("Expected failure!", failed);
+    assertTrue("Compression data publishing error", failed);
+    
+    // test with empty folder (compression = off)
+    CompressionEmulationUtil.setCompressionEmulationEnabled(conf, false);
+    stats = GenerateData.publishDataStatistics(testInputDir, 1024L, conf);
+    assertEquals("Data size mismatch", 0, stats.getDataSize());
+    assertEquals("Num files mismatch", 0, stats.getNumFiles());
+    assertFalse("Compression configuration mismatch", stats.isDataCompressed());
+    
+    // test with some plain input data (compression = off)
+    CompressionEmulationUtil.setCompressionEmulationEnabled(conf, false);
+    Path inputDataFile = new Path(testInputDir, "test");
+    long size = 
+      UtilsForTests.createTmpFileDFS(fs, inputDataFile, 
+          FsPermission.createImmutable((short) 0777), "hi hello bye").size();
+    stats = GenerateData.publishDataStatistics(testInputDir, -1, conf);
+    assertEquals("Data size mismatch", size, stats.getDataSize());
+    assertEquals("Num files mismatch", 1, stats.getNumFiles());
+    assertFalse("Compression configuration mismatch", stats.isDataCompressed());
+    
+    // test with some plain input data (compression = on)
+    CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true);
+    failed = null;
+    try {
+      GenerateData.publishDataStatistics(testInputDir, 1234L, conf);
+      failed = false;
+    } catch (RuntimeException e) {
+      failed = true;
+    }
+    assertNotNull("Expected failure!", failed);
+    assertTrue("Compression data publishing error", failed);
+    
+    // test with some compressed input data (compression = off)
+    CompressionEmulationUtil.setCompressionEmulationEnabled(conf, false);
+    fs.delete(inputDataFile, false);
+    inputDataFile = new Path(testInputDir, "test.gz");
+    size = 
+      UtilsForTests.createTmpFileDFS(fs, inputDataFile, 
+          FsPermission.createImmutable((short) 0777), "hi hello").size();
+    stats = GenerateData.publishDataStatistics(testInputDir, 1234L, conf);
+    assertEquals("Data size mismatch", size, stats.getDataSize());
+    assertEquals("Num files mismatch", 1, stats.getNumFiles());
+    assertFalse("Compression configuration mismatch", stats.isDataCompressed());
+    
+    // test with some compressed input data (compression = on)
+    CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true);
+    stats = GenerateData.publishDataStatistics(testInputDir, 1234L, conf);
+    assertEquals("Data size mismatch", size, stats.getDataSize());
+    assertEquals("Num files mismatch", 1, stats.getNumFiles());
+    assertTrue("Compression configuration mismatch", stats.isDataCompressed());
+  }
+  
+  /**
+   * A fake {@link JobFactory}.
+   */
+  @SuppressWarnings("unchecked")
+  private static class FakeJobFactory extends JobFactory {
+    /**
+     * A fake {@link JobStoryProducer} for {@link FakeJobFactory}.
+     */
+    private static class FakeJobStoryProducer implements JobStoryProducer {
+      @Override
+      public void close() throws IOException {
+      }
+
+      @Override
+      public JobStory getNextJob() throws IOException {
+        return null;
+      }
+    }
+    
+    FakeJobFactory(Configuration conf) {
+      super(null, new FakeJobStoryProducer(), null, conf, null, null);
+    }
+    
+    @Override
+    public void update(Object item) {
+    }
+    
+    @Override
+    protected Thread createReaderThread() {
+      return null;
+    }
+  }
+  
+  /**
+   * Test {@link ExecutionSummarizer}.
+   */
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testExecutionSummarizer() throws IOException {
+    Configuration conf = new Configuration();
+    
+    ExecutionSummarizer es = new ExecutionSummarizer();
+    assertEquals("ExecutionSummarizer init failed", 
+                 Summarizer.NA, es.getCommandLineArgsString());
+    
+    long startTime = System.currentTimeMillis();
+    // test configuration parameters
+    String[] initArgs = new String[] {"-Xmx20m", "-Dtest.args='test'"};
+    es = new ExecutionSummarizer(initArgs);
+    
+    assertEquals("ExecutionSummarizer init failed", 
+                 "-Xmx20m -Dtest.args='test'", 
+                 es.getCommandLineArgsString());
+    
+    // test start time
+    assertTrue("Start time mismatch", es.getStartTime() >= startTime);
+    assertTrue("Start time mismatch", 
+               es.getStartTime() <= System.currentTimeMillis());
+    
+    // test update() before start(): the simulation has not started, so the
+    // simulation start time and all counters should be zero
+    es.update(null);
+    assertEquals("ExecutionSummarizer init failed", 0, 
+                 es.getSimulationStartTime());
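+    //  expected counters, in helper-argument order: maps, reduces,
+    //  jobs-in-trace, submitted jobs, successful jobs, failed jobs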
+    testExecutionSummarizer(0, 0, 0, 0, 0, 0, es);
+    
+    long simStartTime = System.currentTimeMillis();
+    es.start(null);
+    assertTrue("Simulation start time mismatch", 
+               es.getSimulationStartTime() >= simStartTime);
+    assertTrue("Simulation start time mismatch", 
+               es.getSimulationStartTime() <= System.currentTimeMillis());
+    
+    // test with job stats
+    JobStats stats = generateFakeJobStats(1, 10, true);
+    es.update(stats);
+    testExecutionSummarizer(1, 10, 0, 1, 1, 0, es);
+    
+    // test with failed job 
+    stats = generateFakeJobStats(5, 1, false);
+    es.update(stats);
+    testExecutionSummarizer(6, 11, 0, 2, 1, 1, es);
+    
+    // test finalize
+    //  define a fake job factory
+    JobFactory factory = new FakeJobFactory(conf);
+    
+    // fake the num jobs in trace
+    factory.numJobsInTrace = 3;
+    
+    Path rootTempDir = new Path(System.getProperty("test.build.data", "/tmp"));
+    Path testDir = new Path(rootTempDir, "testGridmixSummary");
+    Path testTraceFile = new Path(testDir, "test-trace.json");
+    FileSystem fs = FileSystem.getLocal(conf);
+    fs.create(testTraceFile).close();
+    
+    // finalize the summarizer
+    UserResolver resolver = new RoundRobinUserResolver();
+    DataStatistics dataStats = new DataStatistics(100, 2, true);
+    String policy = GridmixJobSubmissionPolicy.REPLAY.name();
+    conf.set(GridmixJobSubmissionPolicy.JOB_SUBMISSION_POLICY, policy);
+    es.finalize(factory, testTraceFile.toString(), 1024L, resolver, dataStats, 
+                conf);
+    
+    // test num jobs in trace
+    assertEquals("Mismtach in num jobs in trace", 3, es.getNumJobsInTrace());
+    
+    // test trace signature
+    String tid = 
+      ExecutionSummarizer.getTraceSignature(testTraceFile.toString());
+    assertEquals("Mismatch in trace signature", 
+                 tid, es.getInputTraceSignature());
+    // test trace location
+    Path qPath = fs.makeQualified(testTraceFile);
+    assertEquals("Mismatch in trace signature", 
+                 qPath.toString(), es.getInputTraceLocation());
+    // test expected data size
+    assertEquals("Mismatch in expected data size", 
+                 "1.0k", es.getExpectedDataSize());
+    // test input data statistics
+    assertEquals("Mismatch in input data statistics", 
+                 ExecutionSummarizer.stringifyDataStatistics(dataStats), 
+                 es.getInputDataStatistics());
+    // test user resolver
+    assertEquals("Mismatch in user resolver", 
+                 resolver.getClass().getName(), es.getUserResolver());
+    // test policy
+    assertEquals("Mismatch in policy", policy, es.getJobSubmissionPolicy());
+    
+    // test data stringification using large data
+    es.finalize(factory, testTraceFile.toString(), 1024*1024*1024*10L, resolver,
+                dataStats, conf);
+    assertEquals("Mismatch in expected data size", 
+                 "10.0g", es.getExpectedDataSize());
+    
+    // test trace signature uniqueness
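+    //  (the signature presumably incorporates file metadata such as the
+    //  modification time, so recreating the file after a short pause should
+    //  yield a different signature for the same path)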
+    //  touch the trace file
+    fs.delete(testTraceFile, false);
+    //  sleep for 1 sec
+    try {
+      Thread.sleep(1000);
+    } catch (InterruptedException ie) {}
+    fs.create(testTraceFile).close();
+    es.finalize(factory, testTraceFile.toString(), 0L, resolver, dataStats, 
+                conf);
+    // test missing expected data size
+    assertEquals("Mismatch in trace signature", 
+                 Summarizer.NA, es.getExpectedDataSize());
+    assertFalse("Mismatch in trace signature", 
+                tid.equals(es.getInputTraceSignature()));
+    // get the new identifier
+    tid = ExecutionSummarizer.getTraceSignature(testTraceFile.toString());
+    assertEquals("Mismatch in trace signature", 
+                 tid, es.getInputTraceSignature());
+    
+    testTraceFile = new Path(testDir, "test-trace2.json");
+    fs.create(testTraceFile).close();
+    es.finalize(factory, testTraceFile.toString(), 0L, resolver, dataStats, 
+                conf);
+    assertFalse("Mismatch in trace signature", 
+                tid.equals(es.getInputTraceSignature()));
+    // get the new identifier
+    tid = ExecutionSummarizer.getTraceSignature(testTraceFile.toString());
+    assertEquals("Mismatch in trace signature", 
+                 tid, es.getInputTraceSignature());
+    
+  }
+  
+  // test the ExecutionSummarizer
+  private static void testExecutionSummarizer(int numMaps, int numReds,
+      int totalJobsInTrace, int totalJobSubmitted, int numSuccessfulJob, 
+      int numFailedJobs, ExecutionSummarizer es) {
+    assertEquals("ExecutionSummarizer test failed [num-maps]", 
+                 numMaps, es.getNumMapTasksLaunched());
+    assertEquals("ExecutionSummarizer test failed [num-reducers]", 
+                 numReds, es.getNumReduceTasksLaunched());
+    assertEquals("ExecutionSummarizer test failed [num-jobs-in-trace]", 
+                 totalJobsInTrace, es.getNumJobsInTrace());
+    assertEquals("ExecutionSummarizer test failed [num-submitted jobs]", 
+                 totalJobSubmitted, es.getNumSubmittedJobs());
+    assertEquals("ExecutionSummarizer test failed [num-successful-jobs]", 
+                 numSuccessfulJob, es.getNumSuccessfulJobs());
+    assertEquals("ExecutionSummarizer test failed [num-failed jobs]", 
+                 numFailedJobs, es.getNumFailedJobs());
+  }
+  
+  // generate fake job stats
+  @SuppressWarnings("deprecation")
+  private static JobStats generateFakeJobStats(final int numMaps, 
+      final int numReds, final boolean isSuccessful) 
+  throws IOException {
+    // A fake job 
+    Job fakeJob = new Job() {
+      @Override
+      public int getNumReduceTasks() {
+        return numReds;
+      }
+      
+      @Override
+      public boolean isSuccessful() throws IOException {
+        return isSuccessful;
+      }
+    };
+    return new JobStats(numMaps, fakeJob);
+  }
+  
+  /**
+   * Test {@link ClusterSummarizer}.
+   */
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testClusterSummarizer() throws IOException {
+    ClusterSummarizer cs = new ClusterSummarizer();
+    Configuration conf = new Configuration();
+    
+    String jt = "test-jt:1234";
+    String nn = "test-nn:5678";
+    conf.set("mapred.job.tracker", jt);
+    conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, nn);
+    cs.start(conf);
+    
+    assertEquals("JT name mismatch", jt, cs.getJobTrackerInfo());
+    assertEquals("NN name mismatch", nn, cs.getNamenodeInfo());
+    
+    ClusterStats cstats = ClusterStats.getClusterStats();
+    conf.set("mapred.job.tracker", "local");
+    conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "local");
+    JobClient jc = new JobClient(new JobConf(conf));
+    cstats.setClusterMetric(jc.getClusterStatus());
+    
+    cs.update(cstats);
+    
+    // test
+    assertEquals("Cluster summary test failed!", 1, cs.getMaxMapTasks());
+    assertEquals("Cluster summary test failed!", 1, cs.getMaxReduceTasks());
+    assertEquals("Cluster summary test failed!", 1, cs.getNumActiveTrackers());
+    assertEquals("Cluster summary test failed!", 0, 
+                 cs.getNumBlacklistedTrackers());
+  }
+}
\ No newline at end of file

Added: hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestHighRamJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestHighRamJob.java?rev=1185694&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestHighRamJob.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestHighRamJob.java Tue Oct 18 14:45:48 2011
@@ -0,0 +1,202 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.gridmix;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobTracker;
+import org.apache.hadoop.mapred.gridmix.DebugJobProducer.MockJob;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.tools.rumen.JobStory;
+import org.junit.Test;
+
+/**
+ * Test if Gridmix correctly sets up the simulated job's configuration for
+ * high ram job properties.
+ */
+public class TestHighRamJob {
+  /**
+   * A dummy {@link GridmixJob} that opens up the simulated job for testing.
+   */
+  protected static class DummyGridmixJob extends GridmixJob {
+    public DummyGridmixJob(Configuration conf, JobStory desc) 
+    throws IOException {
+      super(conf, System.currentTimeMillis(), desc, new Path("test"), 
+            UserGroupInformation.getCurrentUser(), -1);
+    }
+    
+    /**
+     * Do nothing since this is a dummy gridmix job.
+     */
+    @Override
+    public Job call() throws Exception {
+      return null;
+    }
+    
+    @Override
+    protected boolean canEmulateCompression() {
+      // return false as we don't need compression
+      return false;
+    }
+    
+    protected Job getJob() {
+      // open the simulated job for testing
+      return job;
+    }
+  }
+  
+  private static void testHighRamConfig(long jobMapMB, long jobReduceMB, 
+      long clusterMapMB, long clusterReduceMB, long simulatedClusterMapMB, 
+      long simulatedClusterReduceMB, long expectedMapMB, long expectedReduceMB, 
+      Configuration gConf) 
+  throws IOException {
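+    // Expected scaling exercised by the callers: the simulated job's task
+    // memory appears to be the source job's memory scaled by the ratio of
+    // simulated-cluster to source-cluster slot size, i.e.
+    //   expectedMapMB    ~= jobMapMB * simulatedClusterMapMB / clusterMapMB
+    //   expectedReduceMB ~= jobReduceMB * simulatedClusterReduceMB / clusterReduceMB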
+    Configuration simulatedJobConf = new Configuration(gConf);
+    simulatedJobConf.setLong(JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY,
+                             simulatedClusterMapMB);
+    simulatedJobConf.setLong(
+        JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY,
+        simulatedClusterReduceMB);
+    
+    // define a source conf
+    Configuration sourceConf = new Configuration();
+    
+    // configure the original job
+    sourceConf.setLong(JobConf.MAPRED_JOB_MAP_MEMORY_MB_PROPERTY, jobMapMB);
+    sourceConf.setLong(JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY,
+                       clusterMapMB);
+    sourceConf.setLong(JobConf.MAPRED_JOB_REDUCE_MEMORY_MB_PROPERTY,
+                       jobReduceMB);
+    sourceConf.setLong(JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY,
+                       clusterReduceMB);
+    
+    // define a mock job
+    MockJob story = new MockJob(sourceConf);
+    
+    GridmixJob job = new DummyGridmixJob(simulatedJobConf, story);
+    Job simulatedJob = job.getJob();
+    Configuration simulatedConf = simulatedJob.getConfiguration();
+    
+    // verify that the simulated job's high-ram properties match expectations
+    assertEquals(expectedMapMB, simulatedConf.getLong(
+        JobConf.MAPRED_JOB_MAP_MEMORY_MB_PROPERTY,
+        JobConf.DISABLED_MEMORY_LIMIT));
+    assertEquals(expectedReduceMB,
+        simulatedConf.getLong(JobConf.MAPRED_JOB_REDUCE_MEMORY_MB_PROPERTY,
+        JobConf.DISABLED_MEMORY_LIMIT));
+  }
+  
+  /**
+   * Tests high ram job properties configuration.
+   */
+  @SuppressWarnings("deprecation")
+  @Test
+  public void testHighRamFeatureEmulation() throws IOException {
+    // define the gridmix conf
+    Configuration gridmixConf = new Configuration();
+    
+    // test : check high ram emulation disabled
+    gridmixConf.setBoolean(GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE, false);
+    testHighRamConfig(10, 20, 5, 10, JobConf.DISABLED_MEMORY_LIMIT, 
+                      JobConf.DISABLED_MEMORY_LIMIT, 
+                      JobConf.DISABLED_MEMORY_LIMIT, 
+                      JobConf.DISABLED_MEMORY_LIMIT, gridmixConf);
+    
+    // test : check with high ram enabled (default) and no scaling
+    gridmixConf = new Configuration();
+    // set the deprecated max memory limit
+    gridmixConf.setLong(JobConf.UPPER_LIMIT_ON_TASK_VMEM_PROPERTY, 
+                        20*1024*1024);
+    testHighRamConfig(10, 20, 5, 10, 5, 10, 10, 20, gridmixConf);
+    
+    // test : check with high ram enabled and scaling
+    gridmixConf = new Configuration();
+    // set the new max map/reduce memory limits
+    gridmixConf.setLong(JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY,
+                        100);
+    gridmixConf.setLong(JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY,
+                        300);
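+    // per the scaling sketched in testHighRamConfig():
+    //   map: 10 * 50/5 = 100, reduce: 45 * 100/15 = 300, each landing
+    //   exactly on the configured cluster maximum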
+    testHighRamConfig(10, 45, 5, 15, 50, 100, 100, 300, gridmixConf);
+    
+    // test : check with high ram enabled and map memory scaling mismatch 
+    //        (deprecated)
+    gridmixConf = new Configuration();
+    gridmixConf.setLong(JobConf.UPPER_LIMIT_ON_TASK_VMEM_PROPERTY, 
+                        70*1024*1024);
+    Boolean failed = null;
+    try {
+      testHighRamConfig(10, 45, 5, 15, 50, 100, 100, 300, gridmixConf);
+      failed = false;
+    } catch (Exception e) {
+      failed = true;
+    }
+    assertNotNull(failed);
+    assertTrue("Exception expected for exceeding map memory limit "
+               + "(deprecation)!", failed);
+    
+    // test : check with high ram enabled and reduce memory scaling mismatch 
+    //        (deprecated)
+    gridmixConf = new Configuration();
+    gridmixConf.setLong(JobConf.UPPER_LIMIT_ON_TASK_VMEM_PROPERTY, 
+                        150*1024*1024);
+    failed = null;
+    try {
+      testHighRamConfig(10, 45, 5, 15, 50, 100, 100, 300, gridmixConf);
+      failed = false;
+    } catch (Exception e) {
+      failed = true;
+    }
+    assertNotNull(failed);
+    assertTrue("Exception expected for exceeding reduce memory limit "
+               + "(deprecation)!", failed);
+    
+    // test : check with high ram enabled and scaling mismatch on map limits
+    gridmixConf = new Configuration();
+    gridmixConf.setLong(JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY,
+                        70);
+    failed = null;
+    try {
+      testHighRamConfig(10, 45, 5, 15, 50, 100, 100, 300, gridmixConf);
+      failed = false;
+    } catch (Exception e) {
+      failed = true;
+    }
+    assertNotNull(failed);
+    assertTrue("Exception expected for exceeding map memory limit!", failed);
+    
+    // test : check with high ram enabled and scaling mismatch on reduce 
+    //        limits
+    gridmixConf = new Configuration();
+    gridmixConf.setLong(JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY,
+                        200);
+    failed = null;
+    try {
+      testHighRamConfig(10, 45, 5, 15, 50, 100, 100, 300, gridmixConf);
+      failed = false;
+    } catch (Exception e) {
+      failed = true;
+    }
+    assertNotNull(failed);
+    assertTrue("Exception expected for exceeding reduce memory limit!", failed);
+  }
+}
\ No newline at end of file

Added: hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestPseudoLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestPseudoLocalFs.java?rev=1185694&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestPseudoLocalFs.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestPseudoLocalFs.java Tue Oct 18 14:45:48 2011
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.gridmix;
+
+import static org.junit.Assert.*;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.junit.Test;
+
+/**
+ * Test the basic functionality of PseudoLocalFs
+ */
+public class TestPseudoLocalFs {
+
+  /**
+   * Test if a file on PseudoLocalFs of a specific size can be opened and read.
+   * Validate the size of the data read.
+   * Test the read methods of {@link PseudoLocalFs.RandomInputStream}.
+   * @throws Exception
+   */
+  @Test
+  public void testPseudoLocalFsFileSize() throws Exception {
+    long fileSize = 10000;
+    Path path = PseudoLocalFs.generateFilePath("myPsedoFile", fileSize);
+    PseudoLocalFs pfs = new PseudoLocalFs();
+    pfs.create(path);
+
+    // Read 1 byte at a time and validate file size.
+    InputStream in = pfs.open(path, 0);
+    long totalSize = 0;
+
+    while (in.read() >= 0) {
+      ++totalSize;
+    }
+    in.close();
+    assertEquals("File size mismatch with read().", fileSize, totalSize);
+
+    // Read data from PseudoLocalFs-based file into buffer to
+    // validate read(byte[]) and file size.
+    in = pfs.open(path, 0);
+    totalSize = 0;
+    byte[] b = new byte[1024];
+    int bytesRead = in.read(b);
+    while (bytesRead >= 0) {
+      totalSize += bytesRead;
+      bytesRead = in.read(b);
+    }
+    in.close();
+    assertEquals("File size mismatch with read(byte[]).", fileSize, totalSize);
+  }
+
+  /**
+   * Validate if file status is obtained for correctly formed file paths on
+   * PseudoLocalFs and also verify if appropriate exception is thrown for
+   * invalid file paths.
+   * @param pfs Pseudo Local File System
+   * @param path file path for which getFileStatus() is to be called
+   * @param shouldSucceed <code>true</code> if getFileStatus() should succeed
+   * @throws IOException
+   */
+  private void validateGetFileStatus(FileSystem pfs, Path path,
+      boolean shouldSucceed) throws IOException {
+    boolean expectedExceptionSeen = false;
+    FileStatus stat = null;
+    try {
+      stat = pfs.getFileStatus(path);
+    } catch(FileNotFoundException e) {
+      expectedExceptionSeen = true;
+    }
+    if (shouldSucceed) {
+      assertFalse("getFileStatus() has thrown Exception for valid file name "
+                  + path, expectedExceptionSeen);
+      assertNotNull("Missing file status for a valid file.", stat);
+
+      // validate fileSize
+      String[] parts = path.toUri().getPath().split("\\.");
+      long expectedFileSize = Long.valueOf(parts[parts.length - 1]);
+      assertEquals("Invalid file size.", expectedFileSize, stat.getLen());
+    } else {
+      assertTrue("getFileStatus() did not throw Exception for invalid file "
+                 + " name " + path, expectedExceptionSeen);
+    }
+  }
+
+  /**
+   * Validate if file creation succeeds for correctly formed file paths on
+   * PseudoLocalFs and also verify if appropriate exception is thrown for
+   * invalid file paths.
+   * @param pfs Pseudo Local File System
+   * @param path file path for which create() is to be called
+   * @param shouldSucceed <code>true</code> if create() should succeed
+   * @throws IOException
+   */
+  private void validateCreate(FileSystem pfs, Path path,
+      boolean shouldSucceed) throws IOException {
+    boolean expectedExceptionSeen = false;
+    try {
+      pfs.create(path);
+    } catch(IOException e) {
+      expectedExceptionSeen = true;
+    }
+    if (shouldSucceed) {
+      assertFalse("create() has thrown Exception for valid file name "
+                  + path, expectedExceptionSeen);
+    } else {
+      assertTrue("create() did not throw Exception for invalid file name "
+                 + path, expectedExceptionSeen);
+    }
+  }
+
+  /**
+   * Validate if opening of file succeeds for correctly formed file paths on
+   * PseudoLocalFs and also verify if appropriate exception is thrown for
+   * invalid file paths.
+   * @param pfs Pseudo Local File System
+   * @param path file path for which open() is to be called
+   * @param shouldSucceed <code>true</code> if open() should succeed
+   * @throws IOException
+   */
+  private void validateOpen(FileSystem pfs, Path path,
+      boolean shouldSucceed) throws IOException {
+    boolean expectedExceptionSeen = false;
+    try {
+      pfs.open(path);
+    } catch(IOException e) {
+      expectedExceptionSeen = true;
+    }
+    if (shouldSucceed) {
+      assertFalse("open() has thrown Exception for valid file name "
+                  + path, expectedExceptionSeen);
+    } else {
+      assertTrue("open() did not throw Exception for invalid file name "
+                 + path, expectedExceptionSeen);
+    }
+  }
+
+  /**
+   * Validate if exists() returns <code>true</code> for correctly formed file
+   * paths on PseudoLocalFs and returns <code>false</code> for improperly
+   * formed file paths.
+   * @param pfs Pseudo Local File System
+   * @param path file path for which exists() is to be called
+   * @param shouldSucceed expected return value of exists(&lt;path&gt;)
+   * @throws IOException
+   */
+  private void validateExists(FileSystem pfs, Path path,
+      boolean shouldSucceed) throws IOException {
+    boolean ret = pfs.exists(path);
+    if (shouldSucceed) {
+      assertTrue("exists() returned false for valid file name " + path, ret);
+    } else {
+      assertFalse("exists() returned true for invalid file name " + path, ret);
+    }
+  }
+
+  /**
+   *  Test Pseudo Local File System methods like getFileStatus(), create(),
+   *  open() and exists() for both valid and invalid file paths.
+   * @throws IOException
+   */
+  @Test
+  public void testPseudoLocalFsFileNames() throws IOException {
+    PseudoLocalFs pfs = new PseudoLocalFs();
+    Configuration conf = new Configuration();
+    conf.setClass("fs.pseudo.impl", PseudoLocalFs.class, FileSystem.class);
+
+    Path path = new Path("pseudo:///myPsedoFile.1234");
+    FileSystem testFs = path.getFileSystem(conf);
+    assertEquals("Failed to obtain a pseudo local file system object from path",
+                 pfs.getUri().getScheme(), testFs.getUri().getScheme());
+
+    // Validate PseudoLocalFS operations on URI of some other file system
+    path = new Path("file:///myPsedoFile.12345");
+    validateGetFileStatus(pfs, path, false);
+    validateCreate(pfs, path, false);
+    validateOpen(pfs, path, false);
+    validateExists(pfs, path, false);
+
+    path = new Path("pseudo:///myPsedoFile");//.<fileSize> missing
+    validateGetFileStatus(pfs, path, false);
+    validateCreate(pfs, path, false);
+    validateOpen(pfs, path, false);
+    validateExists(pfs, path, false);
+
+    // the suffix after the final '.' is not a number
+    path = new Path("pseudo:///myPsedoFile.txt");
+    validateGetFileStatus(pfs, path, false);
+    validateCreate(pfs, path, false);
+    validateOpen(pfs, path, false);
+    validateExists(pfs, path, false);
+
+    // Generate a valid file name (relative path) and validate operations on it
+    long fileSize = 231456;
+    path = PseudoLocalFs.generateFilePath("my.Psedo.File", fileSize);
+    // Validate the above generateFilePath()
+    assertEquals("generateFilePath() failed.", fileSize,
+                 pfs.validateFileNameFormat(path));
+
+    validateGetFileStatus(pfs, path, true);
+    validateCreate(pfs, path, true);
+    validateOpen(pfs, path, true);
+    validateExists(pfs, path, true);
+
+    // Validate operations on valid qualified path
+    path = new Path("myPsedoFile.1237");
+    path = path.makeQualified(pfs);
+    validateGetFileStatus(pfs, path, true);
+    validateCreate(pfs, path, true);
+    validateOpen(pfs, path, true);
+    validateExists(pfs, path, true);
+  }
+}

Added: hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestRandomTextDataGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestRandomTextDataGenerator.java?rev=1185694&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestRandomTextDataGenerator.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestRandomTextDataGenerator.java Tue Oct 18 14:45:48 2011
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.gridmix;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.mapred.gridmix.RandomTextDataGenerator;
+
+import static org.junit.Assert.*;
+import org.junit.Test;
+
+/**
+ * Test {@link RandomTextDataGenerator}.
+ */
+public class TestRandomTextDataGenerator {
+  /**
+   * Test if {@link RandomTextDataGenerator} can generate the desired number
+   * of random words, each of the desired length.
+   */
+  @Test
+  public void testRandomTextDataGenerator() {
+    RandomTextDataGenerator rtdg = new RandomTextDataGenerator(10, 0L, 5);
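+    // arguments: number of words = 10, seed = 0L, word length = 5, as the
+    // asserts below verify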
+    List<String> words = rtdg.getRandomWords();
+
+    // check the size
+    assertEquals("List size mismatch", 10, words.size());
+
+    // check the words
+    Set<String> wordsSet = new HashSet<String>(words);
+    assertEquals("List size mismatch due to duplicates", 10, wordsSet.size());
+
+    // check the word lengths
+    for (String word : wordsSet) {
+      assertEquals("Word size mismatch", 5, word.length());
+    }
+  }
+  
+  /**
+   * Test if {@link RandomTextDataGenerator} generates the same words given the
+   * same list-size, word-length and seed.
+   */
+  @Test
+  public void testRandomTextDataGeneratorRepeatability() {
+    RandomTextDataGenerator rtdg1 = new RandomTextDataGenerator(10, 0L, 5);
+    List<String> words1 = rtdg1.getRandomWords();
+
+    RandomTextDataGenerator rtdg2 = new RandomTextDataGenerator(10, 0L, 5);
+    List<String> words2 = rtdg2.getRandomWords();
+    
+    assertTrue("List mismatch", words1.equals(words2));
+  }
+  
+  /**
+   * Test if {@link RandomTextDataGenerator} can generate different words given 
+   * different seeds.
+   */
+  @Test
+  public void testRandomTextDataGeneratorUniqueness() {
+    RandomTextDataGenerator rtdg1 = new RandomTextDataGenerator(10, 1L, 5);
+    Set<String> words1 = new HashSet<String>(rtdg1.getRandomWords());
+
+    RandomTextDataGenerator rtdg2 = new RandomTextDataGenerator(10, 0L, 5);
+    Set<String> words2 = new HashSet<String>(rtdg2.getRandomWords());
+    
+    assertFalse("List size mismatch across lists", words1.equals(words2));
+  }
+}

Added: hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestResourceUsageEmulators.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestResourceUsageEmulators.java?rev=1185694&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestResourceUsageEmulators.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestResourceUsageEmulators.java Tue Oct 18 14:45:48 2011
@@ -0,0 +1,612 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.gridmix;
+
+import java.io.IOException;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.MapContext;
+import org.apache.hadoop.mapreduce.StatusReporter;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskInputOutputContext;
+import org.apache.hadoop.util.ResourceCalculatorPlugin;
+import org.apache.hadoop.util.ResourceCalculatorPlugin.ProcResourceValues;
+import org.apache.hadoop.tools.rumen.ResourceUsageMetrics;
+import org.apache.hadoop.util.DummyResourceCalculatorPlugin;
+import org.apache.hadoop.mapred.TaskTracker;
+import org.apache.hadoop.mapred.gridmix.LoadJob.ResourceUsageMatcherRunner;
+import org.apache.hadoop.mapred.gridmix.emulators.resourceusage.CumulativeCpuUsageEmulatorPlugin;
+import org.apache.hadoop.mapred.gridmix.emulators.resourceusage.ResourceUsageEmulatorPlugin;
+import org.apache.hadoop.mapred.gridmix.emulators.resourceusage.ResourceUsageMatcher;
+import org.apache.hadoop.mapred.gridmix.emulators.resourceusage.CumulativeCpuUsageEmulatorPlugin.DefaultCpuUsageEmulator;
+
+/**
+ * Test Gridmix's resource emulator framework and supported plugins.
+ */
+public class TestResourceUsageEmulators {
+  /**
+   * A {@link ResourceUsageEmulatorPlugin} implementation for testing purposes.
+   * It essentially creates a file named 'test' in the test directory.
+   */
+  static class TestResourceUsageEmulatorPlugin 
+  implements ResourceUsageEmulatorPlugin {
+    static final Path rootTempDir =
+        new Path(System.getProperty("test.build.data", "/tmp"));
+    static final Path tempDir = 
+      new Path(rootTempDir, "TestResourceUsageEmulatorPlugin");
+    static final String DEFAULT_IDENTIFIER = "test";
+    
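+    // Marker-file protocol: initialize() touches "<id>.init" and emulate()
+    // touches "<id>"; the tests read the markers' modification times to
+    // detect and order plugin calls.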
+    private Path touchPath = null;
+    private FileSystem fs = null;
+    
+    @Override
+    public void emulate() throws IOException, InterruptedException {
+      // space out consecutive emulate() calls so marker timestamps differ
+      try {
+        Thread.sleep(1000); // sleep for 1s
+      } catch (Exception e){}
+      
+      try {
+        fs.delete(touchPath, false); // delete the touch file
+        //TODO Search for a better touch utility
+        fs.create(touchPath).close(); // recreate it
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
+    
+    protected String getIdentifier() {
+      return DEFAULT_IDENTIFIER;
+    }
+    
+    private static Path getFilePath(String id) {
+      return new Path(tempDir, id);
+    }
+    
+    private static Path getInitFilePath(String id) {
+      return new Path(tempDir, id + ".init");
+    }
+    
+    @Override
+    public void initialize(Configuration conf, ResourceUsageMetrics metrics,
+        ResourceCalculatorPlugin monitor, Progressive progress) {
+      // space out consecutive initialize() calls so marker timestamps differ
+      try {
+        Thread.sleep(1000); // sleep for 1s
+      } catch (Exception e){}
+      
+      try {
+        fs = FileSystem.getLocal(conf);
+        
+        Path initPath = getInitFilePath(getIdentifier());
+        fs.delete(initPath, false); // delete the old file
+        fs.create(initPath).close(); // create a new one
+        
+        touchPath = getFilePath(getIdentifier());
+        fs.delete(touchPath, false);
+      } catch (Exception e) {
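+        // ignored: a failed initialization simply leaves no init file,
+        // which testInitialization() reports as 0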
+        
+      } finally {
+        if (fs != null) {
+          try {
+            fs.deleteOnExit(tempDir);
+          } catch (IOException ioe){}
+        }
+      }
+    }
+    
+    // test if the emulation framework successfully initialized this plugin
+    static long testInitialization(String id, Configuration conf) 
+    throws IOException {
+      Path testPath = getInitFilePath(id);
+      FileSystem fs = FileSystem.getLocal(conf);
+      return fs.exists(testPath) 
+             ? fs.getFileStatus(testPath).getModificationTime() 
+             : 0;
+    }
+    
+    // test if the emulation framework successfully ran this plugin's emulate()
+    static long testEmulation(String id, Configuration conf) 
+    throws IOException {
+      Path testPath = getFilePath(id);
+      FileSystem fs = FileSystem.getLocal(conf);
+      return fs.exists(testPath) 
+             ? fs.getFileStatus(testPath).getModificationTime() 
+             : 0;
+    }
+  }
+  
+  /**
+   * Test implementation of {@link ResourceUsageEmulatorPlugin} which creates
+   * a file named 'others' in the test directory.
+   */
+  static class TestOthers extends TestResourceUsageEmulatorPlugin {
+    static final String ID = "others";
+    
+    @Override
+    protected String getIdentifier() {
+      return ID;
+    }
+  }
+  
+  /**
+   * Test implementation of {@link ResourceUsageEmulatorPlugin} which creates
+   * a file named 'cpu' in the test directory.
+   */
+  static class TestCpu extends TestResourceUsageEmulatorPlugin {
+    static final String ID = "cpu";
+    
+    @Override
+    protected String getIdentifier() {
+      return ID;
+    }
+  }
+  
+  /**
+   * Test {@link ResourceUsageMatcher}.
+   */
+  @Test
+  public void testResourceUsageMatcher() throws Exception {
+    ResourceUsageMatcher matcher = new ResourceUsageMatcher();
+    Configuration conf = new Configuration();
+    conf.setClass(ResourceUsageMatcher.RESOURCE_USAGE_EMULATION_PLUGINS, 
+                  TestResourceUsageEmulatorPlugin.class, 
+                  ResourceUsageEmulatorPlugin.class);
+    long currentTime = System.currentTimeMillis();
+    
+    matcher.configure(conf, null, null, null);
+    
+    matcher.matchResourceUsage();
+    
+    String id = TestResourceUsageEmulatorPlugin.DEFAULT_IDENTIFIER;
+    long result = 
+      TestResourceUsageEmulatorPlugin.testInitialization(id, conf);
+    assertTrue("Resource usage matcher failed to initialize the configured"
+               + " plugin", result > currentTime);
+    result = TestResourceUsageEmulatorPlugin.testEmulation(id, conf);
+    assertTrue("Resource usage matcher failed to load and emulate the"
+               + " configured plugin", result > currentTime);
+    
+    // configure the plugin order: emulate cpu first, then others
+    conf.setStrings(ResourceUsageMatcher.RESOURCE_USAGE_EMULATION_PLUGINS, 
+                    TestCpu.class.getName() + "," + TestOthers.class.getName());
+    
+    matcher.configure(conf, null, null, null);
+
+    // test the initialization order
+    long time1 = 
+           TestResourceUsageEmulatorPlugin.testInitialization(TestCpu.ID, conf);
+    long time2 = 
+           TestResourceUsageEmulatorPlugin.testInitialization(TestOthers.ID, 
+                                                              conf);
+    assertTrue("Resource usage matcher failed to initialize the configured"
+               + " plugins in order", time1 < time2);
+    
+    matcher.matchResourceUsage();
+
+    // Note that the cpu usage emulator plugin is configured 1st and then the
+    // others plugin.
+    time1 = 
+      TestResourceUsageEmulatorPlugin.testInitialization(TestCpu.ID, conf);
+    time2 = 
+      TestResourceUsageEmulatorPlugin.testInitialization(TestOthers.ID, 
+                                                         conf);
+    assertTrue("Resource usage matcher failed to load the configured plugins", 
+               time1 < time2);
+  }
+  
+  /**
+   * Fakes the cumulative usage using {@link FakeCpuUsageEmulatorCore}.
+   */
+  static class FakeResourceUsageMonitor extends DummyResourceCalculatorPlugin {
+    private FakeCpuUsageEmulatorCore core;
+    
+    public FakeResourceUsageMonitor(FakeCpuUsageEmulatorCore core) {
+      this.core = core;
+    }
+    
+    /**
+     * A dummy CPU usage monitor. Every call to 
+     * {@link ResourceCalculatorPlugin#getCumulativeCpuTime()} will return the 
+     * value of {@link FakeCpuUsageEmulatorCore#getCpuUsage()}.
+     */
+    @Override
+    public long getCumulativeCpuTime() {
+      return core.getCpuUsage();
+    }
+
+    /**
+     * Returns a {@link ProcResourceValues} with cumulative cpu usage  
+     * computed using {@link #getCumulativeCpuTime()}.
+     */
+    @Override
+    public ProcResourceValues getProcResourceValues() {
+      long usageValue = getCumulativeCpuTime();
+      return new ProcResourceValues(usageValue, -1, -1);
+    }
+  }
+  
+  /**
+   * A dummy {@link Progressive} implementation that allows users to set the
+   * progress for testing. The {@link Progressive#getProgress()} call will 
+   * return the last progress value set using 
+   * {@link FakeProgressive#setProgress(float)}.
+   */
+  static class FakeProgressive implements Progressive {
+    private float progress = 0F;
+    @Override
+    public float getProgress() {
+      return progress;
+    }
+    
+    void setProgress(float progress) {
+      this.progress = progress;
+    }
+  }
+  
+  /**
+   * A dummy reporter for {@link LoadJob.ResourceUsageMatcherRunner}.
+   */
+  private static class DummyReporter extends StatusReporter {
+    private Progressive progress;
+    
+    DummyReporter(Progressive progress) {
+      this.progress = progress;
+    }
+    
+    @Override
+    public org.apache.hadoop.mapreduce.Counter getCounter(Enum<?> name) {
+      return null;
+    }
+    
+    @Override
+    public org.apache.hadoop.mapreduce.Counter getCounter(String group,
+                                                          String name) {
+      return null;
+    }
+    
+    @Override
+    public void progress() {
+    }
+    
+    @Override
+    public float getProgress() {
+      return progress.getProgress();
+    }
+    
+    @Override
+    public void setStatus(String status) {
+    }
+  }
+  
+  // Extends ResourceUsageMatcherRunner for testing.
+  @SuppressWarnings("unchecked")
+  private static class FakeResourceUsageMatcherRunner 
+  extends ResourceUsageMatcherRunner {
+    FakeResourceUsageMatcherRunner(TaskInputOutputContext context, 
+                                   ResourceUsageMetrics metrics) {
+      super(context, metrics);
+    }
+    
+    // test ResourceUsageMatcherRunner
+    void test() throws Exception {
+      super.match();
+    }
+  }
+  
+  /**
+   * Test {@link LoadJob.ResourceUsageMatcherRunner}.
+   */
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testResourceUsageMatcherRunner() throws Exception {
+    Configuration conf = new Configuration();
+    FakeProgressive progress = new FakeProgressive();
+    
+    // set the resource calculator plugin
+    conf.setClass(TaskTracker.TT_RESOURCE_CALCULATOR_PLUGIN,
+                  DummyResourceCalculatorPlugin.class, 
+                  ResourceCalculatorPlugin.class);
+    // set the resource usage emulator plugin under test
+    conf.setClass(ResourceUsageMatcher.RESOURCE_USAGE_EMULATION_PLUGINS, 
+                  TestResourceUsageEmulatorPlugin.class, 
+                  ResourceUsageEmulatorPlugin.class);
+    
+    long currentTime = System.currentTimeMillis();
+    
+    // initialize the matcher class
+    TaskAttemptID id = new TaskAttemptID("test", 1, true, 1, 1);
+    StatusReporter reporter = new DummyReporter(progress);
+    TaskInputOutputContext context = 
+      new MapContext(conf, id, null, null, null, reporter, null);
+    FakeResourceUsageMatcherRunner matcher = 
+      new FakeResourceUsageMatcherRunner(context, null);
+    
+    // check if the matcher initialized the plugin
+    String identifier = TestResourceUsageEmulatorPlugin.DEFAULT_IDENTIFIER;
+    long initTime = 
+      TestResourceUsageEmulatorPlugin.testInitialization(identifier, conf);
+    assertTrue("ResourceUsageMatcherRunner failed to initialize the"
+               + " configured plugin", initTime > currentTime);
+    
+    // check the progress
+    assertEquals("Progress mismatch in ResourceUsageMatcherRunner", 
+                 0, progress.getProgress(), 0D);
+    
+    // call match() and check progress
+    progress.setProgress(0.01f);
+    currentTime = System.currentTimeMillis();
+    matcher.test();
+    long emulateTime = 
+      TestResourceUsageEmulatorPlugin.testEmulation(identifier, conf);
+    assertTrue("ProgressBasedResourceUsageMatcher failed to load and emulate"
+               + " the configured plugin", emulateTime > currentTime);
+  }
+  
+  /**
+   * Test {@link CumulativeCpuUsageEmulatorPlugin}'s core CPU usage emulation 
+   * engine.
+   */
+  @Test
+  public void testCpuUsageEmulator() throws IOException {
+    // test CpuUsageEmulator calibration with fake resource calculator plugin
+    long target = 100000L; // 100 secs
+    int unitUsage = 50;
+    FakeCpuUsageEmulatorCore fakeCpuEmulator = new FakeCpuUsageEmulatorCore();
+    fakeCpuEmulator.setUnitUsage(unitUsage);
+    FakeResourceUsageMonitor fakeMonitor = 
+      new FakeResourceUsageMonitor(fakeCpuEmulator);
+    
+    // calibrate for 100ms
+    fakeCpuEmulator.calibrate(fakeMonitor, target);
+    
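+    // with unitUsage = 50, the 100ms calibration budget is consumed in
+    // exactly two unit computations (2 * 50 = 100), as the asserts below
+    // verify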
+    // by default, CpuUsageEmulator.calibrate() will consume 100ms of CPU usage
+    assertEquals("Fake calibration failed", 
+                 100, fakeMonitor.getCumulativeCpuTime());
+    assertEquals("Fake calibration failed", 
+                 100, fakeCpuEmulator.getCpuUsage());
+    // by default, CpuUsageEmulator.performUnitComputation() will be called 
+    // twice
+    assertEquals("Fake calibration failed", 
+                 2, fakeCpuEmulator.getNumCalls());
+  }
+  
+  /**
+   * This is a dummy class that fakes CPU usage.
+   */
+  private static class FakeCpuUsageEmulatorCore 
+  extends DefaultCpuUsageEmulator {
+    private int numCalls = 0;
+    private int unitUsage = 1;
+    private int cpuUsage = 0;
+    
+    @Override
+    protected void performUnitComputation() {
+      ++numCalls;
+      cpuUsage += unitUsage;
+    }
+    
+    int getNumCalls() {
+      return numCalls;
+    }
+    
+    int getCpuUsage() {
+      return cpuUsage;
+    }
+    
+    void reset() {
+      numCalls = 0;
+      cpuUsage = 0;
+    }
+    
+    void setUnitUsage(int unitUsage) {
+      this.unitUsage = unitUsage;
+    }
+  }
+  
+  // Creates a ResourceUsageMetrics object from the target usage
+  static ResourceUsageMetrics createMetrics(long target) {
+    ResourceUsageMetrics metrics = new ResourceUsageMetrics();
+    metrics.setCumulativeCpuUsage(target);
+    metrics.setVirtualMemoryUsage(target);
+    metrics.setPhysicalMemoryUsage(target);
+    metrics.setHeapUsage(target);
+    return metrics;
+  }
+  
+  /**
+   * Test {@link CumulativeCpuUsageEmulatorPlugin}.
+   */
+  @Test
+  public void testCumulativeCpuUsageEmulatorPlugin() throws Exception {
+    Configuration conf = new Configuration();
+    long targetCpuUsage = 1000L;
+    int unitCpuUsage = 50;
+    
+    // fake progress indicator
+    FakeProgressive fakeProgress = new FakeProgressive();
+    
+    // fake cpu usage generator
+    FakeCpuUsageEmulatorCore fakeCore = new FakeCpuUsageEmulatorCore();
+    fakeCore.setUnitUsage(unitCpuUsage);
+    
+    // a cumulative cpu usage emulator with fake core
+    CumulativeCpuUsageEmulatorPlugin cpuPlugin = 
+      new CumulativeCpuUsageEmulatorPlugin(fakeCore);
+    
+    // test with invalid or missing resource usage value
+    ResourceUsageMetrics invalidUsage = createMetrics(0);
+    cpuPlugin.initialize(conf, invalidUsage, null, null);
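+    // a target usage of 0 signals missing/invalid usage data, so the plugin
+    // is expected to disable itself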
+    
+    // test that a disabled cpu emulation plugin's emulate() call is a no-op
+    int numCallsPre = fakeCore.getNumCalls();
+    long cpuUsagePre = fakeCore.getCpuUsage();
+    cpuPlugin.emulate();
+    int numCallsPost = fakeCore.getNumCalls();
+    long cpuUsagePost = fakeCore.getCpuUsage();
+    
+    //  test that no calls reach the cpu usage emulator core
+    assertEquals("Disabled cumulative CPU usage emulation plugin works!", 
+                 numCallsPre, numCallsPost);
+    
+    //  test that the cpu usage emulator core's usage is unchanged
+    assertEquals("Disabled cumulative CPU usage emulation plugin works!", 
+                 cpuUsagePre, cpuUsagePost);
+    
+    // test with valid resource usage value
+    ResourceUsageMetrics metrics = createMetrics(targetCpuUsage);
+    
+    // fake monitor
+    ResourceCalculatorPlugin monitor = new FakeResourceUsageMonitor(fakeCore);
+    
+    // test with default emulation interval
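+    //  (expected usage = 1000ms, expected calls = 1000 / 50 = 20)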
+    testEmulationAccuracy(conf, fakeCore, monitor, metrics, cpuPlugin, 
+                          targetCpuUsage, targetCpuUsage / unitCpuUsage);
+    
+    // test with custom value for emulation interval of 20%
+    conf.setFloat(CumulativeCpuUsageEmulatorPlugin.CPU_EMULATION_PROGRESS_INTERVAL,
+                  0.2F);
+    testEmulationAccuracy(conf, fakeCore, monitor, metrics, cpuPlugin, 
+                          targetCpuUsage, targetCpuUsage / unitCpuUsage);
+    
+    // test if emulation interval boundary is respected (unit usage = 1)
+    //  test the case where the current progress is less than threshold
+    fakeProgress = new FakeProgressive(); // initialize
+    fakeCore.reset();
+    fakeCore.setUnitUsage(1);
+    conf.setFloat(CumulativeCpuUsageEmulatorPlugin.CPU_EMULATION_PROGRESS_INTERVAL,
+                  0.25F);
+    cpuPlugin.initialize(conf, metrics, monitor, fakeProgress);
+    // take a snapshot after the initialization
+    long initCpuUsage = monitor.getCumulativeCpuTime();
+    long initNumCalls = fakeCore.getNumCalls();
+    // test with 0 progress
+    testEmulationBoundary(0F, fakeCore, fakeProgress, cpuPlugin, initCpuUsage, 
+                          initNumCalls, "[no-op, 0 progress]");
+    // test with 24% progress
+    testEmulationBoundary(0.24F, fakeCore, fakeProgress, cpuPlugin, 
+                          initCpuUsage, initNumCalls, "[no-op, 24% progress]");
+    // test with 25% progress
+    //  target = 1000ms, target emulation at 25% = 250ms, 
+    //  weighted target = 1000 * 0.25^4 (we are using progress^4 as the weight)
+    //                 ~ 4
+    //  but current usage = init-usage = 100, hence expected = 100
+    testEmulationBoundary(0.25F, fakeCore, fakeProgress, cpuPlugin, 
+                          initCpuUsage, initNumCalls, "[op, 25% progress]");
+    
+    // test with 80% progress
+    //  target = 1000ms, target emulation at 80% = 800ms, 
+    //  weighted target = 1000 * 0.80^4 (we are using progress^4 as the weight)
+    //                 ~ 410
+    //  current-usage = init-usage = 100, hence expected-usage = 410
+    testEmulationBoundary(0.80F, fakeCore, fakeProgress, cpuPlugin, 410, 410, 
+                          "[op, 80% progress]");
+    
+    // now test if the final call with 100% progress ramps up the CPU usage
+    testEmulationBoundary(1F, fakeCore, fakeProgress, cpuPlugin, targetCpuUsage,
+                          targetCpuUsage, "[op, 100% progress]");
+    
+    // test if emulation interval boundary is respected (unit usage = 50)
+    //  test the case where the current progress is less than the threshold
+    fakeProgress = new FakeProgressive(); // initialize
+    fakeCore.reset();
+    fakeCore.setUnitUsage(unitCpuUsage);
+    conf.setFloat(CumulativeCpuUsageEmulatorPlugin.CPU_EMULATION_PROGRESS_INTERVAL,
+                  0.40F);
+    cpuPlugin.initialize(conf, metrics, monitor, fakeProgress);
+    // take a snapshot after the initialization
+    initCpuUsage = monitor.getCumulativeCpuTime();
+    initNumCalls = fakeCore.getNumCalls();
+    // test with 0 progress
+    testEmulationBoundary(0F, fakeCore, fakeProgress, cpuPlugin, initCpuUsage, 
+                          initNumCalls, "[no-op, 0 progress]");
+    // test with 39% progress
+    testEmulationBoundary(0.39F, fakeCore, fakeProgress, cpuPlugin, 
+                          initCpuUsage, initNumCalls, "[no-op, 39% progress]");
+    // test with 40% progress
+    //  target = 1000ms, target emulation at 40% = 400ms, 
+    //  weighted target = 1000 * 0.40^4 (we are using progress^4 as the weight)
+    //                 ~ 26
+    //  current-usage = init-usage = 100, hence expected-usage = 100
+    testEmulationBoundary(0.40F, fakeCore, fakeProgress, cpuPlugin, 
+                          initCpuUsage, initNumCalls, "[op, 40% progress]");
+    
+    // test with 90% progress
+    //  target = 1000ms, target emulation at 90% = 900ms, 
+    //  weighted target = 1000 * 0.90^4 (we are using progress^4 as the weight)
+    //                 ~ 656
+    //  current-usage = init-usage = 100, hence expected-usage = 656 but 
+    //  the fake-core increases in steps of 50, hence final target = 700
+    testEmulationBoundary(0.90F, fakeCore, fakeProgress, cpuPlugin, 700, 
+                          700 / unitCpuUsage, "[op, 90% progress]");
+    
+    // now test if the final call with 100% progress ramps up the CPU usage
+    testEmulationBoundary(1F, fakeCore, fakeProgress, cpuPlugin, targetCpuUsage,
+                          targetCpuUsage / unitCpuUsage, "[op, 100% progress]");
+  }
+  
+  // test whether the CPU usage emulator achieves the desired target using
+  // the desired number of calls to the underlying core engine.
+  private static void testEmulationAccuracy(Configuration conf, 
+                        FakeCpuUsageEmulatorCore fakeCore,
+                        ResourceCalculatorPlugin monitor,
+                        ResourceUsageMetrics metrics,
+                        CumulativeCpuUsageEmulatorPlugin cpuPlugin,
+                        long expectedTotalCpuUsage, long expectedTotalNumCalls) 
+  throws Exception {
+    FakeProgressive fakeProgress = new FakeProgressive();
+    fakeCore.reset();
+    cpuPlugin.initialize(conf, metrics, monitor, fakeProgress);
+    int numLoops = 0;
+    while (fakeProgress.getProgress() < 1) {
+      ++numLoops;
+      float progress = (float)numLoops / 100;
+      fakeProgress.setProgress(progress);
+      cpuPlugin.emulate();
+    }
+    
+    // test if the resource plugin shows the expected invocations
+    assertEquals("Cumulative cpu usage emulator plugin failed (num calls)!", 
+                 expectedTotalNumCalls, fakeCore.getNumCalls(), 0L);
+    // test if the resource plugin shows the expected usage
+    assertEquals("Cumulative cpu usage emulator plugin failed (total usage)!", 
+                 expectedTotalCpuUsage, fakeCore.getCpuUsage(), 0L);
+  }
+  
+  // tests if the CPU usage emulation plugin emulates only at the expected
+  // progress gaps
+  private static void testEmulationBoundary(float progress, 
+      FakeCpuUsageEmulatorCore fakeCore, FakeProgressive fakeProgress, 
+      CumulativeCpuUsageEmulatorPlugin cpuPlugin, long expectedTotalCpuUsage, 
+      long expectedTotalNumCalls, String info) throws Exception {
+    fakeProgress.setProgress(progress);
+    cpuPlugin.emulate();
+    
+    assertEquals("Emulation interval test for cpu usage failed " + info + "!", 
+                 expectedTotalCpuUsage, fakeCore.getCpuUsage(), 0L);
+    assertEquals("Emulation interval test for num calls failed " + info + "!", 
+                 expectedTotalNumCalls, fakeCore.getNumCalls(), 0L);
+  }
+}
\ No newline at end of file

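The expected-usage figures in the boundary-test comments above all come from
the progress^4 weighting. A minimal sketch of that arithmetic follows; the
class and method names are hypothetical, not the plugin's internal API:

    // Illustrative sketch of the progress^4 weighting referenced in the
    // boundary-test comments; WeightedCpuTargetSketch is a made-up name.
    public class WeightedCpuTargetSketch {
      // weighted CPU-usage target at the given progress
      static long weightedTarget(long targetCpuUsage, float progress) {
        return (long) (targetCpuUsage * Math.pow(progress, 4));
      }

      public static void main(String[] args) {
        long target = 1000L; // total target CPU usage in ms
        System.out.println(weightedTarget(target, 0.25F)); // 3   (~4 above)
        System.out.println(weightedTarget(target, 0.80F)); // 409 (~410 above)
        System.out.println(weightedTarget(target, 0.90F)); // 656
      }
    }

Whenever this weighted target falls below the usage already accumulated (the
init-usage of 100 in the comments), the plugin has nothing left to emulate
and both counters stay put.
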
Modified: hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestUserResolve.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestUserResolve.java?rev=1185694&r1=1185693&r2=1185694&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestUserResolve.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestUserResolve.java Tue Oct 18 14:45:48 2011
@@ -33,23 +33,31 @@ import org.apache.hadoop.security.UserGr
 
 public class TestUserResolve {
 
-  static Path userlist;
+  private static Path rootDir = null;
+  private static Configuration conf = null;
+  private static FileSystem fs = null;
 
   @BeforeClass
-  public static void writeUserList() throws IOException {
-    final Configuration conf = new Configuration();
-    final FileSystem fs = FileSystem.getLocal(conf);
-    final Path wd = new Path(new Path(
-          System.getProperty("test.build.data", "/tmp")).makeQualified(fs),
-        "gridmixUserResolve");
-    userlist = new Path(wd, "users");
+  public static void createRootDir() throws IOException {
+    conf = new Configuration();
+    fs = FileSystem.getLocal(conf);
+    rootDir = new Path(new Path(System.getProperty("test.build.data", "/tmp"))
+                  .makeQualified(fs), "gridmixUserResolve");
+  }
+
+  /**
+   * Creates a users file with the given content.
+   * @param usersFilePath    the path of the file to be created
+   * @param usersFileContent content of the users file
+   * @throws IOException
+   */
+  private static void writeUserList(Path usersFilePath, String usersFileContent)
+  throws IOException {
+
     FSDataOutputStream out = null;
     try {
-      out = fs.create(userlist, true);
-      out.writeBytes("user0,groupA,groupB,groupC\n");
-      out.writeBytes("user1,groupA,groupC\n");
-      out.writeBytes("user2,groupB\n");
-      out.writeBytes("user3,groupA,groupB,groupC\n");
+      out = fs.create(usersFilePath, true);
+      out.writeBytes(usersFileContent);
     } finally {
       if (out != null) {
         out.close();
@@ -57,42 +65,110 @@ public class TestUserResolve {
     }
   }
 
-  @Test
-  public void testRoundRobinResolver() throws Exception {
-    final Configuration conf = new Configuration();
-    final UserResolver rslv = new RoundRobinUserResolver();
-
+  /**
+   * Validate RoundRobinUserResolver's behavior for a bad user resource file.
+   * RoundRobinUserResolver.setTargetUsers() should throw a proper exception
+   * for cases like
+   * <li> a non-existent user resource file and
+   * <li> an empty user resource file
+   *
+   * @param rslv              The RoundRobinUserResolver object
+   * @param userRsrc          users file
+   * @param expectedErrorMsg  expected error message
+   */
+  private void validateBadUsersFile(UserResolver rslv, URI userRsrc,
+      String expectedErrorMsg) {
     boolean fail = false;
     try {
-      rslv.setTargetUsers(null, conf);
+      rslv.setTargetUsers(userRsrc, conf);
     } catch (IOException e) {
+      assertTrue("Exception message from RoundRobinUserResolver is wrong",
+          e.getMessage().equals(expectedErrorMsg));
       fail = true;
     }
     assertTrue("User list required for RoundRobinUserResolver", fail);
+  }
+
+  /**
+   * Validate the behavior of {@link RoundRobinUserResolver} for different
+   * user resource files like
+   * <li> Empty user resource file
+   * <li> Non-existent user resource file
+   * <li> User resource file with valid content
+   * @throws Exception
+   */
+  @Test
+  public void testRoundRobinResolver() throws Exception {
+
+    final UserResolver rslv = new RoundRobinUserResolver();
+    Path usersFilePath = new Path(rootDir, "users");
+    URI userRsrc = new URI(usersFilePath.toString());
+
+    // Check if the error message is as expected for a non-existent
+    // user resource file.
+    fs.delete(usersFilePath, false);
+    String expectedErrorMsg = "File " + userRsrc + " does not exist.";
+    validateBadUsersFile(rslv, userRsrc, expectedErrorMsg);
+
+    // Check if the error message is as expected for empty user resource file
+    writeUserList(usersFilePath, "");// creates empty users file
+    expectedErrorMsg =
+      RoundRobinUserResolver.buildEmptyUsersErrorMsg(userRsrc);
+    validateBadUsersFile(rslv, userRsrc, expectedErrorMsg);
+
+    // Create a user resource file with valid content, like the older users
+    // list file with usernames and groups
+    writeUserList(usersFilePath,
+    "user0,groupA,groupB,groupC\nuser1,groupA,groupC\n");
+    validateValidUsersFile(rslv, userRsrc);
+
+    // Create a user resource file with valid content where some usernames
+    // have groups and others do not
+    writeUserList(usersFilePath, "user0,groupA,groupB\nuser1,");
+    validateValidUsersFile(rslv, userRsrc);
+
+    // Create a user resource file with valid content where usernames have
+    // no groups
+    writeUserList(usersFilePath, "user0\nuser1");
+    validateValidUsersFile(rslv, userRsrc);
+  }
 
-    rslv.setTargetUsers(new URI(userlist.toString()), conf);
-    UserGroupInformation ugi1;
-    assertEquals("user0", 
-        rslv.getTargetUgi((ugi1 = 
-          UserGroupInformation.createRemoteUser("hfre0"))).getUserName());
-    assertEquals("user1", rslv.getTargetUgi(UserGroupInformation.createRemoteUser("hfre1")).getUserName());
-    assertEquals("user2", rslv.getTargetUgi(UserGroupInformation.createRemoteUser("hfre2")).getUserName());
+  // Validate RoundRobinUserResolver for the case of
+  // user resource file with valid content.
+  private void validateValidUsersFile(UserResolver rslv, URI userRsrc)
+      throws IOException {
+    assertTrue(rslv.setTargetUsers(userRsrc, conf));
+    UserGroupInformation ugi1 = UserGroupInformation.createRemoteUser("hfre0");
     assertEquals("user0", rslv.getTargetUgi(ugi1).getUserName());
-    assertEquals("user3", rslv.getTargetUgi(UserGroupInformation.createRemoteUser("hfre3")).getUserName());
+    assertEquals("user1",
+        rslv.getTargetUgi(UserGroupInformation.createRemoteUser("hfre1"))
+            .getUserName());
+    assertEquals("user0",
+        rslv.getTargetUgi(UserGroupInformation.createRemoteUser("hfre2"))
+            .getUserName());
     assertEquals("user0", rslv.getTargetUgi(ugi1).getUserName());
+    assertEquals("user1",
+        rslv.getTargetUgi(UserGroupInformation.createRemoteUser("hfre3"))
+            .getUserName());
+
+    // Verify that if the same user comes again, it maps to the same target
+    // user even though the UGI is constructed anew.
+    assertEquals("user0", rslv.getTargetUgi(
+        UserGroupInformation.createRemoteUser("hfre0")).getUserName());
+    assertEquals("user0",
+        rslv.getTargetUgi(UserGroupInformation.createRemoteUser("hfre5"))
+        .getUserName());
+    assertEquals("user0",
+        rslv.getTargetUgi(UserGroupInformation.createRemoteUser("hfre0"))
+        .getUserName());
   }
 
   @Test
   public void testSubmitterResolver() throws Exception {
-    final Configuration conf = new Configuration();
     final UserResolver rslv = new SubmitterUserResolver();
-    rslv.setTargetUsers(null, conf);
+    assertFalse(rslv.needsTargetUsersList());
     UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
     assertEquals(ugi, rslv.getTargetUgi((UserGroupInformation)null));
-    System.out.println(" Submitter current user " + ugi);
-    System.out.println(
-      " Target ugi " + rslv.getTargetUgi(
-        (UserGroupInformation) null));
   }
 
 }

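The assertions in validateValidUsersFile() encode round-robin assignment with
a per-user cache: with two target users, hfre0, hfre2 and hfre5 all land on
user0, hfre1 and hfre3 land on user1, and a repeated original user keeps its
earlier mapping. A hedged sketch of that behavior follows; it mirrors the
assertions only and is not RoundRobinUserResolver's actual implementation:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Round-robin user mapping with a cache, mirroring the expectations in
    // validateValidUsersFile(); all names here are illustrative.
    public class RoundRobinSketch {
      private final List<String> targetUsers;
      private final Map<String, String> cache = new HashMap<String, String>();
      private int next = 0;

      RoundRobinSketch(List<String> users) {
        targetUsers = new ArrayList<String>(users);
      }

      // The same original user always maps to the same target user;
      // only previously unseen users advance the round-robin pointer.
      synchronized String resolve(String originalUser) {
        String target = cache.get(originalUser);
        if (target == null) {
          target = targetUsers.get(next++ % targetUsers.size());
          cache.put(originalUser, target);
        }
        return target;
      }
    }

With target users [user0, user1] this yields exactly the sequence asserted
above: hfre0 -> user0, hfre1 -> user1, hfre2 -> user0, hfre3 -> user1,
hfre5 -> user0, and hfre0 -> user0 again on every later lookup.
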
Modified: hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/util/Progress.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/util/Progress.java?rev=1185694&r1=1185693&r2=1185694&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/util/Progress.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/util/Progress.java Tue Oct 18 14:45:48 2011
@@ -96,6 +96,7 @@ public class Progress {
     return node.getInternal();
   }
 
+  
   /** Computes progress in this node. */
   private synchronized float getInternal() {
     int phaseCount = phases.size();
@@ -108,6 +109,14 @@ public class Progress {
     }
   }
 
+  /**
+   * Returns the progress of this node alone. In contrast, get() gives the
+   * overall progress of the root node (not just of this node).
+   */
+  public synchronized float getProgress() {
+    return getInternal();
+  }
+
   public synchronized void setStatus(String status) {
     this.status = status;
   }
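
The getProgress() method added above reports only the node it is called on,
while the existing get() walks up to the root and reports overall progress.
A small usage sketch, assuming the standard addPhase()/set() API of this
class:

    import org.apache.hadoop.util.Progress;

    // Contrast get() (overall, measured from the root) with the newly
    // added getProgress() (this node only).
    public class ProgressSketch {
      public static void main(String[] args) {
        Progress root = new Progress();
        Progress map = root.addPhase("map"); // phase 1 of 2
        root.addPhase("reduce");             // phase 2 of 2

        map.set(0.5f); // the map phase is half done
        System.out.println(map.getProgress()); // 0.5  -- this node alone
        System.out.println(map.get());         // 0.25 -- overall: half of phase 1 of 2
      }
    }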