Posted to mapreduce-commits@hadoop.apache.org by om...@apache.org on 2011/03/08 07:01:53 UTC

svn commit: r1079264 - in /hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix: ./ test/system/

Author: omalley
Date: Tue Mar  8 06:01:53 2011
New Revision: 1079264

URL: http://svn.apache.org/viewvc?rev=1079264&view=rev
Log:
commit 2dcb356865f5eeb85870e71015b4923a9f4c2b37
Author: Vinay Kumar Thota <vi...@yahoo-inc.com>
Date:   Fri Feb 11 03:54:05 2011 -0800

    4324033 from
    
    +++ b/YAHOO-CHANGES.txt
    +
    +  Bug 4324033 [MR-2138] : Gridmix tests with different time
    +  interval MR traces (1min, 3min, 5min, 7min, 10min, 12min). Patch
    +  available at
    +
    +
    +  Bug 3964674 [MR-2081] : Implement functionality for generating
    +  the Gridmix traces. Patch available at
    +   (vinayt)

Added:
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/GridmixSystemTestCase.java
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobStory.java
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobSubmission.java
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java
Modified:
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridMixConfig.java
    hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/UtilsForGridmix.java

Added: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/GridmixSystemTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/GridmixSystemTestCase.java?rev=1079264&view=auto
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/GridmixSystemTestCase.java (added)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/GridmixSystemTestCase.java Tue Mar  8 06:01:53 2011
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.gridmix;
+
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.test.system.MRCluster;
+import org.apache.hadoop.mapreduce.test.system.JTProtocol;
+import org.apache.hadoop.mapreduce.test.system.JTClient;
+import org.apache.hadoop.mapred.gridmix.test.system.GridmixJobSubmission;
+import org.apache.hadoop.mapred.gridmix.test.system.GridmixJobVerification;
+import org.apache.hadoop.mapred.gridmix.test.system.UtilsForGridmix;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.mapreduce.JobID;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.List;
+import java.util.Collection;
+import java.io.IOException;
+
+/**
+ * Run and verify the Gridmix jobs for a given trace.
+ */
+public class GridmixSystemTestCase {
+  private static final Log LOG = LogFactory
+      .getLog(GridmixSystemTestCase.class);
+  public static Configuration  conf = new Configuration();
+  public static MRCluster cluster;
+  public static int cSize;
+  public static JTClient jtClient;
+  public static JTProtocol rtClient;
+  public static Path gridmixDir;
+  public static Map<String, String> map;
+  private static GridmixJobSubmission gridmixJS;
+  public static GridmixJobVerification gridmixJV;
+  public static List<JobID> jobids;
+  
+  @BeforeClass
+  public static void before() throws Exception {
+    String [] excludeExpList = {"java.net.ConnectException", 
+       "java.io.IOException"};
+    cluster = MRCluster.createCluster(conf);
+    cluster.setExcludeExpList(excludeExpList);
+    cluster.setUp();
+    cSize = cluster.getTTClients().size();
+    jtClient = cluster.getJTClient();
+    rtClient = jtClient.getProxy();
+    gridmixDir = new Path("herriot-gridmix");
+    UtilsForGridmix.createDirs(gridmixDir, rtClient.getDaemonConf());
+    map = UtilsForGridmix.getMRTraces(rtClient.getDaemonConf());
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    UtilsForGridmix.cleanup(gridmixDir, rtClient.getDaemonConf());
+    org.apache.hadoop.fs.FileUtil.fullyDelete(new java.io.File("/tmp/gridmix-st"));
+    cluster.tearDown();
+    if (gridmixJS.getJobConf().get("gridmix.user.resolve.class").
+        contains("RoundRobin")) {
+       List<String> proxyUsers = UtilsForGridmix.
+           listProxyUsers(gridmixJS.getJobConf(),
+           UserGroupInformation.getLoginUser().getShortUserName());
+       for(int index = 0; index < proxyUsers.size(); index++){
+         UtilsForGridmix.cleanup(new Path("hdfs:///user/" + 
+            proxyUsers.get(index)), 
+            rtClient.getDaemonConf());
+       }
+    }
+  }
+  
+  public static void runGridmixAndVerify(String[] runtimeValues, 
+      String [] otherValues, String tracePath) throws Exception {
+    gridmixJS = new GridmixJobSubmission(rtClient.getDaemonConf(), 
+        jtClient, gridmixDir);
+    gridmixJS.submitJobs(runtimeValues, otherValues);
+    jobids = UtilsForGridmix.listGridmixJobIDs(jtClient.getClient(), 
+        gridmixJS.getGridmixJobCount());
+    gridmixJV = new GridmixJobVerification(
+        new Path(tracePath), gridmixJS.getJobConf(), jtClient);
+    gridmixJV.verifyGridmixJobsWithJobStories(jobids);  
+  }
+  
+  
+  public static String getTraceFile(String regExp) throws IOException {
+    List<String> listTraces = UtilsForGridmix.listMRTraces(
+        rtClient.getDaemonConf());
+    Iterator<String> ite = listTraces.iterator();
+    while(ite.hasNext()) {
+      String traceFile = ite.next();
+      if (traceFile.indexOf(regExp)>=0) {
+        return traceFile;
+      }
+    }
+    return null;
+  }
+}

Added: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java?rev=1079264&view=auto
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java (added)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java Tue Mar  8 06:01:53 2011
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred.gridmix;
+
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.mapred.gridmix.test.system.UtilsForGridmix;
+import org.apache.hadoop.mapred.gridmix.test.system.GridMixConfig;
+import org.junit.Test;
+
+/**
+ * Run Gridmix against a 10-minute MR job trace and 
+ * verify each job history against the corresponding job story 
+ * in the trace file.
+ */
+public class TestGridmixWith10minTrace extends GridmixSystemTestCase {
+  private static final Log LOG = LogFactory.
+     getLog(TestGridmixWith10minTrace.class);
+
+  /**
+   * Generate data and run Gridmix sleep jobs with the SERIAL submission 
+   * policy and the RoundRobinUserResolver against a 10-minute trace file.
+   * After all the jobs complete, verify each Gridmix job history against 
+   * the corresponding job story in the trace file.
+   * @throws Exception - if an error occurs.
+   */
+  @Test
+  public void testGridmixWith10minTrace() throws Exception {
+    final long inputSize = cSize * 250;
+    final long minFileSize = 200 * 1024 * 1024;
+    String [] runtimeValues ={"SLEEPJOB",
+       RoundRobinUserResolver.class.getName(),
+       "SERIAL",
+       inputSize+"m",
+       "file://" + UtilsForGridmix.getProxyUsersFile(conf),
+       map.get("10m")};
+
+    String [] otherArgs = {
+       "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+       "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false",
+       "-D", GridMixConfig.GRIDMIX_MINIMUM_FILE_SIZE + "=" + minFileSize,
+       "-D", GridMixConfig.GRIDMIX_JOB_SUBMISSION_QUEUE_IN_TRACE + "=false",
+       "-D", GridMixConfig.GRIDMIX_SLEEPJOB_MAPTASK_ONLY + "=true",
+       "-D", GridMixConfig.GRIDMIX_SLEEP_MAP_MAX_TIME + "=10"};
+    String tracePath = map.get("10m");
+    runGridmixAndVerify(runtimeValues, otherArgs,tracePath);
+  }
+}

Added: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java?rev=1079264&view=auto
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java (added)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java Tue Mar  8 06:01:53 2011
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred.gridmix;
+
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.mapred.gridmix.test.system.GridMixConfig;
+import org.junit.Test;
+
+/**
+ * Run Gridmix against a 12-minute MR job trace and 
+ * verify each job history against the corresponding job story 
+ * in the trace file.
+ */
+public class TestGridmixWith12minTrace extends GridmixSystemTestCase {
+  private static final Log LOG = LogFactory.
+     getLog(TestGridmixWith12minTrace.class);
+ 
+  /**
+   * Generate data and run Gridmix sleep jobs with the REPLAY submission 
+   * policy and the SubmitterUserResolver against a 12-minute trace file.
+   * After all the jobs complete, verify each Gridmix job history against 
+   * the corresponding job story in the trace file.
+   * @throws Exception - if an error occurs.
+   */
+  @Test
+  public void testGridmixWith12minTrace() throws Exception {
+    final long inputSize = cSize * 150;
+    String [] runtimeValues ={"SLEEPJOB",
+       SubmitterUserResolver.class.getName(),
+       "REPLAY",
+       inputSize+"m",
+       map.get("12m")};
+
+    String [] otherArgs = {
+        "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+        "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false",
+        "-D", GridMixConfig.GRIDMIX_SLEEP_MAP_MAX_TIME + "=10",
+        "-D", GridMixConfig.GRIDMIX_SLEEP_REDUCE_MAX_TIME + "=5"};
+    String tracePath = map.get("12m");
+    runGridmixAndVerify(runtimeValues, otherArgs, tracePath);
+  }
+}

Added: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java?rev=1079264&view=auto
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java (added)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java Tue Mar  8 06:01:53 2011
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.gridmix;
+
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+import org.junit.Test;
+import org.apache.hadoop.mapred.gridmix.test.system.GridMixConfig;
+
+/**
+ * Run Gridmix against a 1-minute MR job trace and 
+ * verify each job history against the corresponding job story 
+ * in the trace file.
+ */
+public class TestGridmixWith1minTrace extends GridmixSystemTestCase{
+  private static final Log LOG = LogFactory.
+      getLog(TestGridmixWith1minTrace.class);
+
+  /**
+   * Generate data and run a Gridmix load job with the STRESS submission 
+   * policy and the SubmitterUserResolver against a 1-minute trace file. 
+   * After all the jobs complete, verify each Gridmix job history against 
+   * the corresponding job story in the trace file.
+   * @throws Exception - if an error occurs.
+   */
+  @Test
+  public void testGridmixWith1minTrace() throws Exception {
+    final long inputSize = cSize * 400;
+    String [] runtimeValues ={"LOADJOB",
+       SubmitterUserResolver.class.getName(),
+       "STRESS",
+       inputSize+"m",
+       map.get("1m")};
+    String [] otherArgs = {
+      "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE +
+      "=false",
+      "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE +
+      "=false"
+    };
+    String tracePath = map.get("1m");
+    runGridmixAndVerify(runtimeValues, otherArgs, tracePath);
+  }
+}

Added: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java?rev=1079264&view=auto
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java (added)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java Tue Mar  8 06:01:53 2011
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.gridmix;
+
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.mapred.gridmix.test.system.UtilsForGridmix;
+import org.apache.hadoop.mapred.gridmix.test.system.GridMixConfig;
+import org.junit.Test;
+
+/**
+ * Run Gridmix against a 3-minute MR job trace and 
+ * verify each job history against the corresponding job story 
+ * in the trace file.
+ */
+public class TestGridmixWith3minTrace extends GridmixSystemTestCase {
+  private static final Log LOG = LogFactory.
+      getLog(TestGridmixWith3minTrace.class);
+
+  /**
+   * Generate data and run a Gridmix load job with the REPLAY submission 
+   * policy and the RoundRobinUserResolver against a 3-minute trace file. 
+   * After all the jobs complete, verify each Gridmix job history against 
+   * the corresponding job story in the trace file.
+   * @throws Exception - if an error occurs.
+   */
+  @Test
+  public void testGridmixWith3minTrace() throws Exception {
+    final long inputSize = cSize * 200;
+    String [] runtimeValues ={"LOADJOB",
+       RoundRobinUserResolver.class.getName(),
+       "REPLAY",
+       inputSize+"m",
+       "file://" + UtilsForGridmix.getProxyUsersFile(conf),
+       map.get("3m")};
+
+    String [] otherArgs = {
+      "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE +
+      "=false",
+      "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE +
+      "=false"
+    };
+
+    String tracePath = map.get("3m");
+    runGridmixAndVerify(runtimeValues, otherArgs, tracePath);  
+  }
+}

Added: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java?rev=1079264&view=auto
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java (added)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java Tue Mar  8 06:01:53 2011
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.gridmix;
+
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.mapred.gridmix.test.system.GridMixConfig;
+import org.junit.Test;
+
+
+/**
+ * Run Gridmix against a 5-minute MR job trace and 
+ * verify each job history against the corresponding job story 
+ * in the trace file.
+ */
+public class TestGridmixWith5minTrace extends GridmixSystemTestCase {
+  private static final Log LOG = LogFactory.
+     getLog(TestGridmixWith5minTrace.class);
+
+  /**
+   * Generate data and run a Gridmix load job with the SERIAL submission 
+   * policy and the SubmitterUserResolver against a 5-minute trace file. 
+   * After all the jobs complete, verify each Gridmix job history against 
+   * the corresponding job story in the trace file.
+   * @throws Exception - if an error occurs.
+   */
+  @Test
+  public void testGridmixWith5minTrace() throws Exception {
+    final long inputSize = cSize * 300;
+    final long minFileSize = 100 * 1024 * 1024;
+    String [] runtimeValues ={"LOADJOB",
+       SubmitterUserResolver.class.getName(),
+       "SERIAL",
+       inputSize + "m",
+       map.get("5m")};
+    
+    String [] otherArgs = {
+      "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE +
+      "=false",
+      "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE +
+      "=false",
+      "-D", GridMixConfig.GRIDMIX_MINIMUM_FILE_SIZE + 
+      "=" + minFileSize
+    };
+
+    String tracePath = map.get("5m");
+    runGridmixAndVerify(runtimeValues, otherArgs, tracePath);
+  }
+}

Added: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java?rev=1079264&view=auto
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java (added)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java Tue Mar  8 06:01:53 2011
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred.gridmix;
+
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.mapred.gridmix.test.system.GridMixConfig;
+import org.junit.Test;
+
+/**
+ * Run Gridmix against a 7-minute MR job trace and 
+ * verify each job history against the corresponding job story 
+ * in the trace file.
+ */
+public class TestGridmixWith7minTrace extends GridmixSystemTestCase {
+  private static final Log LOG = LogFactory.
+      getLog(TestGridmixWith7minTrace.class);
+  
+  /**
+   * Generate data and run Gridmix sleep jobs with the STRESS submission 
+   * policy and the SubmitterUserResolver against a 7-minute trace file.
+   * After all the jobs complete, verify each Gridmix job history against 
+   * the corresponding job story in the trace file.
+   * @throws Exception - if an error occurs.
+   */
+  @Test
+  public void testGridmixWith7minTrace() throws Exception {
+    final long inputSize = cSize * 400;
+    final long minFileSize = 200 * 1024 * 1024;
+    String [] runtimeValues ={"SLEEPJOB",
+       SubmitterUserResolver.class.getName(),
+       "STRESS",
+       inputSize+"m",
+       map.get("7m")};
+    String [] otherArgs = {
+       "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE +
+       "=false",
+       "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE +
+       "=false",
+       "-D", GridMixConfig.GRIDMIX_MINIMUM_FILE_SIZE + 
+       "=" + minFileSize,
+       "-D", GridMixConfig.GRIDMIX_JOB_SUBMISSION_QUEUE_IN_TRACE + 
+       "=false"};
+    String tracePath = map.get("7m");
+    runGridmixAndVerify(runtimeValues, otherArgs, tracePath);
+  }
+}

Modified: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridMixConfig.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridMixConfig.java?rev=1079264&r1=1079263&r2=1079264&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridMixConfig.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridMixConfig.java Tue Mar  8 06:01:53 2011
@@ -74,6 +74,30 @@ public class GridMixConfig {
       "gridmix.min.file.size";
 
   /**
+   * Gridmix sleep job map task only.
+   */
+  public static final String GRIDMIX_SLEEPJOB_MAPTASK_ONLY =
+      "gridmix.sleep.maptask-only";
+
+  /**
+   * Gridmix sleep map maximum time.
+   */
+  public static final String GRIDMIX_SLEEP_MAP_MAX_TIME =
+      "gridmix.sleep.max-map-time";
+
+  /**
+   * Gridmix sleep reduce maximum time.
+   */
+  public static final String GRIDMIX_SLEEP_REDUCE_MAX_TIME =
+    "gridmix.sleep.max-reduce-time";
+
+  /**
+   * Gridmix key fraction.
+   */
+  public static final String GRIDMIX_KEY_FRC =
+    "gridmix.key.fraction";
+
+  /**
    * Gridmix compression enable
    */
   public static final String GRIDMIX_COMPRESSION_ENABLE =

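For context, the new keys above are consumed the same way as the existing GridMixConfig keys, through -D definitions on the Gridmix command line. A minimal sketch, not part of this commit, with illustrative values only (mirroring the sleep-job tests earlier in this change), written as it would appear inside a test method:

    // Illustrative only: sleep-job overrides built from the new GridMixConfig keys.
    String[] sleepJobOverrides = {
        "-D", GridMixConfig.GRIDMIX_SLEEPJOB_MAPTASK_ONLY + "=true",
        "-D", GridMixConfig.GRIDMIX_SLEEP_MAP_MAX_TIME + "=10",
        "-D", GridMixConfig.GRIDMIX_SLEEP_REDUCE_MAX_TIME + "=5",
        "-D", GridMixConfig.GRIDMIX_KEY_FRC + "=0.5"};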
Added: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobStory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobStory.java?rev=1079264&view=auto
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobStory.java (added)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobStory.java Tue Mar  8 06:01:53 2011
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.gridmix.test.system;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.tools.rumen.ZombieJobProducer;
+import org.apache.hadoop.tools.rumen.ZombieJob;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * Build the job stories from a given trace file. 
+ */
+public class GridmixJobStory {
+  private static Log LOG = LogFactory.getLog(GridmixJobStory.class);
+  private Path path;
+  private Map<JobID,ZombieJob> zombieJobs;
+  private Configuration conf;
+  
+  public GridmixJobStory(Path path, Configuration conf) {
+    this.path = path;
+    this.conf = conf;
+    try {
+       zombieJobs = buildJobStories();
+       if(zombieJobs == null) {
+          throw new NullPointerException("No jobs found in the " +
+              "given trace file.");
+       }
+    } catch (IOException ioe) {
+      LOG.warn("Error:" + ioe.getMessage());
+    } catch (NullPointerException npe) {
+      LOG.warn("Error:" + npe.getMessage());
+    }
+  }
+  
+  /**
+   * Get the zombie jobs as a map.
+   * @return the zombie jobs map.
+   */
+  public Map<JobID, ZombieJob> getZombieJobs() {
+    return zombieJobs;
+  }
+  
+  /**
+   * Get the zombie job of a given job id.
+   * @param jobId - gridmix job id.
+   * @return - the zombie job object.
+   */
+  public ZombieJob getZombieJob(JobID jobId) {
+    return zombieJobs.get(jobId);
+  }
+  
+  private Map<JobID,ZombieJob> buildJobStories() throws IOException {
+    ZombieJobProducer zjp = new ZombieJobProducer(path,null, conf);
+    Map<JobID, ZombieJob> hm = new HashMap<JobID, ZombieJob>();
+    ZombieJob zj = zjp.getNextJob();
+    while (zj != null) {
+      hm.put(zj.getJobID(),zj);
+      zj = zjp.getNextJob();
+    }
+    if (hm.size() == 0) {
+      return null;
+    } else {
+      return hm;
+    }
+  }
+}
\ No newline at end of file
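A minimal usage sketch for GridmixJobStory, not part of this commit; it mirrors how GridmixJobVerification below maps a simulated Gridmix job back to its original job story. Here tracePath, conf and origJobId are assumed to be in scope:

    // Illustrative only: resolve the original job story for a Gridmix job.
    GridmixJobStory gjs = new GridmixJobStory(new Path(tracePath), conf);
    ZombieJob original = gjs.getZombieJob(JobID.forName(origJobId));
    if (original != null) {
      // The story exposes the expected task counts used during verification.
      int expectedMaps = original.getNumberMaps();
      int expectedReduces = original.getNumberReduces();
    }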

Added: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobSubmission.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobSubmission.java?rev=1079264&view=auto
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobSubmission.java (added)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobSubmission.java Tue Mar  8 06:01:53 2011
@@ -0,0 +1,70 @@
+package org.apache.hadoop.mapred.gridmix.test.system;
+
+import java.io.IOException;
+import java.util.List;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.test.system.JTClient;
+import org.apache.hadoop.mapreduce.test.system.JTProtocol;
+import org.junit.Assert;
+
+/**
+ * Submit the gridmix jobs. 
+ */
+public class GridmixJobSubmission {
+  private static final Log LOG = LogFactory.
+     getLog(GridmixJobSubmission.class);
+  private int gridmixJobCount;
+  private Configuration conf;
+  private Path gridmixDir;
+  private JTClient jtClient;
+
+  public GridmixJobSubmission(Configuration conf,JTClient jtClient , 
+    Path gridmixDir) {
+    this.conf = conf;
+    this.jtClient = jtClient;
+    this.gridmixDir = gridmixDir;
+  }
+  
+  /**
+   * Submit the gridmix jobs.
+   * @param runtimeArgs - gridmix common runtime arguments.
+   * @param otherArgs - gridmix other runtime arguments.
+   * @throws Exception - if an error occurs while submitting the jobs.
+   */
+  public void submitJobs(String [] runtimeArgs,
+     String [] otherArgs) throws Exception {
+    int prvJobCount = jtClient.getClient().getAllJobs().length;
+    int exitCode = -1;
+    if (otherArgs == null) {
+      exitCode = UtilsForGridmix.runGridmixJob(gridmixDir,
+          conf, GridMixRunMode.DATA_GENERATION_AND_RUN_GRIDMIX,
+          runtimeArgs);
+    } else {
+      exitCode = UtilsForGridmix.runGridmixJob(gridmixDir,
+          conf, GridMixRunMode.DATA_GENERATION_AND_RUN_GRIDMIX,
+          runtimeArgs, otherArgs);
+    }
+    Assert.assertEquals("Gridmix jobs have failed.", 0 , exitCode);
+    gridmixJobCount = jtClient.getClient().getAllJobs().length;
+    gridmixJobCount -= prvJobCount;
+  }
+  /**
+   * Get the submitted jobs count.
+   * @return the number of jobs submitted for a trace.
+   */
+  public int getGridmixJobCount() {
+     return gridmixJobCount;
+  }
+  /**
+   * Get the job configuration.
+   * @return Configuration of a submitted job.
+   */
+  public Configuration getJobConf() {
+    return conf;
+  }
+}
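A usage sketch, not part of this commit, showing how this helper is driven from GridmixSystemTestCase.runGridmixAndVerify above; rtClient, jtClient, gridmixDir, runtimeValues and otherArgs are the fields and arguments of that test case:

    // Illustrative only: submit the Gridmix jobs and collect their job IDs.
    GridmixJobSubmission gridmixJS = new GridmixJobSubmission(
        rtClient.getDaemonConf(), jtClient, gridmixDir);
    gridmixJS.submitJobs(runtimeValues, otherArgs);
    List<JobID> jobids = UtilsForGridmix.listGridmixJobIDs(
        jtClient.getClient(), gridmixJS.getGridmixJobCount());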

Added: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java?rev=1079264&view=auto
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java (added)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java Tue Mar  8 06:01:53 2011
@@ -0,0 +1,256 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.gridmix.test.system;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Set;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.CounterGroup;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.test.system.JTClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.tools.rumen.LoggedJob;
+import org.apache.hadoop.tools.rumen.ZombieJob;
+import org.apache.hadoop.tools.rumen.TaskInfo;
+import org.junit.Assert;
+import java.text.ParseException;
+import org.apache.hadoop.security.UserGroupInformation;
+/**
+ * Verify each Gridmix job against the corresponding job story in a trace file.
+ */
+public class GridmixJobVerification {
+
+  private static Log LOG = LogFactory.getLog(GridmixJobVerification.class);
+  private Path path;
+  private Configuration conf;
+  private JTClient jtClient;
+  /**
+   * Gridmix job verification constructor
+   * @param path - path of the gridmix output directory.
+   * @param conf - cluster configuration.
+   * @param jtClient - jobtracker client.
+   */
+  public GridmixJobVerification(Path path, Configuration conf, 
+     JTClient jtClient) {
+    this.path = path;
+    this.conf = conf;
+    this.jtClient = jtClient;
+  }
+  
+  /**
+   * Verify the Gridmix jobs against the corresponding job stories in a trace file.
+   * @param jobids - gridmix job ids.
+   * @throws IOException - if an I/O error occurs.
+   * @throws ParseException - if a parse error occurs.
+   */
+  public void verifyGridmixJobsWithJobStories(List<JobID> jobids) 
+      throws IOException, ParseException {
+
+    List<Long> origSubmissionTime = new ArrayList<Long>();
+    List<Long> simuSubmissionTime = new ArrayList<Long>();
+    GridmixJobStory gjs = new GridmixJobStory(path, conf);
+    final Iterator<JobID> ite = jobids.iterator();
+    java.io.File destFolder = new java.io.File("/tmp/gridmix-st/");
+
+    destFolder.mkdir();
+    while (ite.hasNext()) {
+      long expMapInputBytes = 0;
+      long expMapOutputBytes = 0;
+      long expMapInputRecs = 0;
+      long expMapOutputRecs = 0;
+      long expReduceInputBytes = 0;
+      long expReduceOutputBytes = 0;
+      long expReduceInputRecs = 0;
+      long expReduceOutputRecs = 0;
+      
+      JobID currJobId = ite.next();
+      String historyFilePath = jtClient.getProxy().
+          getJobHistoryLocationForRetiredJob(currJobId);
+      Path jhpath = new Path(historyFilePath);
+      FileSystem fs = jhpath.getFileSystem(conf);
+      JobHistoryParser jhparser = new JobHistoryParser(fs, jhpath);
+      JobHistoryParser.JobInfo jhInfo = jhparser.parse();
+      Counters counters = jhInfo.getTotalCounters();
+
+      fs.copyToLocalFile(jhpath,new Path(destFolder.toString()));
+      fs.copyToLocalFile(new Path(historyFilePath + "_conf.xml"), 
+          new Path(destFolder.toString()));
+      JobConf jobConf = new JobConf(conf);
+      jobConf.addResource(new Path("/tmp/gridmix-st/" + 
+          currJobId + "_conf.xml"));
+      String origJobId = jobConf.get("gridmix.job.original-job-id");
+      LOG.info("OriginalJobID<->CurrentJobID:" + 
+          origJobId + "<->" + currJobId);
+
+      ZombieJob zombieJob = gjs.getZombieJob(JobID.forName(origJobId));
+      LoggedJob loggedJob = zombieJob.getLoggedJob();
+      
+      for (int index = 0; index < zombieJob.getNumberMaps(); index ++) {
+        TaskInfo mapTask = zombieJob.getTaskInfo(TaskType.MAP, index);
+        expMapInputBytes += mapTask.getInputBytes();
+        expMapOutputBytes += mapTask.getOutputBytes();
+        expMapInputRecs += mapTask.getInputRecords();
+        expMapOutputRecs += mapTask.getOutputRecords();
+      }
+
+      for (int index = 0; index < zombieJob.getNumberReduces(); index ++) {
+        TaskInfo reduceTask = zombieJob.getTaskInfo(TaskType.REDUCE, index);
+        expReduceInputBytes += reduceTask.getInputBytes();
+        expReduceOutputBytes += reduceTask.getOutputBytes();
+        expReduceInputRecs += reduceTask.getInputRecords();
+        expReduceOutputRecs += reduceTask.getOutputRecords();
+      }
+
+      LOG.info("Verifying the job <" + currJobId + ">; this may take a while...");
+      Assert.assertEquals("Job id has not matched",
+          zombieJob.getJobID(), JobID.forName(origJobId));
+
+      Assert.assertEquals("Job maps have not matched", 
+          zombieJob.getNumberMaps(), 
+          jhInfo.getTotalMaps());
+
+      if (!jobConf.getBoolean("gridmix.sleep.maptask-only",false)) {
+        Assert.assertEquals("Job reducers have not matched",
+            zombieJob.getNumberReduces(), jhInfo.getTotalReduces());
+      } else {
+        Assert.assertEquals("Job reducers have not matched",
+            0, jhInfo.getTotalReduces());
+      }
+
+      Assert.assertEquals("Job status has not matched.", 
+          zombieJob.getOutcome().name(), 
+          convertJobStatus(jhInfo.getJobStatus()));
+
+      Assert.assertEquals("Job priority has not matched.", 
+         loggedJob.getPriority().toString(), jhInfo.getPriority());
+
+      if (jobConf.get("gridmix.user.resolve.class").contains("RoundRobin")) {
+         Assert.assertTrue(currJobId + " has not been impersonated by another user.",
+             !jhInfo.getUsername().equals(UserGroupInformation.
+             getLoginUser().getShortUserName()));
+      }
+
+      if (jobConf.get("gridmix.job-submission.policy").contains("REPLAY")) {
+        origSubmissionTime.add(zombieJob.getSubmissionTime()); 
+        simuSubmissionTime.add(jhInfo.getSubmitTime());
+      }
+
+      if (!jobConf.get("gridmix.job.type", "LOADJOB").equals("SLEEPJOB")) {
+        
+        // The statements below are commented out due to a bug (MAPREDUCE-2135).
+      /*  Assert.assertTrue("Map input bytes have not matched.<exp:[" + 
+            convertBytes(expMapInputBytes) +"]><act:[" + 
+            convertBytes(getCounterValue(counters,"HDFS_BYTES_READ")) + "]>", 
+            convertBytes(expMapInputBytes).equals( 
+            convertBytes(getCounterValue(counters,"HDFS_BYTES_READ"))));
+
+        Assert.assertTrue("Map output bytes has not matched.<exp:[" +
+            convertBytes(expMapOutputBytes) + "]><act:[" +
+            convertBytes(getCounterValue(counters, "MAP_OUTPUT_BYTES")) + "]>", 
+            convertBytes(expMapOutputBytes).equals( 
+            convertBytes(getCounterValue(counters, "MAP_OUTPUT_BYTES"))));*/
+
+        Assert.assertEquals("Map input records have not matched.<exp:[" +
+            expMapInputRecs + "]><act:[" +
+            getCounterValue(counters, "MAP_INPUT_RECORDS") + "]>", 
+            expMapInputRecs, getCounterValue(counters, "MAP_INPUT_RECORDS"));
+
+        // The statements below are commented out due to a bug (MAPREDUCE-2154).
+        /*Assert.assertEquals("Map output records have not matched.<exp:[" +
+            expMapOutputRecs + "]><act:[" +
+            getCounterValue(counters, "MAP_OUTPUT_RECORDS") + "]>", 
+            expMapOutputRecs, getCounterValue(counters, "MAP_OUTPUT_RECORDS"));*/
+
+        /*Assert.assertTrue("Reduce input bytes have not matched.<exp:[" +
+            convertBytes(expReduceInputBytes) + "]><act:[" +
+            convertBytes(getCounterValue(counters,"REDUCE_SHUFFLE_BYTES")) + "]>", 
+            convertBytes(expReduceInputBytes).equals( 
+            convertBytes(getCounterValue(counters,"REDUCE_SHUFFLE_BYTES"))));*/ 
+
+        /*Assert.assertEquals("Reduce output bytes have not matched.<exp:[" + 
+            convertBytes(expReduceOutputBytes) + "]><act:[" +
+            convertBytes(getCounterValue(counters,"HDFS_BYTES_WRITTEN")) + "]>", 
+            convertBytes(expReduceOutputBytes).equals( 
+            convertBytes(getCounterValue(counters,"HDFS_BYTES_WRITTEN"))));*/
+
+        /*Assert.assertEquals("Reduce output records have not matched.<exp:[" + 
+            expReduceOutputRecs + "]><act:[" + getCounterValue(counters,
+            "REDUCE_OUTPUT_RECORDS") + "]>", 
+            expReduceOutputRecs, getCounterValue(counters,
+            "REDUCE_OUTPUT_RECORDS"));*/ 
+ 
+         /*Assert.assertEquals("Reduce input records have not matched.<exp:[" + 
+            expReduceInputRecs + "]><act:[" + getCounterValue(counters,
+            "REDUCE_INPUT_RECORDS") + "]>",
+            expReduceInputRecs, 
+            getCounterValue(counters,"REDUCE_INPUT_RECORDS"));*/
+        LOG.info("Done.");
+      }
+    }
+  }
+
+  private String convertJobStatus(String jobStatus) {
+    if (jobStatus.equals("SUCCEEDED")) { 
+      return "SUCCESS";
+    } else {
+      return jobStatus;
+    }
+  }
+  
+  private String convertBytes(long bytesValue) {
+    int units = 1024;
+    if( bytesValue < units ) {
+      return String.valueOf(bytesValue)+ "B";
+    } else {
+      int exp = (int)(Math.log(bytesValue) / Math.log(units));
+      return String.format("%1d%sB",(long)(bytesValue / Math.pow(units, exp)), 
+          "KMGTPE".charAt(exp -1));
+    }
+  }
+ 
+
+  private long getCounterValue(Counters counters,String key) 
+     throws ParseException { 
+    for (String groupName : counters.getGroupNames()) {
+       CounterGroup totalGroup = counters.getGroup(groupName);
+       Iterator<Counter> itrCounter = totalGroup.iterator();
+       while (itrCounter.hasNext()) {
+         Counter counter = itrCounter.next();
+         if (counter.getName().equals(key)) {
+           return counter.getValue();
+         }
+       }
+    }
+    return 0;
+  }
+}
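And the matching verification step, again as wired up in GridmixSystemTestCase.runGridmixAndVerify above (sketch only, with gridmixJS, jobids, tracePath and jtClient as in the previous sketch):

    // Illustrative only: verify each submitted Gridmix job against its job story.
    GridmixJobVerification gridmixJV = new GridmixJobVerification(
        new Path(tracePath), gridmixJS.getJobConf(), jtClient);
    gridmixJV.verifyGridmixJobsWithJobStories(jobids);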

Modified: hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/UtilsForGridmix.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/UtilsForGridmix.java?rev=1079264&r1=1079263&r2=1079264&view=diff
==============================================================================
--- hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/UtilsForGridmix.java (original)
+++ hadoop/mapreduce/branches/yahoo-merge/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/UtilsForGridmix.java Tue Mar  8 06:01:53 2011
@@ -27,9 +27,13 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.mapred.gridmix.Gridmix;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapreduce.JobID;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Arrays;
 import java.net.URI;
 import java.text.SimpleDateFormat;
@@ -246,6 +250,46 @@ public class UtilsForGridmix {
         return fileName;
      }
   }
+  
+  /**
+  * List the current gridmix job IDs.
+  * @param client - job client.
+  * @param execJobCount - number of executed jobs.
+  * @return - list of gridmix job IDs, or null if none are found.
+  * @throws IOException - if an I/O error occurs.
+  */
+ public static List<JobID> listGridmixJobIDs(JobClient client,
+     int execJobCount) throws IOException {
+   List<JobID> jobids = new ArrayList<JobID>();
+   for (int index = 1; index <= execJobCount; index++) {
+     JobID jobid = client.getAllJobs()[client.getAllJobs().
+        length - index].getJobID();
+     RunningJob runJob = client.getJob(jobid.toString());
+     if (!runJob.getJobName().equals("GRIDMIX_GENERATE_INPUT_DATA")) {
+       jobids.add(jobid);
+     }
+   }
+   return (jobids.size() == 0)? null : jobids;
+ }
+
+ /**
+  * List the proxy users, excluding the login user.
+  * @param conf - cluster configuration.
+  * @param loginUser - login user name to exclude from the returned list.
+  * @return - list of proxy user names.
+  * @throws Exception - if an error occurs while reading the proxy users data.
+  */
+ public static List<String> listProxyUsers(Configuration conf,
+     String loginUser) throws Exception {
+   List<String> proxyUsers = new ArrayList<String>();
+   ProxyUserDefinitions pud = getProxyUsersData(conf);
+   Map<String, GroupsAndHost> usersData = pud.getProxyUsers();
+   Collection users = usersData.keySet();
+   Iterator<String> itr = users.iterator();
+   while (itr.hasNext()) {
+     String user = itr.next();
+     if (!user.equals(loginUser)){ proxyUsers.add(user); };
+   }
+   return proxyUsers;
+ }
 
   private static String buildProxyUsersFile(final Map<String, GroupsAndHost> 
       proxyUserData) throws Exception {