Posted to commits@oozie.apache.org by ro...@apache.org on 2013/06/11 18:56:41 UTC

svn commit: r1491873 - in /oozie/trunk: ./ core/src/test/java/org/apache/hadoop/ core/src/test/java/org/apache/hadoop/examples/ core/src/test/java/org/apache/oozie/action/hadoop/ core/src/test/java/org/apache/oozie/command/coord/ core/src/test/java/org...

Author: rohini
Date: Tue Jun 11 16:56:40 2013
New Revision: 1491873

URL: http://svn.apache.org/r1491873
Log:
OOZIE-1374 Make all unit tests run with Hadoop 2 (rohini)

Added:
    oozie/trunk/core/src/test/java/org/apache/hadoop/
    oozie/trunk/core/src/test/java/org/apache/hadoop/examples/
    oozie/trunk/core/src/test/java/org/apache/hadoop/examples/SleepJob.java
Modified:
    oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
    oozie/trunk/core/src/test/java/org/apache/oozie/command/coord/TestCoordPushDependencyCheckXCommand.java
    oozie/trunk/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
    oozie/trunk/core/src/test/java/org/apache/oozie/test/MiniHCatServer.java
    oozie/trunk/core/src/test/java/org/apache/oozie/test/XTestCase.java
    oozie/trunk/hadooplibs/hadoop-0.23/pom.xml
    oozie/trunk/hadooplibs/hadoop-2/pom.xml
    oozie/trunk/hadooplibs/hadoop-distcp-0.23/pom.xml
    oozie/trunk/hadooplibs/hadoop-distcp-2/pom.xml
    oozie/trunk/hadooplibs/hadoop-test-0.23/pom.xml
    oozie/trunk/hadooplibs/hadoop-test-2/pom.xml
    oozie/trunk/minitest/pom.xml
    oozie/trunk/pom.xml
    oozie/trunk/release-log.txt
    oozie/trunk/sharelib/pig/pom.xml
    oozie/trunk/tools/pom.xml

Added: oozie/trunk/core/src/test/java/org/apache/hadoop/examples/SleepJob.java
URL: http://svn.apache.org/viewvc/oozie/trunk/core/src/test/java/org/apache/hadoop/examples/SleepJob.java?rev=1491873&view=auto
==============================================================================
--- oozie/trunk/core/src/test/java/org/apache/hadoop/examples/SleepJob.java (added)
+++ oozie/trunk/core/src/test/java/org/apache/hadoop/examples/SleepJob.java Tue Jun 11 16:56:40 2013
@@ -0,0 +1,241 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.examples;
+
+import java.io.IOException;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.util.Iterator;
+import java.util.Random;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapred.lib.NullOutputFormat;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+
+/**
+ * Dummy class for testing the MR framework. Sleeps for a defined
+ * period of time in the mapper and reducer. Generates fake input for
+ * map/reduce jobs. Note that the generated number of input pairs is on
+ * the order of <code>numMappers * mapSleepTime / 100</code>, so the
+ * job uses some disk space.
+ */
+public class SleepJob extends Configured implements Tool,  
+             Mapper<IntWritable, IntWritable, IntWritable, NullWritable>,
+             Reducer<IntWritable, NullWritable, NullWritable, NullWritable>,
+             Partitioner<IntWritable,NullWritable> {
+
+  private long mapSleepDuration = 100;
+  private long reduceSleepDuration = 100;
+  private int mapSleepCount = 1;
+  private int reduceSleepCount = 1;
+  private int count = 0;
+
+  public int getPartition(IntWritable k, NullWritable v, int numPartitions) {
+    return k.get() % numPartitions;
+  }
+  
+  public static class EmptySplit implements InputSplit {
+    public void write(DataOutput out) throws IOException { }
+    public void readFields(DataInput in) throws IOException { }
+    public long getLength() { return 0L; }
+    public String[] getLocations() { return new String[0]; }
+  }
+
+  public static class SleepInputFormat extends Configured
+      implements InputFormat<IntWritable,IntWritable> {
+    public InputSplit[] getSplits(JobConf conf, int numSplits) {
+      InputSplit[] ret = new InputSplit[numSplits];
+      for (int i = 0; i < numSplits; ++i) {
+        ret[i] = new EmptySplit();
+      }
+      return ret;
+    }
+    public RecordReader<IntWritable,IntWritable> getRecordReader(
+        InputSplit ignored, JobConf conf, Reporter reporter)
+        throws IOException {
+      final int count = conf.getInt("sleep.job.map.sleep.count", 1);
+      if (count < 0) throw new IOException("Invalid map count: " + count);
+      final int redcount = conf.getInt("sleep.job.reduce.sleep.count", 1);
+      if (redcount < 0)
+        throw new IOException("Invalid reduce count: " + redcount);
+      final int emitPerMapTask = (redcount * conf.getNumReduceTasks());
+      return new RecordReader<IntWritable,IntWritable>() {
+        private int records = 0;
+        private int emitCount = 0;
+
+        public boolean next(IntWritable key, IntWritable value)
+            throws IOException {
+          key.set(emitCount);
+          int emit = emitPerMapTask / count;
+          if ((emitPerMapTask) % count > records) {
+            ++emit;
+          }
+          emitCount += emit;
+          value.set(emit);
+          return records++ < count;
+        }
+        public IntWritable createKey() { return new IntWritable(); }
+        public IntWritable createValue() { return new IntWritable(); }
+        public long getPos() throws IOException { return records; }
+        public void close() throws IOException { }
+        public float getProgress() throws IOException {
+          return records / ((float)count);
+        }
+      };
+    }
+  }
+
+  public void map(IntWritable key, IntWritable value,
+      OutputCollector<IntWritable, NullWritable> output, Reporter reporter)
+      throws IOException {
+
+    // It is expected that every map processes mapSleepCount records.
+    try {
+      reporter.setStatus("Sleeping... (" +
+          (mapSleepDuration * (mapSleepCount - count)) + ") ms left");
+      Thread.sleep(mapSleepDuration);
+    }
+    catch (InterruptedException ex) {
+      throw (IOException)new IOException(
+          "Interrupted while sleeping").initCause(ex);
+    }
+    ++count;
+    // output reduceSleepCount * numReduce number of random values, so that
+    // each reducer will get reduceSleepCount number of keys.
+    int k = key.get();
+    for (int i = 0; i < value.get(); ++i) {
+      output.collect(new IntWritable(k + i), NullWritable.get());
+    }
+  }
+
+  public void reduce(IntWritable key, Iterator<NullWritable> values,
+      OutputCollector<NullWritable, NullWritable> output, Reporter reporter)
+      throws IOException {
+    try {
+      reporter.setStatus("Sleeping... (" +
+          (reduceSleepDuration * (reduceSleepCount - count)) + ") ms left");
+      Thread.sleep(reduceSleepDuration);
+
+    }
+    catch (InterruptedException ex) {
+      throw (IOException)new IOException(
+          "Interrupted while sleeping").initCause(ex);
+    }
+    count++;
+  }
+
+  public void configure(JobConf job) {
+    this.mapSleepCount =
+      job.getInt("sleep.job.map.sleep.count", mapSleepCount);
+    this.reduceSleepCount =
+      job.getInt("sleep.job.reduce.sleep.count", reduceSleepCount);
+    this.mapSleepDuration =
+      job.getLong("sleep.job.map.sleep.time" , 100) / mapSleepCount;
+    this.reduceSleepDuration =
+      job.getLong("sleep.job.reduce.sleep.time" , 100) / reduceSleepCount;
+  }
+
+  public void close() throws IOException {
+  }
+
+  public static void main(String[] args) throws Exception{
+    int res = ToolRunner.run(new Configuration(), new SleepJob(), args);
+    System.exit(res);
+  }
+
+  public int run(int numMapper, int numReducer, long mapSleepTime,
+      int mapSleepCount, long reduceSleepTime,
+      int reduceSleepCount) throws IOException {
+    JobConf job = setupJobConf(numMapper, numReducer, mapSleepTime, 
+                  mapSleepCount, reduceSleepTime, reduceSleepCount);
+    JobClient.runJob(job);
+    return 0;
+  }
+
+  public JobConf setupJobConf(int numMapper, int numReducer, 
+                                long mapSleepTime, int mapSleepCount, 
+                                long reduceSleepTime, int reduceSleepCount) {
+    JobConf job = new JobConf(getConf(), SleepJob.class);
+    job.setNumMapTasks(numMapper);
+    job.setNumReduceTasks(numReducer);
+    job.setMapperClass(SleepJob.class);
+    job.setMapOutputKeyClass(IntWritable.class);
+    job.setMapOutputValueClass(NullWritable.class);
+    job.setReducerClass(SleepJob.class);
+    job.setOutputFormat(NullOutputFormat.class);
+    job.setInputFormat(SleepInputFormat.class);
+    job.setPartitionerClass(SleepJob.class);
+    job.setSpeculativeExecution(false);
+    job.setJobName("Sleep job");
+    FileInputFormat.addInputPath(job, new Path("ignored"));
+    job.setLong("sleep.job.map.sleep.time", mapSleepTime);
+    job.setLong("sleep.job.reduce.sleep.time", reduceSleepTime);
+    job.setInt("sleep.job.map.sleep.count", mapSleepCount);
+    job.setInt("sleep.job.reduce.sleep.count", reduceSleepCount);
+    return job;
+  }
+
+  public int run(String[] args) throws Exception {
+
+    if(args.length < 1) {
+      System.err.println("SleepJob [-m numMapper] [-r numReducer]" +
+          " [-mt mapSleepTime (msec)] [-rt reduceSleepTime (msec)]" +
+          " [-recordt recordSleepTime (msec)]");
+      ToolRunner.printGenericCommandUsage(System.err);
+      return -1;
+    }
+
+    int numMapper = 1, numReducer = 1;
+    long mapSleepTime = 100, reduceSleepTime = 100, recSleepTime = 100;
+    int mapSleepCount = 1, reduceSleepCount = 1;
+
+    for(int i=0; i < args.length; i++ ) {
+      if(args[i].equals("-m")) {
+        numMapper = Integer.parseInt(args[++i]);
+      }
+      else if(args[i].equals("-r")) {
+        numReducer = Integer.parseInt(args[++i]);
+      }
+      else if(args[i].equals("-mt")) {
+        mapSleepTime = Long.parseLong(args[++i]);
+      }
+      else if(args[i].equals("-rt")) {
+        reduceSleepTime = Long.parseLong(args[++i]);
+      }
+      else if (args[i].equals("-recordt")) {
+        recSleepTime = Long.parseLong(args[++i]);
+      }
+    }
+    
+    // sleep for *SleepTime duration in Task by recSleepTime per record
+    mapSleepCount = (int)Math.ceil(mapSleepTime / ((double)recSleepTime));
+    reduceSleepCount = (int)Math.ceil(reduceSleepTime / ((double)recSleepTime));
+    
+    return run(numMapper, numReducer, mapSleepTime, mapSleepCount,
+        reduceSleepTime, reduceSleepCount);
+  }
+
+}
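
For context on the class above: run(String[]) turns total sleep times into per-record counts, so "-mt 10000 -recordt 500" yields mapSleepCount = ceil(10000/500) = 20, and configure() then makes each record sleep 10000/20 = 500 ms. A minimal sketch of driving the job from code instead of the CLI (SleepJobDriver is a name for illustration; the Configuration is assumed to find a live cluster via *-site.xml on the classpath):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.examples.SleepJob;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.RunningJob;

    public class SleepJobDriver {
        public static void main(String[] args) throws Exception {
            SleepJob sleepJob = new SleepJob();
            sleepJob.setConf(new Configuration());
            // 1 map, 1 reduce; 10 s of map sleep spread over 20 records,
            // 1 s of reduce sleep over a single record
            JobConf jobConf = sleepJob.setupJobConf(1, 1, 10000, 20, 1000, 1);
            RunningJob job = JobClient.runJob(jobConf);   // blocks until the job finishes
            System.out.println("Finished " + job.getID());
        }
    }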

Modified: oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
URL: http://svn.apache.org/viewvc/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java (original)
+++ oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java Tue Jun 11 16:56:40 2013
@@ -1534,7 +1534,7 @@ public class TestJavaActionExecutor exte
         Path archiveFullPath = new Path(appPath, archivePath);
         ae.addToCache(conf, appPath, archiveFullPath.toString(), true);
         assertTrue(conf.get("mapred.cache.archives").contains(archiveFullPath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test archive with fragment
         Path archiveFragmentPath = new Path("test.jar#a.jar");
@@ -1542,7 +1542,7 @@ public class TestJavaActionExecutor exte
         conf.clear();
         ae.addToCache(conf, appPath, archiveFragmentFullPath.toString(), true);
         assertTrue(conf.get("mapred.cache.archives").contains(archiveFragmentFullPath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test .so without fragment
         Path appSoPath = new Path("lib/a.so");
@@ -1550,7 +1550,7 @@ public class TestJavaActionExecutor exte
         conf.clear();
         ae.addToCache(conf, appPath, appSoFullPath.toString(), false);
         assertTrue(conf.get("mapred.cache.files").contains(appSoFullPath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test .so with fragment
         Path appSoFragmentPath = new Path("lib/a.so#a.so");
@@ -1558,7 +1558,7 @@ public class TestJavaActionExecutor exte
         conf.clear();
         ae.addToCache(conf, appPath, appSoFragmentFullPath.toString(), false);
         assertTrue(conf.get("mapred.cache.files").contains(appSoFragmentFullPath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test .jar without fragment
         Path appJarPath = new Path("lib/a.jar");
@@ -1567,7 +1567,7 @@ public class TestJavaActionExecutor exte
         conf.set(WorkflowAppService.HADOOP_USER, getTestUser());
         ae.addToCache(conf, appPath, appJarFullPath.toString(), false);
         assertTrue(conf.get("mapred.cache.files").contains(appJarFullPath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test .jar with fragment
         Path appJarFragmentPath = new Path("lib/a.jar#a.jar");
@@ -1576,7 +1576,7 @@ public class TestJavaActionExecutor exte
         conf.set(WorkflowAppService.HADOOP_USER, getTestUser());
         ae.addToCache(conf, appPath, appJarFragmentFullPath.toString(), false);
         assertTrue(conf.get("mapred.cache.files").contains(appJarFragmentFullPath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test regular file without fragment
         Path appFilePath = new Path("lib/a.txt");
@@ -1584,7 +1584,7 @@ public class TestJavaActionExecutor exte
         conf.clear();
         ae.addToCache(conf, appPath, appFileFullPath.toString(), false);
         assertTrue(conf.get("mapred.cache.files").contains(appFileFullPath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test regular file with fragment
         Path appFileFragmentPath = new Path("lib/a.txt#a.txt");
@@ -1592,44 +1592,44 @@ public class TestJavaActionExecutor exte
         conf.clear();
         ae.addToCache(conf, appPath, appFileFragmentFullPath.toString(), false);
         assertTrue(conf.get("mapred.cache.files").contains(appFileFragmentFullPath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test path starting with "/" for archive
         Path testPath = new Path("/tmp/testpath/a.jar#a.jar");
         conf.clear();
         ae.addToCache(conf, appPath, testPath.toString(), true);
         assertTrue(conf.get("mapred.cache.archives").contains(testPath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test path starting with "/" for cache.file
         conf.clear();
         ae.addToCache(conf, appPath, testPath.toString(), false);
         assertTrue(conf.get("mapred.cache.files").contains(testPath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test absolute path for archive
         Path testAbsolutePath = new Path("hftp://namenode.test.com:8020/tmp/testpath/a.jar#a.jar");
         conf.clear();
         ae.addToCache(conf, appPath, testAbsolutePath.toString(), true);
         assertTrue(conf.get("mapred.cache.archives").contains(testAbsolutePath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test absolute path for cache files
         conf.clear();
         ae.addToCache(conf, appPath, testAbsolutePath.toString(), false);
         assertTrue(conf.get("mapred.cache.files").contains(testAbsolutePath.toString()));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test relative path for archive
         conf.clear();
         ae.addToCache(conf, appPath, "lib/a.jar#a.jar", true);
         assertTrue(conf.get("mapred.cache.archives").contains(appUri.getPath() + "/lib/a.jar#a.jar"));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
 
         // test relative path for cache files
         conf.clear();
         ae.addToCache(conf, appPath, "lib/a.jar#a.jar", false);
         assertTrue(conf.get("mapred.cache.files").contains(appUri.getPath() + "/lib/a.jar#a.jar"));
-        assertTrue(conf.get("mapred.create.symlink").contains("yes"));
+        assertTrue(DistributedCache.getSymlink(conf));
     }
 }
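
The assertions above switch from reading the raw "mapred.create.symlink" property to DistributedCache.getSymlink(conf), which holds on both lines: Hadoop 1 sets the property via createSymlink, while Hadoop 2 always creates symlinks and deprecates the property. A minimal sketch of the portable pattern (SymlinkCheck is illustrative; the class lives in org.apache.hadoop.filecache on both versions):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.filecache.DistributedCache;

    public class SymlinkCheck {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            DistributedCache.createSymlink(conf);   // request symlinks for cache entries
            System.out.println(DistributedCache.getSymlink(conf));   // prints true
        }
    }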

Modified: oozie/trunk/core/src/test/java/org/apache/oozie/command/coord/TestCoordPushDependencyCheckXCommand.java
URL: http://svn.apache.org/viewvc/oozie/trunk/core/src/test/java/org/apache/oozie/command/coord/TestCoordPushDependencyCheckXCommand.java?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/core/src/test/java/org/apache/oozie/command/coord/TestCoordPushDependencyCheckXCommand.java (original)
+++ oozie/trunk/core/src/test/java/org/apache/oozie/command/coord/TestCoordPushDependencyCheckXCommand.java Tue Jun 11 16:56:40 2013
@@ -267,7 +267,8 @@ public class TestCoordPushDependencyChec
         checkDependencies(actionId, newHCatDependency + CoordELFunctions.INSTANCE_SEPARATOR + newHCatDependency3,
                 newHCatDependency1);
         new CoordPushDependencyCheckXCommand(actionId).call();
-        Thread.sleep(300);
+        // Somehow with hive 0.10 it takes 1 second more.
+        Thread.sleep(1300);
 
         checkDependencies(actionId, newHCatDependency3, "");
         assertNull(pdms.getWaitingActions(new HCatURI(newHCatDependency1)));
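
The bump from 300 ms to 1300 ms absorbs the extra second Hive 0.10 takes to surface the dependency. A fixed sleep encodes one specific latency; an alternative sketch is a poll-until-true helper (waitUntil is hypothetical, not part of the test base shown here):

    // Hypothetical helper: retry a check until it passes or the deadline expires.
    private static void waitUntil(long timeoutMs, java.util.concurrent.Callable<Boolean> check)
            throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (System.currentTimeMillis() < deadline) {
            if (check.call()) {
                return;
            }
            Thread.sleep(100);   // poll interval
        }
        throw new AssertionError("Condition not met within " + timeoutMs + " ms");
    }

The test could then wait up to a generous ceiling without failing the moment Hive gets marginally slower.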

Modified: oozie/trunk/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
URL: http://svn.apache.org/viewvc/oozie/trunk/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java (original)
+++ oozie/trunk/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java Tue Jun 11 16:56:40 2013
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,8 +17,13 @@
  */
 package org.apache.oozie.command.wf;
 
+import java.net.URI;
 import java.util.Date;
 
+import org.apache.hadoop.examples.SleepJob;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RunningJob;
 import org.apache.oozie.WorkflowActionBean;
 import org.apache.oozie.WorkflowJobBean;
 import org.apache.oozie.action.hadoop.MapperReducerForTest;
@@ -27,6 +32,7 @@ import org.apache.oozie.client.WorkflowJ
 import org.apache.oozie.executor.jpa.JPAExecutorException;
 import org.apache.oozie.executor.jpa.WorkflowActionGetJPAExecutor;
 import org.apache.oozie.executor.jpa.WorkflowActionInsertJPAExecutor;
+import org.apache.oozie.service.HadoopAccessorService;
 import org.apache.oozie.service.JPAService;
 import org.apache.oozie.service.Services;
 import org.apache.oozie.service.UUIDService;
@@ -56,23 +62,24 @@ public class TestWorkflowActionKillXComm
      * @throws Exception
      */
     public void testWfActionKillSuccess() throws Exception {
-
+        String externalJobID = launchSleepJob();
         WorkflowJobBean job = this.addRecordToWfJobTable(WorkflowJob.Status.KILLED, WorkflowInstance.Status.KILLED);
-        WorkflowActionBean action = this.addRecordToWfActionTable(job.getId(), "1", WorkflowAction.Status.KILLED);
+        WorkflowActionBean action = this.addRecordToWfActionTable(job.getId(), externalJobID, "1",
+                WorkflowAction.Status.KILLED);
 
         JPAService jpaService = Services.get().get(JPAService.class);
         assertNotNull(jpaService);
         WorkflowActionGetJPAExecutor wfActionGetCmd = new WorkflowActionGetJPAExecutor(action.getId());
 
         action = jpaService.execute(wfActionGetCmd);
-        assertEquals(action.getStatus(), WorkflowAction.Status.KILLED);
-        assertEquals(action.getExternalStatus(), "RUNNING");
+        assertEquals(WorkflowAction.Status.KILLED, action.getStatus());
+        assertEquals("RUNNING", action.getExternalStatus());
 
         new ActionKillXCommand(action.getId()).call();
 
         action = jpaService.execute(wfActionGetCmd);
-        assertEquals(action.getStatus(), WorkflowAction.Status.KILLED);
-        assertEquals(action.getExternalStatus(), "KILLED");
+        assertEquals(WorkflowAction.Status.KILLED, action.getStatus());
+        assertEquals("KILLED", action.getExternalStatus());
     }
 
     /**
@@ -82,9 +89,10 @@ public class TestWorkflowActionKillXComm
      * @throws Exception
      */
     public void testWfActionKillFailed() throws Exception {
-
+        String externalJobID = launchSleepJob();
         WorkflowJobBean job = this.addRecordToWfJobTable(WorkflowJob.Status.RUNNING, WorkflowInstance.Status.RUNNING);
-        WorkflowActionBean action = this.addRecordToWfActionTable(job.getId(), "1", WorkflowAction.Status.RUNNING);
+        WorkflowActionBean action = this.addRecordToWfActionTable(job.getId(), externalJobID, "1",
+                WorkflowAction.Status.RUNNING);
 
         JPAService jpaService = Services.get().get(JPAService.class);
         assertNotNull(jpaService);
@@ -102,8 +110,8 @@ public class TestWorkflowActionKillXComm
         assertEquals(action.getExternalStatus(), "RUNNING");
     }
 
-    @Override
-    protected WorkflowActionBean addRecordToWfActionTable(String wfId, String actionName, WorkflowAction.Status status) throws Exception {
+    protected WorkflowActionBean addRecordToWfActionTable(String wfId, String externalJobID, String actionName,
+            WorkflowAction.Status status) throws Exception {
         WorkflowActionBean action = new WorkflowActionBean();
         action.setId(Services.get().get(UUIDService.class).generateChildId(wfId, actionName));
         action.setJobId(wfId);
@@ -114,7 +122,7 @@ public class TestWorkflowActionKillXComm
         action.setEndTime(new Date());
         action.setLastCheckTime(new Date());
         action.setPending();
-        action.setExternalId("job_201011110000_00000");
+        action.setExternalId(externalJobID);
         action.setExternalStatus("RUNNING");
 
         String actionXml = "<map-reduce>" +
@@ -145,4 +153,17 @@ public class TestWorkflowActionKillXComm
         return action;
     }
 
+    private String launchSleepJob() throws Exception {
+        JobConf jobConf = Services.get().get(HadoopAccessorService.class)
+                .createJobConf(new URI(getNameNodeUri()).getAuthority());
+        JobClient jobClient = createJobClient();
+
+        SleepJob sleepjob = new SleepJob();
+        sleepjob.setConf(jobConf);
+        jobConf = sleepjob.setupJobConf(1, 1, 1000, 1, 1000, 1);
+
+        final RunningJob runningJob = jobClient.submitJob(jobConf);
+        return runningJob.getID().toString();
+    }
+
 }
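
launchSleepJob() above gives the kill tests a real external job id in place of the previous hard-coded "job_201011110000_00000", so ActionKillXCommand has a live job to act on. A sketch of how such an id maps back to a job handle (jobClient and externalJobID as in the test above; JobID.forName is the standard mapred parser):

    import org.apache.hadoop.mapred.JobID;
    import org.apache.hadoop.mapred.RunningJob;

    // Look the job up again from the stored external id (sketch).
    RunningJob running = jobClient.getJob(JobID.forName(externalJobID));
    if (running != null && !running.isComplete()) {
        running.killJob();   // the operation the kill command ultimately performs
    }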

Modified: oozie/trunk/core/src/test/java/org/apache/oozie/test/MiniHCatServer.java
URL: http://svn.apache.org/viewvc/oozie/trunk/core/src/test/java/org/apache/oozie/test/MiniHCatServer.java?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/core/src/test/java/org/apache/oozie/test/MiniHCatServer.java (original)
+++ oozie/trunk/core/src/test/java/org/apache/oozie/test/MiniHCatServer.java Tue Jun 11 16:56:40 2013
@@ -102,7 +102,7 @@ public class MiniHCatServer {
         hiveConf.set("hive.metastore.local", "true"); // For hive 0.9
         hiveConf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, "jdbc:derby:target/metastore_db;create=true");
 
-        setSystemProperty(HiveConf.ConfVars.METASTORE_MODE.varname, "true");
+        setSystemProperty("hive.metastore.local", "true");
         setSystemProperty(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, new File("target/warehouse").getAbsolutePath());
         setSystemProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
                 "jdbc:derby:target/metastore_db;create=true");

Modified: oozie/trunk/core/src/test/java/org/apache/oozie/test/XTestCase.java
URL: http://svn.apache.org/viewvc/oozie/trunk/core/src/test/java/org/apache/oozie/test/XTestCase.java?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/core/src/test/java/org/apache/oozie/test/XTestCase.java (original)
+++ oozie/trunk/core/src/test/java/org/apache/oozie/test/XTestCase.java Tue Jun 11 16:56:40 2013
@@ -29,6 +29,7 @@ import java.net.URL;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
@@ -331,7 +332,7 @@ public abstract class XTestCase extends 
 
         if (mrCluster != null) {
             OutputStream os = new FileOutputStream(new File(hadoopConfDir, "core-site.xml"));
-            Configuration conf = mrCluster.createJobConf();
+            Configuration conf = createJobConfFromMRCluster();
             conf.writeXml(os);
             os.close();
         }
@@ -888,6 +889,20 @@ public abstract class XTestCase extends 
         }
     }
 
+    @SuppressWarnings("deprecation")
+    private JobConf createJobConfFromMRCluster() {
+        JobConf jobConf = new JobConf();
+        JobConf jobConfMR = mrCluster.createJobConf();
+        for (Entry<String, String> entry : jobConfMR) {
+            // MiniMRClientClusterFactory sets the job jar in Hadoop 2.0 causing tests to fail
+            // TODO call conf.unset after moving completely to Hadoop 2.x
+            if (!(entry.getKey().equals("mapreduce.job.jar") || entry.getKey().equals("mapred.jar"))) {
+                jobConf.set(entry.getKey(), entry.getValue());
+            }
+        }
+        return jobConf;
+    }
+
     /**
      * Returns a jobconf preconfigured to talk with the test cluster/minicluster.
      * @return a jobconf preconfigured to talk with the test cluster/minicluster.
@@ -895,7 +910,7 @@ public abstract class XTestCase extends 
     protected JobConf createJobConf() {
         JobConf jobConf;
         if (mrCluster != null) {
-            jobConf = mrCluster.createJobConf();
+            jobConf = createJobConfFromMRCluster();
         }
         else {
             jobConf = new JobConf();
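
createJobConfFromMRCluster() copies every entry except the job jar keys because Hadoop 1's Configuration offers no way to remove a key once set; per the TODO above, on a Hadoop 2-only code base the loop could collapse to direct removal (a sketch assuming Hadoop 2's Configuration.unset):

    // Hadoop 2-only sketch: drop the jar keys in place instead of copying around them.
    JobConf jobConf = mrCluster.createJobConf();
    jobConf.unset("mapreduce.job.jar");   // set by MiniMRClientClusterFactory, breaks these tests
    jobConf.unset("mapred.jar");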

Modified: oozie/trunk/hadooplibs/hadoop-0.23/pom.xml
URL: http://svn.apache.org/viewvc/oozie/trunk/hadooplibs/hadoop-0.23/pom.xml?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/hadooplibs/hadoop-0.23/pom.xml (original)
+++ oozie/trunk/hadooplibs/hadoop-0.23/pom.xml Tue Jun 11 16:56:40 2013
@@ -27,7 +27,7 @@
     </parent>
     <groupId>org.apache.oozie</groupId>
     <artifactId>oozie-hadoop</artifactId>
-    <version>0.23.6.oozie-4.1.0-SNAPSHOT</version>
+    <version>0.23.5.oozie-4.1.0-SNAPSHOT</version>
     <description>Apache Oozie Hadoop ${project.version}</description>
     <name>Apache Oozie Hadoop ${project.version}</name>
     <packaging>jar</packaging>
@@ -36,7 +36,7 @@
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-client</artifactId>
-            <version>0.23.6</version>
+            <version>0.23.5</version>
             <scope>compile</scope>
         </dependency>
     </dependencies>

Modified: oozie/trunk/hadooplibs/hadoop-2/pom.xml
URL: http://svn.apache.org/viewvc/oozie/trunk/hadooplibs/hadoop-2/pom.xml?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/hadooplibs/hadoop-2/pom.xml (original)
+++ oozie/trunk/hadooplibs/hadoop-2/pom.xml Tue Jun 11 16:56:40 2013
@@ -27,7 +27,7 @@
     </parent>
     <groupId>org.apache.oozie</groupId>
     <artifactId>oozie-hadoop</artifactId>
-    <version>2.0.2-alpha.oozie-4.1.0-SNAPSHOT</version>
+    <version>2.2.0-SNAPSHOT.oozie-4.1.0-SNAPSHOT</version>
     <description>Apache Oozie Hadoop ${project.version}</description>
     <name>Apache Oozie Hadoop ${project.version}</name>
     <packaging>jar</packaging>
@@ -36,7 +36,7 @@
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-client</artifactId>
-            <version>2.0.2-alpha</version>
+            <version>2.2.0-SNAPSHOT</version>
             <scope>compile</scope>
         </dependency>
     </dependencies>

Modified: oozie/trunk/hadooplibs/hadoop-distcp-0.23/pom.xml
URL: http://svn.apache.org/viewvc/oozie/trunk/hadooplibs/hadoop-distcp-0.23/pom.xml?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/hadooplibs/hadoop-distcp-0.23/pom.xml (original)
+++ oozie/trunk/hadooplibs/hadoop-distcp-0.23/pom.xml Tue Jun 11 16:56:40 2013
@@ -27,7 +27,7 @@
     </parent>
     <groupId>org.apache.oozie</groupId>
     <artifactId>oozie-hadoop-distcp</artifactId>
-    <version>0.23.6.oozie-4.1.0-SNAPSHOT</version>
+    <version>0.23.5.oozie-4.1.0-SNAPSHOT</version>
     <description>Apache Oozie Hadoop Distcp ${project.version}</description>
     <name>Apache Oozie Hadoop Distcp ${project.version}</name>
     <packaging>jar</packaging>
@@ -36,7 +36,7 @@
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-distcp</artifactId>
-            <version>0.23.6</version>
+            <version>0.23.5</version>
             <scope>compile</scope>
             <exclusions>
                 <exclusion>

Modified: oozie/trunk/hadooplibs/hadoop-distcp-2/pom.xml
URL: http://svn.apache.org/viewvc/oozie/trunk/hadooplibs/hadoop-distcp-2/pom.xml?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/hadooplibs/hadoop-distcp-2/pom.xml (original)
+++ oozie/trunk/hadooplibs/hadoop-distcp-2/pom.xml Tue Jun 11 16:56:40 2013
@@ -27,7 +27,7 @@
     </parent>
     <groupId>org.apache.oozie</groupId>
     <artifactId>oozie-hadoop-distcp</artifactId>
-    <version>2.0.2-alpha.oozie-4.1.0-SNAPSHOT</version>
+    <version>2.2.0-SNAPSHOT.oozie-4.1.0-SNAPSHOT</version>
     <description>Apache Oozie Hadoop Distcp ${project.version}</description>
     <name>Apache Oozie Hadoop Distcp ${project.version}</name>
     <packaging>jar</packaging>
@@ -36,7 +36,7 @@
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-distcp</artifactId>
-            <version>2.0.2-alpha</version>
+            <version>2.2.0-SNAPSHOT</version>
             <scope>compile</scope>
         </dependency>
     </dependencies>

Modified: oozie/trunk/hadooplibs/hadoop-test-0.23/pom.xml
URL: http://svn.apache.org/viewvc/oozie/trunk/hadooplibs/hadoop-test-0.23/pom.xml?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/hadooplibs/hadoop-test-0.23/pom.xml (original)
+++ oozie/trunk/hadooplibs/hadoop-test-0.23/pom.xml Tue Jun 11 16:56:40 2013
@@ -27,7 +27,7 @@
     </parent>
     <groupId>org.apache.oozie</groupId>
     <artifactId>oozie-hadoop-test</artifactId>
-    <version>0.23.6.oozie-4.1.0-SNAPSHOT</version>
+    <version>0.23.5.oozie-4.1.0-SNAPSHOT</version>
     <description>Apache Oozie Hadoop ${project.version} Test</description>
     <name>Apache Oozie Hadoop ${project.version} Test</name>
     <packaging>jar</packaging>
@@ -36,7 +36,7 @@
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-minicluster</artifactId>
-            <version>0.23.6</version>
+            <version>0.23.5</version>
             <scope>compile</scope>
         </dependency>
     </dependencies>

Modified: oozie/trunk/hadooplibs/hadoop-test-2/pom.xml
URL: http://svn.apache.org/viewvc/oozie/trunk/hadooplibs/hadoop-test-2/pom.xml?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/hadooplibs/hadoop-test-2/pom.xml (original)
+++ oozie/trunk/hadooplibs/hadoop-test-2/pom.xml Tue Jun 11 16:56:40 2013
@@ -27,7 +27,7 @@
     </parent>
     <groupId>org.apache.oozie</groupId>
     <artifactId>oozie-hadoop-test</artifactId>
-    <version>2.0.2-alpha.oozie-4.1.0-SNAPSHOT</version>
+    <version>2.2.0-SNAPSHOT.oozie-4.1.0-SNAPSHOT</version>
     <description>Apache Oozie Hadoop ${project.version} Test</description>
     <name>Apache Oozie Hadoop ${project.version} Test</name>
     <packaging>jar</packaging>
@@ -36,7 +36,7 @@
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-minicluster</artifactId>
-            <version>2.0.2-alpha</version>
+            <version>2.2.0-SNAPSHOT</version>
             <scope>compile</scope>
         </dependency>
     </dependencies>

Modified: oozie/trunk/minitest/pom.xml
URL: http://svn.apache.org/viewvc/oozie/trunk/minitest/pom.xml?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/minitest/pom.xml (original)
+++ oozie/trunk/minitest/pom.xml Tue Jun 11 16:56:40 2013
@@ -46,15 +46,13 @@
             <scope>test</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-core</artifactId>
-            <version>1.1.1</version>
+            <groupId>org.apache.oozie</groupId>
+            <artifactId>oozie-hadoop</artifactId>
             <scope>test</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-test</artifactId>
-            <version>1.1.1</version>
+            <groupId>org.apache.oozie</groupId>
+            <artifactId>oozie-hadoop-test</artifactId>
             <scope>test</scope>
         </dependency>
         <dependency>

Modified: oozie/trunk/pom.xml
URL: http://svn.apache.org/viewvc/oozie/trunk/pom.xml?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/pom.xml (original)
+++ oozie/trunk/pom.xml Tue Jun 11 16:56:40 2013
@@ -85,8 +85,9 @@
         <hadoop.auth.version>2.0.2-alpha</hadoop.auth.version>
 
          <!-- Sharelib component versions -->
-         <hive.version>0.9.0</hive.version>
-         <pig.version>0.9.0</pig.version>
+         <hive.version>0.10.0</hive.version>
+         <pig.version>0.10.1</pig.version>
+         <pig.classifier></pig.classifier>
          <sqoop.version>1.4.3</sqoop.version>
          <sqoop.classifier>hadoop100</sqoop.classifier>
          <streaming.version>${hadoop.version}</streaming.version>
@@ -384,6 +385,7 @@
                 <groupId>org.apache.pig</groupId>
                 <artifactId>pig</artifactId>
                 <version>${pig.version}</version>
+                <classifier>${pig.classifier}</classifier>
                 <exclusions>
                     <exclusion>
                         <groupId>org.apache.hadoop</groupId>
@@ -999,6 +1001,30 @@
             </build>
         </profile>
         <profile>
+            <id>hadoop-23</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+            </activation>
+            <properties>
+               <hadoop.version>0.23.5</hadoop.version>
+               <hadoop.auth.version>0.23.5</hadoop.auth.version>
+               <pig.classifier>h2</pig.classifier>
+               <sqoop.classifier>hadoop23</sqoop.classifier>
+            </properties>
+        </profile>
+        <profile>
+            <id>hadoop-2</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+            </activation>
+            <properties>
+               <hadoop.version>2.2.0-SNAPSHOT</hadoop.version>
+               <hadoop.auth.version>2.2.0-SNAPSHOT</hadoop.auth.version>
+               <pig.classifier>h2</pig.classifier>
+               <sqoop.classifier>hadoop200</sqoop.classifier>
+            </properties>
+        </profile>
+        <profile>
             <id>hadoop-3</id>
             <activation>
                 <activeByDefault>false</activeByDefault>
@@ -1006,6 +1032,8 @@
             <properties>
                <hadoop.version>3.0.0-SNAPSHOT</hadoop.version>
                <hadoop.auth.version>3.0.0-SNAPSHOT</hadoop.auth.version>
+               <pig.classifier>h2</pig.classifier>
+               <sqoop.classifier>hadoop200</sqoop.classifier>
             </properties>
         </profile>
         <profile>
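
Neither new profile activates by default, so a build selects its Hadoop line explicitly, e.g. "mvn clean test -Phadoop-2" (or -Phadoop-23 for the 0.23 line); the pig.classifier and sqoop.classifier properties then pull artifacts compiled against the matching Hadoop generation, while the empty default classifier keeps Hadoop 1 builds unchanged.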

Modified: oozie/trunk/release-log.txt
URL: http://svn.apache.org/viewvc/oozie/trunk/release-log.txt?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/release-log.txt (original)
+++ oozie/trunk/release-log.txt Tue Jun 11 16:56:40 2013
@@ -1,5 +1,6 @@
 -- Oozie 4.1.0 release (trunk - unreleased)
 
+OOZIE-1374 Make all unit tests run with Hadoop 2 (rohini)
 OOZIE-1394 Fix Bugs in Job and SLA Events (mona)
 OOZIE-1315 Refactor classes from launcher jar into Oozie sharelib (rkanter)
 OOZIE-1377 OpenJPA runtime enhancement should be disabled and update OpenJPA to 2.2.2 (tucu)

Modified: oozie/trunk/sharelib/pig/pom.xml
URL: http://svn.apache.org/viewvc/oozie/trunk/sharelib/pig/pom.xml?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/sharelib/pig/pom.xml (original)
+++ oozie/trunk/sharelib/pig/pom.xml Tue Jun 11 16:56:40 2013
@@ -41,6 +41,7 @@
         <dependency>
             <groupId>org.apache.pig</groupId>
             <artifactId>pig</artifactId>
+            <classifier>${pig.classifier}</classifier>
             <scope>compile</scope>
         </dependency>
         <dependency>

Modified: oozie/trunk/tools/pom.xml
URL: http://svn.apache.org/viewvc/oozie/trunk/tools/pom.xml?rev=1491873&r1=1491872&r2=1491873&view=diff
==============================================================================
--- oozie/trunk/tools/pom.xml (original)
+++ oozie/trunk/tools/pom.xml Tue Jun 11 16:56:40 2013
@@ -48,10 +48,9 @@
             <scope>test</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-minicluster</artifactId>
-            <version>1.1.1</version>
-            <scope>compile</scope>
+            <groupId>org.apache.oozie</groupId>
+            <artifactId>oozie-hadoop-test</artifactId>
+            <scope>test</scope>
         </dependency>
          <dependency>
             <groupId>org.apache.oozie</groupId>