Posted to commits@oozie.apache.org by ro...@apache.org on 2016/09/27 19:05:31 UTC

oozie git commit: OOZIE-2582 Populating external child Ids for action failures (abhishekbafna via rohini)

Repository: oozie
Updated Branches:
  refs/heads/master 76b60cc82 -> e8a9b24b9


OOZIE-2582 Populating external child Ids for action failures (abhishekbafna via rohini)


Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/e8a9b24b
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/e8a9b24b
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/e8a9b24b

Branch: refs/heads/master
Commit: e8a9b24b9916df20cfbc1c48e5e1a755151d8245
Parents: 76b60cc
Author: Rohini Palaniswamy <ro...@apache.org>
Authored: Tue Sep 27 12:05:19 2016 -0700
Committer: Rohini Palaniswamy <ro...@apache.org>
Committed: Tue Sep 27 12:05:19 2016 -0700

----------------------------------------------------------------------
 .../action/hadoop/DistcpActionExecutor.java     | 27 +------
 .../action/hadoop/Hive2ActionExecutor.java      | 29 ++-----
 .../oozie/action/hadoop/HiveActionExecutor.java | 29 ++-----
 .../oozie/action/hadoop/JavaActionExecutor.java | 21 +++---
 .../action/hadoop/SparkActionExecutor.java      | 25 +------
 .../action/hadoop/SqoopActionExecutor.java      | 41 ++--------
 .../oozie/action/hadoop/TestDistcpMain.java     |  2 +
 examples/src/main/apps/hive/script.q            |  1 +
 release-log.txt                                 |  1 +
 .../apache/oozie/action/hadoop/DistcpMain.java  |  9 ++-
 .../apache/oozie/action/hadoop/HiveMain.java    |  5 +-
 .../action/hadoop/TestHiveActionExecutor.java   | 15 +---
 .../oozie/action/hadoop/TestHiveMain.java       |  8 +-
 .../apache/oozie/action/hadoop/Hive2Main.java   |  3 +-
 .../action/hadoop/TestHive2ActionExecutor.java  | 57 +++++++++++---
 .../oozie/action/hadoop/LauncherMain.java       | 79 ++++++++++----------
 .../apache/oozie/action/hadoop/ShellMain.java   |  2 +-
 .../org/apache/oozie/action/hadoop/PigMain.java |  8 +-
 .../oozie/action/hadoop/PigMainWithOldAPI.java  |  2 +-
 .../apache/oozie/action/hadoop/SparkMain.java   |  1 +
 .../apache/oozie/action/hadoop/SqoopMain.java   | 16 ++--
 .../action/hadoop/TestSqoopActionExecutor.java  | 22 +-----
 22 files changed, 158 insertions(+), 245 deletions(-)
----------------------------------------------------------------------
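
Before this change, each action executor (DistCp, Hive, Hive2, Spark, Sqoop)
overrode getActionData() to call readExternalChildIDs(), which parsed the
Hadoop job IDs out of the action's captured output properties. Those
properties only exist when the launcher completes normally, so failed
actions lost their external child IDs. This commit instead has the launcher
mains harvest the IDs from the launcher log and write them to the dedicated
oozie.action.externalChildIDs file from a finally block, so the IDs are
recorded whether the action succeeds or fails.

A minimal sketch of that launcher-side pattern (not the literal Oozie code;
runJob() and the log file name are hypothetical stand-ins):

    public final class LauncherSketch {
        public static void main(String[] args) throws Exception {
            String logFile = "launcher.log";   // assumed log location
            try {
                runJob();                      // may throw when the action fails
            }
            finally {
                // Harvest job IDs from the log and persist them regardless of
                // the outcome; this mirrors the finally blocks added to
                // DistcpMain, SparkMain and SqoopMain in this commit.
                writeExternalChildIDs(logFile);
            }
        }
        private static void runJob() throws Exception { /* launch Hadoop work */ }
        private static void writeExternalChildIDs(String logFile) { /* see LauncherMain */ }
    }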


http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
index 96726da..78cd257 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
@@ -18,24 +18,15 @@
 
 package org.apache.oozie.action.hadoop;
 
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.RunningJob;
 import org.apache.oozie.action.ActionExecutorException;
-import org.apache.oozie.action.ActionExecutor.Context;
-import org.apache.oozie.client.WorkflowAction;
 import org.apache.oozie.service.ConfigurationService;
-import org.apache.oozie.service.HadoopAccessorException;
-import org.apache.oozie.service.Services;
 import org.apache.oozie.util.XLog;
 import org.jdom.Element;
-import org.jdom.JDOMException;
+
+import java.util.ArrayList;
+import java.util.List;
 
 public class DistcpActionExecutor extends JavaActionExecutor{
     public static final String CONF_OOZIE_DISTCP_ACTION_MAIN_CLASS = "org.apache.oozie.action.hadoop.DistcpMain";
@@ -124,16 +115,4 @@ public class DistcpActionExecutor extends JavaActionExecutor{
     protected String getLauncherMain(Configuration launcherConf, Element actionXml) {
         return launcherConf.get(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS, CONF_OOZIE_DISTCP_ACTION_MAIN_CLASS);
     }
-
-    @Override
-    protected void getActionData(FileSystem actionFs, RunningJob runningJob, WorkflowAction action, Context context)
-            throws HadoopAccessorException, JDOMException, IOException, URISyntaxException {
-        super.getActionData(actionFs, runningJob, action, context);
-        readExternalChildIDs(action, context);
-    }
-
-    @Override
-    protected boolean getCaptureOutput(WorkflowAction action) throws JDOMException {
-        return true;
-    }
 }

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
index b5b1bf9..2aed936 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
@@ -18,24 +18,17 @@
 
 package org.apache.oozie.action.hadoop;
 
-import static org.apache.oozie.action.hadoop.LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS;
-
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.RunningJob;
 import org.apache.oozie.action.ActionExecutorException;
-import org.apache.oozie.client.WorkflowAction;
-import org.apache.oozie.service.HadoopAccessorException;
 import org.jdom.Element;
-import org.jdom.JDOMException;
 import org.jdom.Namespace;
 
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.apache.oozie.action.hadoop.LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS;
+
 public class Hive2ActionExecutor extends ScriptLanguageActionExecutor {
 
     private static final String HIVE2_MAIN_CLASS_NAME = "org.apache.oozie.action.hadoop.Hive2Main";
@@ -128,18 +121,6 @@ public class Hive2ActionExecutor extends ScriptLanguageActionExecutor {
         return conf;
     }
 
-    @Override
-    protected boolean getCaptureOutput(WorkflowAction action) throws JDOMException {
-        return true;
-    }
-
-    @Override
-    protected void getActionData(FileSystem actionFs, RunningJob runningJob, WorkflowAction action, Context context)
-            throws HadoopAccessorException, JDOMException, IOException, URISyntaxException {
-        super.getActionData(actionFs, runningJob, action, context);
-        readExternalChildIDs(action, context);
-    }
-
     /**
      * Return the sharelib name for the action.
      *

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
index c74e9e6..8e2453e 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
@@ -18,28 +18,21 @@
 
 package org.apache.oozie.action.hadoop;
 
-import static org.apache.oozie.action.hadoop.LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS;
-
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.RunningJob;
 import org.apache.oozie.action.ActionExecutorException;
-import org.apache.oozie.client.WorkflowAction;
 import org.apache.oozie.client.XOozieClient;
 import org.apache.oozie.service.ConfigurationService;
-import org.apache.oozie.service.HadoopAccessorException;
 import org.apache.oozie.service.HadoopAccessorService;
 import org.jdom.Element;
-import org.jdom.JDOMException;
 import org.jdom.Namespace;
 
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.apache.oozie.action.hadoop.LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS;
+
 public class HiveActionExecutor extends ScriptLanguageActionExecutor {
 
     private static final String HIVE_MAIN_CLASS_NAME = "org.apache.oozie.action.hadoop.HiveMain";
@@ -119,18 +112,6 @@ public class HiveActionExecutor extends ScriptLanguageActionExecutor {
         return conf;
     }
 
-    @Override
-    protected boolean getCaptureOutput(WorkflowAction action) throws JDOMException {
-        return true;
-    }
-
-    @Override
-    protected void getActionData(FileSystem actionFs, RunningJob runningJob, WorkflowAction action, Context context)
-            throws HadoopAccessorException, JDOMException, IOException, URISyntaxException {
-        super.getActionData(actionFs, runningJob, action, context);
-        readExternalChildIDs(action, context);
-    }
-
     /**
      * Return the sharelib name for the action.
      *

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
index ad07b57..65996d9 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
@@ -1465,6 +1465,17 @@ public class JavaActionExecutor extends ActionExecutor {
                         context.setExternalChildIDs(externalIDs);
                         LOG.info(XLog.STD, "Hadoop Jobs launched : [{0}]", externalIDs);
                     }
+                    else if (LauncherMapperHelper.hasOutputData(actionData)) {
+                        // Load stored Hadoop jobs ids and promote them as external child ids
+                        // This is for jobs launched with older release during upgrade to Oozie 4.3
+                        Properties props = PropertiesUtils.stringToProperties(actionData
+                                .get(LauncherMapper.ACTION_DATA_OUTPUT_PROPS));
+                        if (props.get(LauncherMain.HADOOP_JOBS) != null) {
+                            externalIDs = (String) props.get(LauncherMain.HADOOP_JOBS);
+                            context.setExternalChildIDs(externalIDs);
+                            LOG.info(XLog.STD, "Hadoop Jobs launched : [{0}]", externalIDs);
+                        }
+                    }
                 }
                 if (runningJob.isComplete()) {
                     // fetching action output and stats for the Map-Reduce action.
@@ -1566,16 +1577,6 @@ public class JavaActionExecutor extends ActionExecutor {
             throws HadoopAccessorException, JDOMException, IOException, URISyntaxException {
     }
 
-    protected final void readExternalChildIDs(WorkflowAction action, Context context) throws IOException {
-        if (action.getData() != null) {
-            // Load stored Hadoop jobs ids and promote them as external child ids
-            // See LauncherMain#writeExternalChildIDs for how they are written
-            Properties props = new Properties();
-            props.load(new StringReader(action.getData()));
-            context.setExternalChildIDs((String) props.get(LauncherMain.HADOOP_JOBS));
-        }
-    }
-
     protected boolean getCaptureOutput(WorkflowAction action) throws JDOMException {
         Element eConf = XmlUtils.parseXml(action.getConf());
         Namespace ns = eConf.getNamespace();
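
The fallback added above keeps upgrades working: a launcher started by an
older Oozie release still writes the job IDs into the output-properties
blob under the hadoopJobs key, and the executor now promotes them from
there when no dedicated external-child-IDs data is present. A sketch of
that parse, assuming PropertiesUtils.stringToProperties behaves like
java.util.Properties.load over a StringReader (the key name comes from
LauncherMain.HADOOP_JOBS):

    import java.io.IOException;
    import java.io.StringReader;
    import java.util.Properties;

    final class ChildIdFallbackSketch {
        static String externalChildIds(String outputProps) throws IOException {
            Properties props = new Properties();
            props.load(new StringReader(outputProps)); // parse the stored blob
            return props.getProperty("hadoopJobs");    // null when absent
        }

        public static void main(String[] args) throws IOException {
            // prints job_1474_0001,job_1474_0002
            System.out.println(externalChildIds(
                    "hadoopJobs=job_1474_0001,job_1474_0002\n"));
        }
    }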

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
index 15a641b..8e8d7d3 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
@@ -18,27 +18,22 @@
 
 package org.apache.oozie.action.hadoop;
 
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.RunningJob;
 import org.apache.oozie.action.ActionExecutorException;
 import org.apache.oozie.client.WorkflowAction;
 import org.apache.oozie.service.ConfigurationService;
-import org.apache.oozie.service.HadoopAccessorException;
 import org.apache.oozie.service.Services;
 import org.apache.oozie.service.SparkConfigurationService;
 import org.jdom.Element;
-import org.jdom.JDOMException;
 import org.jdom.Namespace;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
 public class SparkActionExecutor extends JavaActionExecutor {
     public static final String SPARK_MAIN_CLASS_NAME = "org.apache.oozie.action.hadoop.SparkMain";
     public static final String TASK_USER_PRECEDENCE = "mapreduce.task.classpath.user.precedence"; // hadoop-2
@@ -165,16 +160,4 @@ public class SparkActionExecutor extends JavaActionExecutor {
     protected String getLauncherMain(Configuration launcherConf, Element actionXml) {
         return launcherConf.get(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS, SPARK_MAIN_CLASS_NAME);
     }
-
-    @Override
-    protected void getActionData(FileSystem actionFs, RunningJob runningJob, WorkflowAction action, Context context)
-            throws HadoopAccessorException, JDOMException, IOException, URISyntaxException {
-        super.getActionData(actionFs, runningJob, action, context);
-        readExternalChildIDs(action, context);
-    }
-
-    @Override
-    protected boolean getCaptureOutput(WorkflowAction action) throws JDOMException {
-        return true;
-    }
 }

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
index 6813a37..6cee32a 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
@@ -18,16 +18,7 @@
 
 package org.apache.oozie.action.hadoop;
 
-import java.io.IOException;
-import java.io.StringReader;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Properties;
-import java.util.StringTokenizer;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.JobClient;
@@ -36,14 +27,18 @@ import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.oozie.action.ActionExecutorException;
 import org.apache.oozie.client.WorkflowAction;
-import org.apache.oozie.service.HadoopAccessorException;
 import org.apache.oozie.util.XConfiguration;
-import org.apache.oozie.util.XmlUtils;
 import org.apache.oozie.util.XLog;
+import org.apache.oozie.util.XmlUtils;
 import org.jdom.Element;
-import org.jdom.JDOMException;
 import org.jdom.Namespace;
 
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.StringTokenizer;
+
 public class SqoopActionExecutor extends JavaActionExecutor {
 
   public static final String OOZIE_ACTION_EXTERNAL_STATS_WRITE = "oozie.action.external.stats.write";
@@ -231,28 +226,6 @@ public class SqoopActionExecutor extends JavaActionExecutor {
     }
 
     /**
-     * Get the stats and external child IDs
-     *
-     * @param actionFs the FileSystem object
-     * @param runningJob the runningJob
-     * @param action the Workflow action
-     * @param context executor context
-     *
-     */
-    @Override
-    protected void getActionData(FileSystem actionFs, RunningJob runningJob, WorkflowAction action, Context context)
-            throws HadoopAccessorException, JDOMException, IOException, URISyntaxException{
-        super.getActionData(actionFs, runningJob, action, context);
-        readExternalChildIDs(action, context);
-    }
-
-    @Override
-    protected boolean getCaptureOutput(WorkflowAction action) throws JDOMException {
-        return true;
-    }
-
-
-    /**
      * Return the sharelib name for the action.
      *
      * @return returns <code>sqoop</code>.

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/core/src/test/java/org/apache/oozie/action/hadoop/TestDistcpMain.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestDistcpMain.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestDistcpMain.java
index 098995e..bf08ccb 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestDistcpMain.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestDistcpMain.java
@@ -71,6 +71,8 @@ public class TestDistcpMain extends MainTestCase {
         // Check normal execution
         DistcpMain.main(new String[]{inputDir.toString(), outputDir.toString()});
         assertTrue(getFileSystem().exists(outputDir));
+        assertTrue(hadoopIdsFile.exists());
+        assertNotNull(LauncherMapper.getLocalFileContentStr(hadoopIdsFile, "", -1));
         fs.delete(outputDir,true);
 
         // Check exception handling

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/examples/src/main/apps/hive/script.q
----------------------------------------------------------------------
diff --git a/examples/src/main/apps/hive/script.q b/examples/src/main/apps/hive/script.q
index 3abc757..37d6564 100644
--- a/examples/src/main/apps/hive/script.q
+++ b/examples/src/main/apps/hive/script.q
@@ -15,5 +15,6 @@
 -- See the License for the specific language governing permissions and
 -- limitations under the License.
 --
+DROP TABLE IF EXISTS test;
 CREATE EXTERNAL TABLE test (a INT) STORED AS TEXTFILE LOCATION '${INPUT}';
 INSERT OVERWRITE DIRECTORY '${OUTPUT}' SELECT * FROM test;

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index ef11bb3..10a183a 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -3,6 +3,7 @@
 
 -- Oozie 4.3.0 release
 
+OOZIE-2582 Populating external child Ids for action failures (abhishekbafna via rohini)
 OOZIE-2678 Oozie job -kill doesn't work with tez jobs (abhishekbafna via rohini)
 OOZIE-2676 Make hadoop-2 as the default profile (gezapeti via rkanter)
 OOZIE-2487 Temporary workaround for Java 8 Javadoc errors (abhishekbafna via rkanter)

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
----------------------------------------------------------------------
diff --git a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
index 6ac5ad6..2b84f92 100644
--- a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
+++ b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
@@ -38,8 +38,11 @@ public class DistcpMain extends JavaMain {
     private Constructor<?> construct;
     private Object[] constArgs;
     private static final String DISTCP_LOG4J_PROPS = "distcp-log4j.properties";
-    private static final Pattern[] DISTCP_JOB_IDS_PATTERNS = { Pattern.compile("Job complete: (job_\\S*)"),
-            Pattern.compile("Job (job_\\S*) completed successfully") };
+    private static final Pattern[] DISTCP_JOB_IDS_PATTERNS = {
+            Pattern.compile("Job complete: (job_\\S*)"),
+            Pattern.compile("Job (job_\\S*) completed successfully"),
+            Pattern.compile("Submitted application (application[0-9_]*)")
+    };
     public static void main(String[] args) throws Exception {
         run(DistcpMain.class, args);
     }
@@ -81,6 +84,7 @@ public class DistcpMain extends JavaMain {
             throw new JavaMainException(ex.getCause());
         }
         finally {
+            System.out.println("\n<<< Invocation of DistCp command completed <<<\n");
             writeExternalChildIDs(logFile, DISTCP_JOB_IDS_PATTERNS, "Distcp");
         }
     }
@@ -143,6 +147,7 @@ public class DistcpMain extends JavaMain {
         hadoopProps.setProperty("log4j.appender.jobid.layout.ConversionPattern", "%-4r [%t] %-5p %c %x - %m%n");
         hadoopProps.setProperty("log4j.logger.org.apache.hadoop.mapred", "INFO, jobid");
         hadoopProps.setProperty("log4j.logger.org.apache.hadoop.mapreduce.Job", "INFO, jobid");
+        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, jobid");
 
         String localProps = new File(DISTCP_LOG4J_PROPS).getAbsolutePath();
         OutputStream os1 = new FileOutputStream(localProps);
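
The extra logger matters because on YARN the client may only report the
application through org.apache.hadoop.yarn.client.api.impl.YarnClientImpl,
whose "Submitted application <id>" line the new pattern above captures.
Routing that logger into the same jobid appender puts those lines in the
log file the harvester scans. A hypothetical, minimal reconstruction of
that wiring (the appender details are assumed; only the YarnClientImpl
line is new in this commit):

    import java.util.Properties;

    final class JobIdLog4jSketch {
        static Properties jobIdLoggingProps(String logFile) {
            Properties p = new Properties();
            p.setProperty("log4j.appender.jobid", "org.apache.log4j.FileAppender");
            p.setProperty("log4j.appender.jobid.File", logFile);
            p.setProperty("log4j.appender.jobid.layout", "org.apache.log4j.PatternLayout");
            p.setProperty("log4j.logger.org.apache.hadoop.mapred", "INFO, jobid");
            p.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl",
                    "INFO, jobid");
            return p;
        }

        public static void main(String[] args) {
            jobIdLoggingProps("distcp-launcher.log").list(System.out);
        }
    }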

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
----------------------------------------------------------------------
diff --git a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
index 5866117..bbcaefa 100644
--- a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
+++ b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
@@ -42,8 +42,8 @@ import org.apache.hadoop.hive.conf.HiveConf;
 
 public class HiveMain extends LauncherMain {
     private static final Pattern[] HIVE_JOB_IDS_PATTERNS = {
-      Pattern.compile("Ended Job = (job_\\S*)"),
-      Pattern.compile("Executing on YARN cluster with App id (application[0-9_]*)")
+            Pattern.compile("Ended Job = (job_\\S*)"),
+            Pattern.compile("Submitted application (application[0-9_]*)")
     };
     private static final Set<String> DISALLOWED_HIVE_OPTIONS = new HashSet<String>();
 
@@ -167,6 +167,7 @@ public class HiveMain extends LauncherMain {
         hadoopProps.setProperty("log4j.appender.jobid.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
         hadoopProps.setProperty("log4j.logger.org.apache.hadoop.hive.ql.exec", "INFO, jobid");
         hadoopProps.setProperty("log4j.logger.SessionState", "INFO, jobid");
+        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, jobid");
 
         String localProps = new File(HIVE_L4J_PROPS).getAbsolutePath();
         OutputStream os1 = new FileOutputStream(localProps);

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java b/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
index b966d4b..12e1e91 100644
--- a/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
+++ b/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
@@ -27,7 +27,6 @@ import java.io.Writer;
 import java.text.MessageFormat;
 import java.util.Arrays;
 import java.util.Map;
-import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -181,14 +180,9 @@ public class TestHiveActionExecutor extends ActionExecutorTestCase {
             ae.check(context, context.getAction());
             assertTrue(launcherId.equals(context.getAction().getExternalId()));
             assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
-            assertNotNull(context.getAction().getData());
             ae.end(context, context.getAction());
             assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());
-            assertNotNull(context.getAction().getData());
-            Properties outputData = new Properties();
-            outputData.load(new StringReader(context.getAction().getData()));
-            assertTrue(outputData.containsKey(LauncherMain.HADOOP_JOBS));
-            assertEquals(outputData.get(LauncherMain.HADOOP_JOBS), context.getExternalChildIDs());
+            assertNotNull(context.getExternalChildIDs());
             //while this works in a real cluster, it does not with miniMR
             //assertTrue(outputData.getProperty(LauncherMain.HADOOP_JOBS).trim().length() > 0);
             //assertTrue(!actionData.get(LauncherMapper.ACTION_DATA_EXTERNAL_CHILD_IDS).isEmpty());
@@ -215,14 +209,9 @@ public class TestHiveActionExecutor extends ActionExecutorTestCase {
             ae.check(context, context.getAction());
             assertTrue(launcherId.equals(context.getAction().getExternalId()));
             assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
-            assertNotNull(context.getAction().getData());
             ae.end(context, context.getAction());
             assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());
-            assertNotNull(context.getAction().getData());
-            Properties outputData = new Properties();
-            outputData.load(new StringReader(context.getAction().getData()));
-            assertTrue(outputData.containsKey(LauncherMain.HADOOP_JOBS));
-            assertEquals(outputData.get(LauncherMain.HADOOP_JOBS), context.getExternalChildIDs());
+            assertNotNull(context.getAction().getExternalChildIDs());
             //while this works in a real cluster, it does not with miniMR
             //assertTrue(outputData.getProperty(LauncherMain.HADOOP_JOBS).trim().length() > 0);
             //assertTrue(!actionData.get(LauncherMapper.ACTION_DATA_EXTERNAL_CHILD_IDS).isEmpty());

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveMain.java
----------------------------------------------------------------------
diff --git a/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveMain.java b/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveMain.java
index d72e298..879ae2d 100644
--- a/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveMain.java
+++ b/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveMain.java
@@ -121,11 +121,11 @@ public class TestHiveMain extends MainTestCase {
             assertEquals(props.getProperty("oozie.hive.args.size"), "1");
             File hiveSite = new File(classPathDir, "hive-site.xml");
 
-            File outputDataFile = new File(getTestCaseDir(), "outputdata.properties");
+            File externalChildIdsFile = new File(getTestCaseDir(), "externalChildIDs");
 
             setSystemProperty("oozie.launcher.job.id", "" + System.currentTimeMillis());
             setSystemProperty("oozie.action.conf.xml", actionXml.getAbsolutePath());
-            setSystemProperty("oozie.action.output.properties", outputDataFile.getAbsolutePath());
+            setSystemProperty("oozie.action.externalChildIDs", externalChildIdsFile.getAbsolutePath());
 
             new LauncherSecurityManager();
             String user = System.getProperty("user.name");
@@ -155,8 +155,8 @@ public class TestHiveMain extends MainTestCase {
                 MiniHCatServer.resetHiveConfStaticVariables();
             }
 
-            assertTrue(outputDataFile.exists());
-            assertNotNull(LauncherMapper.getLocalFileContentStr(outputDataFile, "", -1));
+            assertTrue(externalChildIdsFile.exists());
+            assertNotNull(LauncherMapper.getLocalFileContentStr(externalChildIdsFile, "", -1));
 
 //TODO: I cannot figure out why when log file is not created in this testcase, it works when running in Launcher
 //            Properties props = new Properties();

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
----------------------------------------------------------------------
diff --git a/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
index e122608..54f7039 100644
--- a/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
+++ b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
@@ -39,7 +39,8 @@ import org.apache.hive.beeline.BeeLine;
 
 public class Hive2Main extends LauncherMain {
     private static final Pattern[] HIVE2_JOB_IDS_PATTERNS = {
-            Pattern.compile("Ended Job = (job_\\S*)")
+            Pattern.compile("Ended Job = (job_\\S*)"),
+            Pattern.compile("Submitted application (application[0-9_]*)")
     };
     private static final Set<String> DISALLOWED_BEELINE_OPTIONS = new HashSet<String>();
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java b/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
index 5963e42..b023b79 100644
--- a/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
+++ b/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
@@ -25,7 +25,6 @@ import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -226,11 +225,8 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
             assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
             ae.end(context, context.getAction());
             assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());
-            assertNotNull(context.getAction().getData());
-            Properties outputData = new Properties();
-            outputData.load(new StringReader(context.getAction().getData()));
-            assertTrue(outputData.containsKey(LauncherMain.HADOOP_JOBS));
-            assertEquals(outputData.get(LauncherMain.HADOOP_JOBS), context.getExternalChildIDs());
+            // Disabled external child id check until Hive version is upgraded to 0.14+
+            //assertNotNull(context.getExternalChildIDs());
             assertTrue(fs.exists(outputDir));
             assertTrue(fs.isDirectory(outputDir));
         }
@@ -265,14 +261,53 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
             assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
             ae.end(context, context.getAction());
             assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());
-            assertNotNull(context.getAction().getData());
-            Properties outputData = new Properties();
-            outputData.load(new StringReader(context.getAction().getData()));
-            assertTrue(outputData.containsKey(LauncherMain.HADOOP_JOBS));
-            assertEquals(outputData.get(LauncherMain.HADOOP_JOBS), context.getExternalChildIDs());
+            // Disabled external child id check until Hive version is upgraded to 0.14+
+            //assertNotNull(context.getExternalChildIDs());
             assertTrue(fs.exists(outputDir));
             assertTrue(fs.isDirectory(outputDir));
         }
+        // Negative testcase with incorrect hive-query.
+        {
+            String query = getHive2BadScript(inputDir.toString(), outputDir.toString());
+            Writer dataWriter = new OutputStreamWriter(fs.create(new Path(inputDir, DATA_FILENAME)));
+            dataWriter.write(SAMPLE_DATA_TEXT);
+            dataWriter.close();
+            Context context = createContext(getQueryActionXml(query));
+            final RunningJob launcherJob = submitAction(context, Namespace.getNamespace("uri:oozie:hive2-action:0.2"));
+            String launcherId = context.getAction().getExternalId();
+            waitFor(200 * 1000, new Predicate() {
+                @Override
+                public boolean evaluate() throws Exception {
+                    return launcherJob.isComplete();
+                }
+            });
+            assertTrue(launcherJob.isSuccessful());
+            Configuration conf = new XConfiguration();
+            conf.set("user.name", getTestUser());
+            Map<String, String> actionData = LauncherMapperHelper.getActionData(getFileSystem(), context.getActionDir(),
+                    conf);
+            assertFalse(LauncherMapperHelper.hasIdSwap(actionData));
+            Hive2ActionExecutor ae = new Hive2ActionExecutor();
+            ae.check(context, context.getAction());
+            assertTrue(launcherId.equals(context.getAction().getExternalId()));
+            assertEquals("FAILED/KILLED", context.getAction().getExternalStatus());
+            ae.end(context, context.getAction());
+            assertEquals(WorkflowAction.Status.ERROR, context.getAction().getStatus());
+            assertNull(context.getExternalChildIDs());
+        }
+    }
+
+    private String getHive2BadScript(String inputPath, String outputPath) {
+        StringBuilder buffer = new StringBuilder(NEW_LINE);
+        buffer.append("set -v;").append(NEW_LINE);
+        buffer.append("DROP TABLE IF EXISTS test;").append(NEW_LINE);
+        buffer.append("CREATE EXTERNAL TABLE test (a INT) STORED AS");
+        buffer.append(NEW_LINE).append("TEXTFILE LOCATION '");
+        buffer.append(inputPath).append("';").append(NEW_LINE);
+        buffer.append("INSERT OVERWRITE DIRECTORY '");
+        buffer.append(outputPath).append("'").append(NEW_LINE);
+        buffer.append("SELECT (a-1) FROM test-bad;").append(NEW_LINE);
+        return buffer.toString();
     }
 
     private RunningJob submitAction(Context context, Namespace ns) throws Exception {

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
index 7c5713c..815f60b 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
@@ -29,8 +29,10 @@ import java.io.OutputStream;
 import java.io.StringWriter;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.LinkedHashSet;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -42,6 +44,11 @@ import org.apache.hadoop.mapred.JobConf;
 
 public abstract class LauncherMain {
 
+    public static final String ACTION_PREFIX = "oozie.action.";
+    public static final String EXTERNAL_CHILD_IDS = ACTION_PREFIX + "externalChildIDs";
+    public static final String EXTERNAL_ACTION_STATS = ACTION_PREFIX + "stats.properties";
+    public static final String EXTERNAL_STATS_WRITE = ACTION_PREFIX + "external.stats.write";
+    public static final String OUTPUT_PROPERTIES = ACTION_PREFIX + "output.properties";
     public static final String HADOOP_JOBS = "hadoopJobs";
     public static final String MAPREDUCE_JOB_TAGS = "mapreduce.job.tags";
     public static final String TEZ_APPLICATION_TAGS = "tez.application.tags";
@@ -53,58 +60,52 @@ public abstract class LauncherMain {
         main.run(args);
     }
 
-    protected static Properties getHadoopJobIds(String logFile, Pattern[] patterns) throws IOException {
-        Properties props = new Properties();
-        StringBuffer sb = new StringBuffer(100);
+    protected static String getHadoopJobIds(String logFile, Pattern[] patterns) {
+        Set<String> jobIds = new LinkedHashSet<String>();
         if (!new File(logFile).exists()) {
-            System.err.println("Log file: " + logFile + "  not present. Therefore no Hadoop jobids found");
-            props.setProperty(HADOOP_JOBS, "");
+            System.err.println("Log file: " + logFile + "  not present. Therefore no Hadoop job IDs found.");
         }
         else {
-            BufferedReader br = new BufferedReader(new FileReader(logFile));
-            String line = br.readLine();
-            String separator = "";
-            while (line != null) {
-                for (Pattern pattern : patterns) {
-                    Matcher matcher = pattern.matcher(line);
-                    if (matcher.find()) {
-                        String jobId = matcher.group(1);
-                        if (StringUtils.isEmpty(jobId) || jobId.equalsIgnoreCase("NULL")) {
-                            continue;
+            try (BufferedReader br = new BufferedReader(new FileReader(logFile))) {
+                String line = br.readLine();
+                while (line != null) {
+                    for (Pattern pattern : patterns) {
+                        Matcher matcher = pattern.matcher(line);
+                        if (matcher.find()) {
+                            String jobId = matcher.group(1);
+                            if (StringUtils.isEmpty(jobId) || jobId.equalsIgnoreCase("NULL")) {
+                                continue;
+                            }
+                            jobId = jobId.replaceAll("application", "job");
+                            jobIds.add(jobId);
                         }
-                        jobId = jobId.replaceAll("application","job");
-                        sb.append(separator).append(jobId);
-                        separator = ",";
                     }
+                    line = br.readLine();
                 }
-                line = br.readLine();
+            } catch (IOException e) {
+                System.out.println("WARN: Error getting Hadoop Job IDs. logFile: " + logFile);
+                e.printStackTrace(System.out);
             }
-            br.close();
-            props.setProperty(HADOOP_JOBS, sb.toString());
         }
-        return props;
+        return jobIds.isEmpty() ? null : StringUtils.join(jobIds, ",");
     }
 
     protected static void writeExternalChildIDs(String logFile, Pattern[] patterns, String name) {
         // Harvesting and recording Hadoop Job IDs
-        // See JavaActionExecutor#readExternalChildIDs for how they are read
-        try {
-            Properties jobIds = getHadoopJobIds(logFile, patterns);
-            File file = new File(System.getProperty(LauncherMapper.ACTION_PREFIX
-                    + LauncherMapper.ACTION_DATA_OUTPUT_PROPS));
-            OutputStream os = new FileOutputStream(file);
-            try {
-                jobIds.store(os, "");
+        String jobIds = getHadoopJobIds(logFile, patterns);
+        if (jobIds != null) {
+            File externalChildIdsFile = new File(System.getProperty(EXTERNAL_CHILD_IDS));
+            try (OutputStream externalChildIdsStream = new FileOutputStream(externalChildIdsFile)) {
+                externalChildIdsStream.write(jobIds.getBytes());
+                System.out.println("Hadoop Job IDs executed by " + name + ": " + jobIds);
+                System.out.println();
+            } catch (IOException e) {
+                System.out.println("WARN: Error while writing to external child ids file: " +
+                        System.getProperty(EXTERNAL_CHILD_IDS));
+                e.printStackTrace(System.out);
             }
-            finally {
-                os.close();
-            }
-            System.out.println(" Hadoop Job IDs executed by " + name + ": " + jobIds.getProperty(HADOOP_JOBS));
-            System.out.println();
-        }
-        catch (Exception e) {
-            System.out.println("WARN: Error getting Hadoop Job IDs executed by " + name);
-            e.printStackTrace(System.out);
+        } else {
+            System.out.println("No child hadoop job is executed.");
         }
     }
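
The rewritten harvester deduplicates IDs with a LinkedHashSet (first-seen
order is kept), rewrites YARN application IDs to the job_ form, skips empty
or NULL matches, and returns null instead of an empty string when nothing
was found, which is what lets writeExternalChildIDs() skip creating the
external-child-IDs file entirely. A runnable sketch of those semantics with
invented log lines (the two patterns are taken from the Hive mains above):

    import java.util.LinkedHashSet;
    import java.util.Set;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    final class HarvestSketch {
        private static final Pattern[] PATTERNS = {
                Pattern.compile("Ended Job = (job_\\S*)"),
                Pattern.compile("Submitted application (application[0-9_]*)")
        };

        public static void main(String[] args) {
            String[] log = {
                    "Submitted application application_1474_0001",
                    "Ended Job = job_1474_0001",   // same job, seen twice
                    "Ended Job = NULL"             // filtered out
            };
            Set<String> ids = new LinkedHashSet<String>();
            for (String line : log) {
                for (Pattern p : PATTERNS) {
                    Matcher m = p.matcher(line);
                    if (m.find() && !m.group(1).isEmpty()
                            && !m.group(1).equalsIgnoreCase("NULL")) {
                        // normalize application_* to job_*, as the diff does
                        ids.add(m.group(1).replaceAll("application", "job"));
                    }
                }
            }
            // prints job_1474_0001 exactly once
            System.out.println(ids.isEmpty() ? null : String.join(",", ids));
        }
    }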
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/ShellMain.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/ShellMain.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/ShellMain.java
index 680dbfc..1e9d8af 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/ShellMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/ShellMain.java
@@ -260,7 +260,7 @@ public class ShellMain extends LauncherMain {
 
             try {
                 if (needCaptured) {
-                    File file = new File(System.getProperty(LauncherMapper.ACTION_PREFIX + LauncherMapper.ACTION_DATA_OUTPUT_PROPS));
+                    File file = new File(System.getProperty(OUTPUT_PROPERTIES));
                     os = new BufferedWriter(new FileWriter(file));
                 }
                 while ((line = reader.readLine()) != null) {

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
----------------------------------------------------------------------
diff --git a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
index 90354f3..87c4cc4 100644
--- a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
+++ b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
@@ -47,14 +47,11 @@ import java.util.regex.Pattern;
 
 public class PigMain extends LauncherMain {
     private static final Set<String> DISALLOWED_PIG_OPTIONS = new HashSet<String>();
-    public static final String ACTION_PREFIX = "oozie.action.";
-    public static final String EXTERNAL_CHILD_IDS = ACTION_PREFIX + "externalChildIDs";
-    public static final String EXTERNAL_ACTION_STATS = ACTION_PREFIX + "stats.properties";
-    public static final String EXTERNAL_STATS_WRITE = ACTION_PREFIX + "external.stats.write";
     public static final int STRING_BUFFER_SIZE = 100;
 
     private static final Pattern[] PIG_JOB_IDS_PATTERNS = {
-      Pattern.compile("HadoopJobId: (job_\\S*)")
+            Pattern.compile("HadoopJobId: (job_\\S*)"),
+            Pattern.compile("Submitted application (application[0-9_]*)")
     };
 
     static {
@@ -185,6 +182,7 @@ public class PigMain extends LauncherMain {
             hadoopProps.setProperty("log4j.appender.B.file", logFile);
             hadoopProps.setProperty("log4j.appender.B.layout", "org.apache.log4j.PatternLayout");
             hadoopProps.setProperty("log4j.appender.B.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
+            hadoopProps.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, B");
 
             String localProps = new File("piglog4j.properties").getAbsolutePath();
             OutputStream os1 = new FileOutputStream(localProps);

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java
----------------------------------------------------------------------
diff --git a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java
index a5291d9..b84e2b5 100644
--- a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java
+++ b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java
@@ -248,7 +248,7 @@ public class PigMainWithOldAPI extends LauncherMain {
 
         // harvesting and recording Hadoop Job IDs
         Properties jobIds = getHadoopJobIds(logFile);
-        File file = new File(System.getProperty(LauncherMapper.ACTION_PREFIX + LauncherMapper.ACTION_DATA_OUTPUT_PROPS));
+        File file = new File(System.getProperty(OUTPUT_PROPERTIES));
         os = new FileOutputStream(file);
         jobIds.store(os, "");
         os.close();

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
index 38e8e8c..bf2869b 100644
--- a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
+++ b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
@@ -212,6 +212,7 @@ public class SparkMain extends LauncherMain {
             runSpark(sparkArgs.toArray(new String[sparkArgs.size()]));
         }
         finally {
+            System.out.println("\n<<< Invocation of Spark command completed <<<\n");
             writeExternalChildIDs(logFile, SPARK_JOB_IDS_PATTERNS, "Spark");
         }
     }

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
----------------------------------------------------------------------
diff --git a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
index 623fd2e..cb65eb8 100644
--- a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
+++ b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
@@ -37,7 +37,9 @@ public class SqoopMain extends LauncherMain {
     public static final String SQOOP_SITE_CONF = "sqoop-site.xml";
 
     private static final Pattern[] SQOOP_JOB_IDS_PATTERNS = {
-      Pattern.compile("Job complete: (job_\\S*)"), Pattern.compile("Job (job_\\S*) completed successfully")
+            Pattern.compile("Job complete: (job_\\S*)"),
+            Pattern.compile("Job (job_\\S*) has completed successfully"),
+            Pattern.compile("Submitted application (application[0-9_]*)")
     };
 
     private static final String SQOOP_LOG4J_PROPS = "sqoop-log4j.properties";
@@ -135,6 +137,7 @@ public class SqoopMain extends LauncherMain {
         hadoopProps.setProperty("log4j.appender.jobid.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
         hadoopProps.setProperty("log4j.logger.org.apache.hadoop.mapred", "INFO, jobid, A");
         hadoopProps.setProperty("log4j.logger.org.apache.hadoop.mapreduce.Job", "INFO, jobid, A");
+        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, jobid");
 
         String localProps = new File(SQOOP_LOG4J_PROPS).getAbsolutePath();
         OutputStream os1 = new FileOutputStream(localProps);
@@ -182,13 +185,10 @@ public class SqoopMain extends LauncherMain {
                 }
             }
         }
-
-        System.out.println();
-        System.out.println("<<< Invocation of Sqoop command completed <<<");
-        System.out.println();
-
-        // harvesting and recording Hadoop Job IDs
-        writeExternalChildIDs(logFile, SQOOP_JOB_IDS_PATTERNS, "Sqoop");
+        finally {
+            System.out.println("\n<<< Invocation of Sqoop command completed <<<\n");
+            writeExternalChildIDs(logFile, SQOOP_JOB_IDS_PATTERNS, "Sqoop");
+        }
     }
 
     protected void runSqoopJob(String[] args) throws Exception {

http://git-wip-us.apache.org/repos/asf/oozie/blob/e8a9b24b/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java b/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
index 6474092..166d939 100644
--- a/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
+++ b/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
@@ -195,7 +195,6 @@ public class TestSqoopActionExecutor extends ActionExecutorTestCase {
         ae.check(context, context.getAction());
         assertTrue(launcherId.equals(context.getAction().getExternalId()));
         assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
-        assertNotNull(context.getAction().getData());
         assertNotNull(context.getAction().getExternalChildIDs());
         ae.end(context, context.getAction());
         assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());
@@ -215,12 +214,6 @@ public class TestSqoopActionExecutor extends ActionExecutorTestCase {
         }
         br.close();
         assertEquals(3, count);
-
-        assertNotNull(context.getAction().getData());
-        Properties outputData = new Properties();
-        outputData.load(new StringReader(context.getAction().getData()));
-        assertTrue(outputData.containsKey(LauncherMain.HADOOP_JOBS));
-        assertTrue(outputData.getProperty(LauncherMain.HADOOP_JOBS).trim().length() > 0);
     }
 
     public void testSqoopEval() throws Exception {
@@ -243,20 +236,13 @@ public class TestSqoopActionExecutor extends ActionExecutorTestCase {
         ae.check(context, context.getAction());
         assertTrue(launcherId.equals(context.getAction().getExternalId()));
         assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
-        assertNotNull(context.getAction().getExternalChildIDs());
-        assertEquals(0, context.getAction().getExternalChildIDs().length());
+        assertNull(context.getAction().getExternalChildIDs());
         ae.end(context, context.getAction());
         assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());
 
         String hadoopCounters = context.getVar(MapReduceActionExecutor.HADOOP_COUNTERS);
         assertNotNull(hadoopCounters);
         assertTrue(hadoopCounters.isEmpty());
-
-        assertNotNull(context.getAction().getData());
-        Properties outputData = new Properties();
-        outputData.load(new StringReader(context.getAction().getData()));
-        assertTrue(outputData.containsKey(LauncherMain.HADOOP_JOBS));
-        assertEquals(0, outputData.getProperty(LauncherMain.HADOOP_JOBS).trim().length());
     }
 
     public void testSqoopActionFreeFormQuery() throws Exception {
@@ -306,12 +292,6 @@ public class TestSqoopActionExecutor extends ActionExecutorTestCase {
             br.close();
         }
         assertEquals(3, count);
-
-        assertNotNull(context.getAction().getData());
-        Properties outputData = new Properties();
-        outputData.load(new StringReader(context.getAction().getData()));
-        assertTrue(outputData.containsKey(LauncherMain.HADOOP_JOBS));
-        assertTrue(outputData.getProperty(LauncherMain.HADOOP_JOBS).trim().length() > 0);
     }