Posted to commits@oozie.apache.org by ge...@apache.org on 2016/10/10 11:52:44 UTC

[45/50] [abbrv] oozie git commit: Merge branch 'apache_forked_master' into apache_forked_oya

Merge branch 'apache_forked_master' into apache_forked_oya

 Conflicts:
	core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
	core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
	core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
	core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
	core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
	core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
	core/src/main/java/org/apache/oozie/service/ShareLibService.java
	core/src/main/java/org/apache/oozie/util/JobUtils.java
	core/src/main/java/org/apache/oozie/util/NamedThreadFactory.java
	core/src/main/resources/oozie-default.xml
	core/src/test/java/org/apache/oozie/QueryServlet.java
	core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
	core/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
	core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
	docs/src/site/twiki/DG_QuickStart.twiki
	docs/src/site/twiki/ENG_Building.twiki
	hadooplibs/hadoop-auth-1/pom.xml
	hadooplibs/hadoop-auth-2/pom.xml
	hadooplibs/hadoop-distcp-1/pom.xml
	hadooplibs/hadoop-distcp-2/pom.xml
	hadooplibs/hadoop-distcp-3/pom.xml
	hadooplibs/hadoop-utils-0.23/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
	hadooplibs/hadoop-utils-0.23/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
	hadooplibs/hadoop-utils-1/pom.xml
	hadooplibs/hadoop-utils-1/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
	hadooplibs/hadoop-utils-2/pom.xml
	hadooplibs/hadoop-utils-2/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
	hadooplibs/hadoop-utils-3/pom.xml
	hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
	hadooplibs/pom.xml
	pom.xml
	release-log.txt
	sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
	sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
	sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/OozieLauncherOutputFormat.java
	sharelib/spark/pom.xml
	sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java

Change-Id: I6697e098f84051365e3b11d93eb6effe124ed47d


Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/c49f382b
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/c49f382b
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/c49f382b

Branch: refs/heads/oya
Commit: c49f382bef2d0b101dda9a40cdf36c4cba454f85
Parents: ed72570 3eca3c2
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Fri Sep 30 12:33:27 2016 +0200
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Fri Sep 30 12:33:27 2016 +0200

----------------------------------------------------------------------
 bin/test-patch                                  |    2 +-
 bin/test-patch-10-compile                       |    4 +-
 client/pom.xml                                  |    4 +-
 .../java/org/apache/oozie/cli/OozieCLI.java     |    5 +-
 .../java/org/apache/oozie/client/BundleJob.java |    4 +-
 .../apache/oozie/client/CoordinatorAction.java  |    2 +-
 .../org/apache/oozie/client/CoordinatorJob.java |    4 +-
 .../main/java/org/apache/oozie/client/Job.java  |    6 +-
 .../org/apache/oozie/client/OozieClient.java    |   23 +-
 .../java/org/apache/oozie/client/SLAEvent.java  |   46 +-
 .../org/apache/oozie/client/WorkflowAction.java |    2 +-
 .../org/apache/oozie/client/WorkflowJob.java    |    2 +-
 .../org/apache/oozie/client/XOozieClient.java   |   31 +-
 .../org/apache/oozie/client/rest/JsonTags.java  |  438 +++---
 .../org/apache/oozie/client/rest/JsonUtils.java |    9 +-
 .../apache/oozie/client/rest/RestConstants.java |  192 +--
 .../src/main/resources/oozie-workflow-0.5.xsd   |    1 +
 core/pom.xml                                    |   45 +-
 .../org/apache/oozie/CoordinatorEngine.java     |    6 +-
 .../main/java/org/apache/oozie/DagEngine.java   |    1 +
 .../main/java/org/apache/oozie/ErrorCode.java   |    4 +-
 .../org/apache/oozie/LocalOozieClientCoord.java |   25 +
 .../org/apache/oozie/action/ActionExecutor.java |   46 +-
 .../action/hadoop/DistcpActionExecutor.java     |   24 +-
 .../oozie/action/hadoop/FsActionExecutor.java   |    9 +-
 .../action/hadoop/HCatCredentialHelper.java     |    8 +-
 .../oozie/action/hadoop/HCatCredentials.java    |   49 +-
 .../oozie/action/hadoop/HadoopELFunctions.java  |    5 -
 .../action/hadoop/Hive2ActionExecutor.java      |   30 +-
 .../oozie/action/hadoop/Hive2Credentials.java   |   17 +-
 .../oozie/action/hadoop/HiveActionExecutor.java |   28 +-
 .../oozie/action/hadoop/JavaActionExecutor.java |   75 +-
 .../action/hadoop/LauncherMapperHelper.java     |   21 +-
 .../action/hadoop/MapReduceActionExecutor.java  |    6 +
 .../action/hadoop/ShellActionExecutor.java      |   10 +
 .../action/hadoop/SparkActionExecutor.java      |   30 +-
 .../action/hadoop/SqoopActionExecutor.java      |   25 +-
 .../action/oozie/SubWorkflowActionExecutor.java |   13 +-
 .../org/apache/oozie/client/rest/JsonBean.java  |    4 +-
 .../bundle/BundleCoordSubmitXCommand.java       |   77 +
 .../command/bundle/BundleStartXCommand.java     |    3 +-
 .../command/coord/CoordActionCheckXCommand.java |    8 +-
 .../coord/CoordActionInputCheckXCommand.java    |   13 +-
 .../coord/CoordActionsIgnoreXCommand.java       |    2 +-
 .../oozie/command/coord/CoordRerunXCommand.java |    3 +-
 .../command/coord/CoordSubmitXCommand.java      |    6 +-
 .../oozie/command/wf/ActionCheckXCommand.java   |    2 +-
 .../oozie/command/wf/ActionEndXCommand.java     |    2 +-
 .../oozie/command/wf/ActionStartXCommand.java   |   40 +-
 .../apache/oozie/command/wf/ActionXCommand.java |   92 +-
 .../command/wf/CompletedActionXCommand.java     |    2 +-
 .../command/wf/ForkedActionStartXCommand.java   |    7 +
 .../apache/oozie/command/wf/SignalXCommand.java |   65 +-
 .../oozie/command/wf/SubmitMRXCommand.java      |    3 -
 .../apache/oozie/command/wf/SubmitXCommand.java |   24 +
 .../wf/WorkflowNotificationXCommand.java        |    6 +
 .../oozie/compression/CompressionCodec.java     |    8 +-
 .../input/dependency/CoordInputDependency.java  |   34 +-
 .../input/logic/CoordInputLogicEvaluator.java   |    6 +-
 .../org/apache/oozie/dependency/URIHandler.java |   34 +-
 .../dependency/hcat/HCatDependencyCache.java    |   20 +-
 .../java/org/apache/oozie/event/EventQueue.java |   18 +-
 .../oozie/executor/jpa/BulkJPAExecutor.java     |   91 +-
 .../jpa/BundleJobInfoGetJPAExecutor.java        |   14 +-
 .../executor/jpa/BundleJobQueryExecutor.java    |    2 +-
 .../jpa/CoordJobInfoGetJPAExecutor.java         |   14 +-
 .../apache/oozie/executor/jpa/JPAExecutor.java  |    4 +-
 .../jpa/WorkflowsJobGetJPAExecutor.java         |   42 +-
 .../org/apache/oozie/jms/ConnectionContext.java |   18 +-
 .../org/apache/oozie/jms/MessageHandler.java    |    2 +-
 .../java/org/apache/oozie/lock/MemoryLocks.java |   82 +-
 .../oozie/service/ActionConfFileType.java       |   56 +
 .../org/apache/oozie/service/ActionService.java |    5 +
 .../oozie/service/AuthorizationService.java     |    6 +-
 .../oozie/service/CallableQueueService.java     |    8 +-
 .../oozie/service/ConfigurationService.java     |    1 +
 .../oozie/service/HadoopAccessorService.java    |   98 +-
 .../oozie/service/LiteWorkflowStoreService.java |   13 +-
 .../oozie/service/MemoryLocksService.java       |    9 +-
 .../apache/oozie/service/RecoveryService.java   |  107 +-
 .../apache/oozie/service/SchedulerService.java  |   11 +-
 .../oozie/service/SchemaCheckerService.java     |    8 +-
 .../java/org/apache/oozie/service/Service.java  |   12 +-
 .../apache/oozie/service/ShareLibService.java   |    8 +-
 .../apache/oozie/service/ZKLocksService.java    |   88 +-
 .../org/apache/oozie/service/ZKUUIDService.java |    2 +-
 .../org/apache/oozie/sla/SLACalculator.java     |    2 +-
 .../apache/oozie/sla/SLACalculatorMemory.java   |   13 +-
 .../org/apache/oozie/sla/SLASummaryBean.java    |    8 +-
 .../apache/oozie/store/StoreStatusFilter.java   |  138 +-
 .../org/apache/oozie/util/Instrumentable.java   |    2 +-
 .../apache/oozie/util/JaasConfiguration.java    |   16 +-
 .../java/org/apache/oozie/util/JobUtils.java    |    4 +-
 .../org/apache/oozie/util/JobsFilterUtils.java  |    4 +
 .../oozie/util/MetricsInstrumentation.java      |   13 +
 .../apache/oozie/util/NamedThreadFactory.java   |   38 +
 .../apache/oozie/util/PriorityDelayQueue.java   |    7 +-
 .../oozie/util/TimestampedMessageParser.java    |    3 +-
 .../java/org/apache/oozie/util/XCallable.java   |   16 +-
 .../org/apache/oozie/workflow/WorkflowApp.java  |    4 +-
 .../apache/oozie/workflow/WorkflowInstance.java |   40 +-
 .../org/apache/oozie/workflow/WorkflowLib.java  |   18 +-
 .../oozie/workflow/lite/ActionNodeDef.java      |    5 +-
 .../oozie/workflow/lite/ControlNodeHandler.java |    6 +
 .../workflow/lite/LiteWorkflowAppParser.java    |  382 +----
 .../workflow/lite/LiteWorkflowValidator.java    |  351 +++++
 .../org/apache/oozie/workflow/lite/NodeDef.java |   87 +-
 .../apache/oozie/workflow/lite/NodeHandler.java |   28 +-
 core/src/main/resources/oozie-default.xml       |   71 +-
 .../org/apache/hadoop/examples/SleepJob.java    |    4 +-
 .../action/hadoop/ActionExecutorTestCase.java   |    2 -
 .../oozie/action/hadoop/TestDistcpMain.java     |    6 +-
 .../action/hadoop/TestHCatCredentials.java      |  258 ++++
 .../action/hadoop/TestJavaActionExecutor.java   |   49 +-
 .../action/hadoop/TestShellActionExecutor.java  |   67 +-
 .../oozie/TestSubWorkflowActionExecutor.java    |   83 +-
 .../org/apache/oozie/client/TestOozieCLI.java   |   35 +-
 .../oozie/client/TestWorkflowXClient.java       |    4 +-
 .../bundle/TestBundleChangeXCommand.java        |    5 +-
 .../bundle/TestBundleSubmitXCommand.java        |   44 +-
 .../coord/TestAbandonedCoordChecker.java        |   17 +-
 .../TestCoordActionInputCheckXCommand.java      |    4 +-
 .../coord/TestCoordActionsKillXCommand.java     |    1 +
 .../command/coord/TestCoordChangeXCommand.java  |    3 +
 .../command/coord/TestCoordRerunXCommand.java   |   50 +-
 .../command/coord/TestCoordSubmitXCommand.java  |    2 +-
 .../command/wf/TestActionCheckXCommand.java     |    1 -
 .../command/wf/TestActionStartXCommand.java     |    4 -
 .../oozie/command/wf/TestActionUserRetry.java   |  215 +++
 .../wf/TestForkedActionStartXCommand.java       |   77 -
 .../oozie/command/wf/TestSignalXCommand.java    |    2 +-
 .../oozie/command/wf/TestSubmitXCommand.java    |   84 +-
 .../command/wf/TestWorkflowKillXCommand.java    |   19 +
 .../wf/TestWorkflowNotificationXCommand.java    |   67 +
 .../apache/oozie/event/TestEventGeneration.java |   20 +-
 .../jpa/TestBundleJobInfoGetJPAExecutor.java    |  110 ++
 .../jpa/TestBundleJobQueryExecutor.java         |    2 +-
 .../jpa/TestCoordJobInfoGetJPAExecutor.java     |  136 +-
 .../jpa/TestWorkflowsJobGetJPAExecutor.java     |   41 +
 .../org/apache/oozie/lock/TestMemoryLocks.java  |  276 ++--
 .../oozie/service/TestActionConfFileType.java   |   52 +
 .../oozie/service/TestAuthorizationService.java |    5 +-
 .../oozie/service/TestConfigurationService.java |    2 +
 ...TestHAPartitionDependencyManagerService.java |    1 +
 .../service/TestHadoopAccessorService.java      |    4 +
 .../oozie/service/TestPauseTransitService.java  |    6 +-
 .../oozie/service/TestRecoveryService.java      |   35 +-
 .../oozie/service/TestShareLibService.java      |   29 +-
 .../oozie/service/TestZKLocksService.java       |  115 +-
 .../apache/oozie/service/TestZKUUIDService.java |   30 +-
 .../apache/oozie/servlet/TestV2SLAServlet.java  |    6 +-
 .../apache/oozie/store/TestDBWorkflowStore.java |    1 -
 .../org/apache/oozie/test/XDataTestCase.java    |    3 +-
 .../org/apache/oozie/test/XHCatTestCase.java    |    1 +
 .../java/org/apache/oozie/test/XTestCase.java   |   87 +-
 .../oozie/util/TestJaasConfiguration.java       |   19 +-
 .../oozie/util/TestMetricsInstrumentation.java  |   65 +
 .../util/TestTimestampedMessageParser.java      |    4 +-
 .../lite/TestLiteWorkflowAppParser.java         |  112 +-
 core/src/test/resources/wf-long.xml             | 1456 ++++++++++++++++++
 distro/pom.xml                                  |   19 +-
 distro/src/main/bin/oozie-setup.ps1             |    6 +-
 distro/src/main/bin/oozie-setup.sh              |   26 +-
 distro/src/main/tomcat/ssl-server.xml           |    2 +-
 docs/pom.xml                                    |    4 +-
 .../src/site/twiki/AG_ActionConfiguration.twiki |    6 +-
 docs/src/site/twiki/AG_Install.twiki            |  104 +-
 .../site/twiki/CoordinatorFunctionalSpec.twiki  |  331 ++++
 docs/src/site/twiki/DG_CommandLineTool.twiki    |   36 +-
 docs/src/site/twiki/DG_CoordinatorRerun.twiki   |    2 +-
 docs/src/site/twiki/DG_QuickStart.twiki         |   10 +-
 docs/src/site/twiki/DG_SLAMonitoring.twiki      |   26 +-
 .../site/twiki/DG_ShellActionExtension.twiki    |   68 +
 .../site/twiki/DG_SparkActionExtension.twiki    |   60 +-
 .../site/twiki/DG_SqoopActionExtension.twiki    |    2 +-
 docs/src/site/twiki/ENG_Building.twiki          |    8 +-
 docs/src/site/twiki/ENG_MiniOozie.twiki         |    2 +-
 docs/src/site/twiki/WebServicesAPI.twiki        |   12 +-
 .../src/site/twiki/WorkflowFunctionalSpec.twiki |   18 +-
 examples/pom.xml                                |   10 +-
 .../main/apps/coord-input-logic/coordinator.xml |   79 +
 .../main/apps/coord-input-logic/job.properties  |   25 +
 .../main/apps/coord-input-logic/workflow.xml    |   61 +
 examples/src/main/apps/distcp/workflow.xml      |    2 +
 examples/src/main/apps/hive/script.q            |    1 +
 .../oozie/example/TestLocalOozieExample.java    |    1 -
 login/pom.xml                                   |    4 +-
 minitest/pom.xml                                |    4 +-
 .../org/apache/oozie/test/WorkflowTest.java     |    1 -
 pom.xml                                         |  149 +-
 release-log.txt                                 |   97 +-
 sharelib/distcp/pom.xml                         |    4 +-
 .../apache/oozie/action/hadoop/DistcpMain.java  |    9 +-
 sharelib/hcatalog/pom.xml                       |    4 +-
 sharelib/hive/pom.xml                           |    9 +-
 .../apache/oozie/action/hadoop/HiveMain.java    |   22 +-
 .../action/hadoop/TestHiveActionExecutor.java   |   15 +-
 .../oozie/action/hadoop/TestHiveMain.java       |    8 +-
 sharelib/hive2/pom.xml                          |    9 +-
 .../apache/oozie/action/hadoop/Hive2Main.java   |    3 +-
 .../action/hadoop/TestHive2ActionExecutor.java  |   51 +-
 sharelib/oozie/pom.xml                          |    4 +-
 .../apache/oozie/action/hadoop/JavaMain.java    |    2 +
 .../oozie/action/hadoop/LauncherMain.java       |  103 +-
 .../oozie/action/hadoop/LauncherMapper.java     |   44 +-
 .../oozie/action/hadoop/LauncherURIHandler.java |    6 +-
 .../action/hadoop/OozieActionConfigurator.java  |    2 +-
 .../hadoop/OozieLauncherOutputCommitter.java    |   65 +
 .../hadoop/OozieLauncherOutputFormat.java       |   48 +
 .../apache/oozie/action/hadoop/ShellMain.java   |   55 +-
 sharelib/pig/pom.xml                            |    4 +-
 .../org/apache/oozie/action/hadoop/PigMain.java |   15 +-
 .../oozie/action/hadoop/PigMainWithOldAPI.java  |    6 +-
 .../action/hadoop/TestPigActionExecutor.java    |    1 -
 .../apache/oozie/action/hadoop/TestPigMain.java |    1 -
 sharelib/pom.xml                                |    4 +-
 sharelib/spark/pom.xml                          |   24 +-
 .../apache/oozie/action/hadoop/SparkMain.java   |  235 +--
 sharelib/sqoop/pom.xml                          |    8 +-
 .../apache/oozie/action/hadoop/SqoopMain.java   |   21 +-
 .../action/hadoop/TestSqoopActionExecutor.java  |   22 +-
 sharelib/streaming/pom.xml                      |    4 +-
 .../hadoop/TestMapReduceActionExecutor.java     |    2 +-
 src/main/assemblies/distro.xml                  |    8 -
 src/main/assemblies/examples.xml                |    4 +
 tools/pom.xml                                   |   10 +-
 .../java/org/apache/oozie/tools/OozieDBCLI.java |    3 +-
 .../apache/oozie/tools/OozieDBExportCLI.java    |  200 +++
 .../apache/oozie/tools/OozieDBImportCLI.java    |  214 +++
 .../apache/oozie/tools/OozieSharelibCLI.java    |    2 +-
 .../org/apache/oozie/tools/TestDBLoadDump.java  |  115 ++
 .../src/test/resources/dumpData/ooziedb_ac.json |    3 +
 .../test/resources/dumpData/ooziedb_bna.json    |    0
 .../test/resources/dumpData/ooziedb_bnj.json    |    0
 .../src/test/resources/dumpData/ooziedb_ca.json |    3 +
 .../src/test/resources/dumpData/ooziedb_cj.json |    3 +
 .../test/resources/dumpData/ooziedb_slareg.json |    0
 .../test/resources/dumpData/ooziedb_slasum.json |    0
 .../resources/dumpData/ooziedb_sysinfo.json     |    1 +
 .../src/test/resources/dumpData/ooziedb_wf.json |    1 +
 webapp/pom.xml                                  |    6 +-
 .../webapp/console/sla/js/oozie-sla-table.js    |   21 +-
 webapp/src/main/webapp/oozie-console.js         |    3 +-
 workflowgenerator/README.txt                    |  138 --
 workflowgenerator/pom.xml                       |  185 ---
 .../src/main/assemblies/workflowgenerator.xml   |   30 -
 .../client/OozieDiagramController.java          |  452 ------
 .../client/OozieWorkflowGenerator.java          |  765 ---------
 .../client/property/Property.java               |   75 -
 .../client/property/PropertyTable.java          |  620 --------
 .../client/property/PropertyTableFactory.java   |  137 --
 .../property/action/EmailPropertyTable.java     |  114 --
 .../client/property/action/FSPropertyTable.java |  477 ------
 .../property/action/JavaPropertyTable.java      |  189 ---
 .../property/action/MapReducePropertyTable.java |  156 --
 .../property/action/PigPropertyTable.java       |  168 --
 .../property/action/PipesPropertyTable.java     |  207 ---
 .../property/action/SSHPropertyTable.java       |  234 ---
 .../property/action/ShellPropertyTable.java     |  190 ---
 .../property/action/StreamingPropertyTable.java |  206 ---
 .../property/action/SubWFPropertyTable.java     |  140 --
 .../property/control/DecisionPropertyTable.java |  423 -----
 .../property/control/EndPropertyTable.java      |   67 -
 .../property/control/ForkPropertyTable.java     |  237 ---
 .../property/control/JoinPropertyTable.java     |   72 -
 .../property/control/KillPropertyTable.java     |   77 -
 .../property/control/StartPropertyTable.java    |   73 -
 .../property/control/WrkflowPropertyTable.java  |  169 --
 .../client/widget/NodeWidget.java               |  272 ----
 .../client/widget/action/EmailActionWidget.java |   48 -
 .../client/widget/action/FSActionWidget.java    |   48 -
 .../client/widget/action/JavaActionWidget.java  |   49 -
 .../widget/action/MapReduceActionWidget.java    |   48 -
 .../client/widget/action/PigActionWidget.java   |   49 -
 .../client/widget/action/PipesActionWidget.java |   48 -
 .../client/widget/action/SSHActionWidget.java   |   48 -
 .../client/widget/action/ShellActionWidget.java |   48 -
 .../widget/action/StreamingActionWidget.java    |   48 -
 .../client/widget/action/SubWFActionWidget.java |   48 -
 .../widget/control/DecisionNodeWidget.java      |   49 -
 .../client/widget/control/EndNodeWidget.java    |   56 -
 .../client/widget/control/ForkNodeWidget.java   |   50 -
 .../client/widget/control/JoinNodeWidget.java   |   47 -
 .../client/widget/control/KillNodeWidget.java   |   42 -
 .../client/widget/control/StartNodeWidget.java  |   69 -
 .../workflowgenerator/workflowgenerator.gwt.xml |   43 -
 .../src/main/resources/img/action.png           |  Bin 889 -> 0 bytes
 .../src/main/resources/img/add-btn.png          |  Bin 1656 -> 0 bytes
 .../src/main/resources/img/decision.png         |  Bin 1042 -> 0 bytes
 .../src/main/resources/img/del-btn.png          |  Bin 1521 -> 0 bytes
 .../src/main/resources/img/distcp-action.png    |  Bin 1584 -> 0 bytes
 .../src/main/resources/img/email-action.png     |  Bin 1454 -> 0 bytes
 .../src/main/resources/img/end.png              |  Bin 1246 -> 0 bytes
 .../src/main/resources/img/fork-shape.png       |  Bin 1591 -> 0 bytes
 .../src/main/resources/img/fork.png             |  Bin 2078 -> 0 bytes
 .../src/main/resources/img/fs-action.png        |  Bin 1310 -> 0 bytes
 .../src/main/resources/img/hive-action.png      |  Bin 1401 -> 0 bytes
 .../src/main/resources/img/java-action.png      |  Bin 1428 -> 0 bytes
 .../src/main/resources/img/join-shape.png       |  Bin 1609 -> 0 bytes
 .../src/main/resources/img/join.png             |  Bin 2141 -> 0 bytes
 .../src/main/resources/img/kill.png             |  Bin 2439 -> 0 bytes
 .../src/main/resources/img/mr-action.png        |  Bin 1335 -> 0 bytes
 .../src/main/resources/img/pig-action.png       |  Bin 1387 -> 0 bytes
 .../src/main/resources/img/pipes-action.png     |  Bin 1610 -> 0 bytes
 .../src/main/resources/img/shell-action.png     |  Bin 1612 -> 0 bytes
 .../src/main/resources/img/ssh-action.png       |  Bin 1346 -> 0 bytes
 .../src/main/resources/img/start-shape.png      |  Bin 1513 -> 0 bytes
 .../src/main/resources/img/start.png            |  Bin 2368 -> 0 bytes
 .../src/main/resources/img/streaming-action.png |  Bin 1566 -> 0 bytes
 .../src/main/resources/img/subwf-action.png     |  Bin 1562 -> 0 bytes
 .../workflowgenerator/workflowgenerator.gwt.xml |   43 -
 .../src/main/webapp/WEB-INF/web.xml             |   31 -
 .../src/main/webapp/workflowgenerator.css       |  200 ---
 .../src/main/webapp/workflowgenerator.html      |   66 -
 .../client/OozieWorkflowGeneratorTest.java      |   35 -
 .../workflowgeneratorTest-dev.launch            |   39 -
 .../workflowgeneratorTest-prod.launch           |   39 -
 zookeeper-security-tests/pom.xml                |    4 +-
 318 files changed, 7740 insertions(+), 9420 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/client/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/client/src/main/java/org/apache/oozie/client/XOozieClient.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/FsActionExecutor.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/HCatCredentialHelper.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
index 12cc016,2aed936..ddb1504
--- a/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
@@@ -127,19 -121,7 +121,7 @@@ public class Hive2ActionExecutor extend
          return conf;
      }
  
-     @Override
-     protected boolean getCaptureOutput(WorkflowAction action) throws JDOMException {
-         return true;
-     }
- 
-     @Override
-     protected void getActionData(FileSystem actionFs, WorkflowAction action, Context context)
-             throws HadoopAccessorException, JDOMException, IOException, URISyntaxException {
-         super.getActionData(actionFs, action, context);
-         readExternalChildIDs(action, context);
-     }
- 
--    /**
++     /**
       * Return the sharelib name for the action.
       *
       * @return returns <code>hive2</code>.

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
index dbe7bc1,65996d9..0515d39
--- a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
@@@ -148,10 -120,18 +148,12 @@@ public class JavaActionExecutor extend
      protected XLog LOG = XLog.getLog(getClass());
      private static final Pattern heapPattern = Pattern.compile("-Xmx(([0-9]+)[mMgG])");
      private static final String JAVA_TMP_DIR_SETTINGS = "-Djava.io.tmpdir=";
 -    public static final String CONF_HADOOP_YARN_UBER_MODE = "oozie.action.launcher." + HADOOP_YARN_UBER_MODE;
 -    public static final String HADOOP_JOB_CLASSLOADER = "mapreduce.job.classloader";
 -    public static final String HADOOP_USER_CLASSPATH_FIRST = "mapreduce.user.classpath.first";
 -    public static final String OOZIE_CREDENTIALS_SKIP = "oozie.credentials.skip";
  
+     public XConfiguration workflowConf = null;
+ 
      static {
          DISALLOWED_PROPERTIES.add(HADOOP_USER);
 -        DISALLOWED_PROPERTIES.add(HADOOP_JOB_TRACKER);
          DISALLOWED_PROPERTIES.add(HADOOP_NAME_NODE);
 -        DISALLOWED_PROPERTIES.add(HADOOP_JOB_TRACKER_2);
          DISALLOWED_PROPERTIES.add(HADOOP_YARN_RM);
      }
  
@@@ -163,13 -143,15 +165,15 @@@
          super(type);
      }
  
 -    public static List<Class> getCommonLauncherClasses() {
 -        List<Class> classes = new ArrayList<Class>();
 -        classes.add(LauncherMapper.class);
 +    public static List<Class<?>> getCommonLauncherClasses() {
 +        List<Class<?>> classes = new ArrayList<Class<?>>();
          classes.add(OozieLauncherInputFormat.class);
 +        classes.add(LauncherMain.class);
+         classes.add(OozieLauncherOutputFormat.class);
+         classes.add(OozieLauncherOutputCommitter.class);
 -        classes.add(LauncherMainHadoopUtils.class);
 -        classes.add(HadoopShims.class);
          classes.addAll(Services.get().get(URIHandlerService.class).getClassesForLauncher());
 +        classes.add(LauncherAM.class);
 +        classes.add(LauncherAMCallbackNotifier.class);
          return classes;
      }
  
@@@ -1451,104 -1425,114 +1443,105 @@@
  
      @Override
      public void check(Context context, WorkflowAction action) throws ActionExecutorException {
 -        JobClient jobClient = null;
 -        boolean exception = false;
 +        boolean fallback = false;
 +        LOG = XLog.resetPrefix(LOG);
          LogUtils.setLogInfo(action);
 +        YarnClient yarnClient = null;
          try {
              Element actionXml = XmlUtils.parseXml(action.getConf());
 -            FileSystem actionFs = context.getAppFileSystem();
              JobConf jobConf = createBaseHadoopConf(context, actionXml);
 -            jobClient = createJobClient(context, jobConf);
 -            RunningJob runningJob = getRunningJob(context, action, jobClient);
 -            if (runningJob == null) {
 -                context.setExecutionData(FAILED, null);
 -                throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "JA017",
 -                        "Could not lookup launched hadoop Job ID [{0}] which was associated with " +
 -                        " action [{1}].  Failing this action!", getActualExternalId(action), action.getId());
 +            FileSystem actionFs = context.getAppFileSystem();
 +            yarnClient = createYarnClient(context, jobConf);
 +            FinalApplicationStatus appStatus = null;
 +            try {
 +                ApplicationReport appReport =
 +                        yarnClient.getApplicationReport(ConverterUtils.toApplicationId(action.getExternalId()));
 +                YarnApplicationState appState = appReport.getYarnApplicationState();
 +                if (appState == YarnApplicationState.FAILED || appState == YarnApplicationState.FINISHED
 +                        || appState == YarnApplicationState.KILLED) {
 +                    appStatus = appReport.getFinalApplicationStatus();
 +                }
 +
 +            } catch (Exception ye) {
 +                LOG.debug("Exception occurred while checking Launcher AM status; will try checking action data file instead ", ye);
 +                // Fallback to action data file if we can't find the Launcher AM (maybe it got purged)
 +                fallback = true;
              }
 -            if (runningJob.isComplete()) {
 +            if (appStatus != null || fallback) {
                  Path actionDir = context.getActionDir();
 -                String newId = null;
                  // load sequence file into object
                  Map<String, String> actionData = LauncherMapperHelper.getActionData(actionFs, actionDir, jobConf);
 -                if (actionData.containsKey(LauncherMapper.ACTION_DATA_NEW_ID)) {
 -                    newId = actionData.get(LauncherMapper.ACTION_DATA_NEW_ID);
 -                    String launcherId = action.getExternalId();
 -                    runningJob = jobClient.getJob(JobID.forName(newId));
 -                    if (runningJob == null) {
 -                        context.setExternalStatus(FAILED);
 +                if (fallback) {
 +                    String finalStatus = actionData.get(LauncherAM.ACTION_DATA_FINAL_STATUS);
 +                    if (finalStatus != null) {
 +                        appStatus = FinalApplicationStatus.valueOf(finalStatus);
 +                    } else {
 +                        context.setExecutionData(FAILED, null);
                          throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "JA017",
 -                                "Unknown hadoop job [{0}] associated with action [{1}].  Failing this action!", newId,
 -                                action.getId());
 +                                "Unknown hadoop job [{0}] associated with action [{1}] and couldn't determine status from" +
 +                                        " action data.  Failing this action!", action.getExternalId(), action.getId());
                      }
 -                    context.setExternalChildIDs(newId);
 -                    LOG.info(XLog.STD, "External ID swap, old ID [{0}] new ID [{1}]", launcherId,
 -                            newId);
                  }
 -                else {
 -                    String externalIDs = actionData.get(LauncherMapper.ACTION_DATA_EXTERNAL_CHILD_IDS);
 -                    if (externalIDs != null) {
 -                        context.setExternalChildIDs(externalIDs);
 -                        LOG.info(XLog.STD, "Hadoop Jobs launched : [{0}]", externalIDs);
 +
 +                String externalID = actionData.get(LauncherAM.ACTION_DATA_NEW_ID);  // MapReduce was launched
 +                if (externalID != null) {
 +                    context.setExternalChildIDs(externalID);
 +                    LOG.info(XLog.STD, "Hadoop Job was launched : [{0}]", externalID);
 +                }
 +
 +               // Multiple child IDs - Pig or Hive action
 +                String externalIDs = actionData.get(LauncherAM.ACTION_DATA_EXTERNAL_CHILD_IDS);
 +                if (externalIDs != null) {
 +                    context.setExternalChildIDs(externalIDs);
 +                    LOG.info(XLog.STD, "External Child IDs  : [{0}]", externalIDs);
++
 +                }
 +
 +                LOG.info(XLog.STD, "action completed, external ID [{0}]", action.getExternalId());
 +                context.setExecutionData(appStatus.toString(), null);
 +                if (appStatus == FinalApplicationStatus.SUCCEEDED) {
 +                    if (getCaptureOutput(action) && LauncherMapperHelper.hasOutputData(actionData)) {
 +                        context.setExecutionData(SUCCEEDED, PropertiesUtils.stringToProperties(actionData
 +                                .get(LauncherAM.ACTION_DATA_OUTPUT_PROPS)));
 +                        LOG.info(XLog.STD, "action produced output");
                      }
 -                    else if (LauncherMapperHelper.hasOutputData(actionData)) {
 -                        // Load stored Hadoop jobs ids and promote them as external child ids
 -                        // This is for jobs launched with older release during upgrade to Oozie 4.3
 -                        Properties props = PropertiesUtils.stringToProperties(actionData
 -                                .get(LauncherMapper.ACTION_DATA_OUTPUT_PROPS));
 -                        if (props.get(LauncherMain.HADOOP_JOBS) != null) {
 -                            externalIDs = (String) props.get(LauncherMain.HADOOP_JOBS);
 -                            context.setExternalChildIDs(externalIDs);
 -                            LOG.info(XLog.STD, "Hadoop Jobs launched : [{0}]", externalIDs);
 -                        }
 +                    else {
 +                        context.setExecutionData(SUCCEEDED, null);
                      }
 -                }
 -                if (runningJob.isComplete()) {
 -                    // fetching action output and stats for the Map-Reduce action.
 -                    if (newId != null) {
 -                        actionData = LauncherMapperHelper.getActionData(actionFs, context.getActionDir(), jobConf);
 +                    if (LauncherMapperHelper.hasStatsData(actionData)) {
 +                        context.setExecutionStats(actionData.get(LauncherAM.ACTION_DATA_STATS));
 +                        LOG.info(XLog.STD, "action produced stats");
                      }
 -                    LOG.info(XLog.STD, "action completed, external ID [{0}]",
 -                            action.getExternalId());
 -                    if (LauncherMapperHelper.isMainSuccessful(runningJob)) {
 -                        if (getCaptureOutput(action) && LauncherMapperHelper.hasOutputData(actionData)) {
 -                            context.setExecutionData(SUCCEEDED, PropertiesUtils.stringToProperties(actionData
 -                                    .get(LauncherMapper.ACTION_DATA_OUTPUT_PROPS)));
 -                            LOG.info(XLog.STD, "action produced output");
 +                    getActionData(actionFs, action, context);
 +                }
 +                else {
 +                    String errorReason;
 +                    if (actionData.containsKey(LauncherAM.ACTION_DATA_ERROR_PROPS)) {
 +                        Properties props = PropertiesUtils.stringToProperties(actionData
 +                                .get(LauncherAM.ACTION_DATA_ERROR_PROPS));
 +                        String errorCode = props.getProperty("error.code");
 +                        if ("0".equals(errorCode)) {
 +                            errorCode = "JA018";
                          }
 -                        else {
 -                            context.setExecutionData(SUCCEEDED, null);
 +                        if ("-1".equals(errorCode)) {
 +                            errorCode = "JA019";
                          }
 -                        if (LauncherMapperHelper.hasStatsData(actionData)) {
 -                            context.setExecutionStats(actionData.get(LauncherMapper.ACTION_DATA_STATS));
 -                            LOG.info(XLog.STD, "action produced stats");
 +                        errorReason = props.getProperty("error.reason");
 +                        LOG.warn("Launcher ERROR, reason: {0}", errorReason);
 +                        String exMsg = props.getProperty("exception.message");
 +                        String errorInfo = (exMsg != null) ? exMsg : errorReason;
 +                        context.setErrorInfo(errorCode, errorInfo);
 +                        String exStackTrace = props.getProperty("exception.stacktrace");
 +                        if (exMsg != null) {
 +                            LOG.warn("Launcher exception: {0}{E}{1}", exMsg, exStackTrace);
                          }
 -                        getActionData(actionFs, runningJob, action, context);
                      }
                      else {
 -                        String errorReason;
 -                        if (actionData.containsKey(LauncherMapper.ACTION_DATA_ERROR_PROPS)) {
 -                            Properties props = PropertiesUtils.stringToProperties(actionData
 -                                    .get(LauncherMapper.ACTION_DATA_ERROR_PROPS));
 -                            String errorCode = props.getProperty("error.code");
 -                            if ("0".equals(errorCode)) {
 -                                errorCode = "JA018";
 -                            }
 -                            if ("-1".equals(errorCode)) {
 -                                errorCode = "JA019";
 -                            }
 -                            errorReason = props.getProperty("error.reason");
 -                            LOG.warn("Launcher ERROR, reason: {0}", errorReason);
 -                            String exMsg = props.getProperty("exception.message");
 -                            String errorInfo = (exMsg != null) ? exMsg : errorReason;
 -                            context.setErrorInfo(errorCode, errorInfo);
 -                            String exStackTrace = props.getProperty("exception.stacktrace");
 -                            if (exMsg != null) {
 -                                LOG.warn("Launcher exception: {0}{E}{1}", exMsg, exStackTrace);
 -                            }
 -                        }
 -                        else {
 -                            errorReason = XLog.format("LauncherMapper died, check Hadoop LOG for job [{0}:{1}]", action
 -                                    .getTrackerUri(), action.getExternalId());
 -                            LOG.warn(errorReason);
 -                        }
 -                        context.setExecutionData(FAILED_KILLED, null);
 +                        errorReason = XLog.format("Launcher AM died, check Hadoop LOG for job [{0}:{1}]", action
 +                                .getTrackerUri(), action.getExternalId());
 +                        LOG.warn(errorReason);
                      }
 -                }
 -                else {
 -                    context.setExternalStatus("RUNNING");
 -                    LOG.info(XLog.STD, "checking action, hadoop job ID [{0}] status [RUNNING]",
 -                            runningJob.getID());
 +                    context.setExecutionData(FAILED_KILLED, null);
                  }
              }
              else {
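
For context, the rewritten check() above replaces the JobClient/RunningJob lookup with the YARN client API: it asks the ResourceManager for the launcher AM's application report and only trusts the final status once the application has reached a terminal state, otherwise falling back to the action data file. A minimal standalone sketch of that lookup pattern follows; the application id and configuration are placeholders, not values from this commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.util.ConverterUtils;

public class LauncherStatusCheckSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical launcher AM id; in Oozie this would come from action.getExternalId().
        String externalId = "application_1475229207999_0001";
        YarnClient yarnClient = YarnClient.createYarnClient();
        yarnClient.init(new Configuration());   // assumes yarn-site.xml is on the classpath
        yarnClient.start();
        try {
            ApplicationReport report =
                    yarnClient.getApplicationReport(ConverterUtils.toApplicationId(externalId));
            YarnApplicationState state = report.getYarnApplicationState();
            // Only terminal states carry a meaningful final status, as in the hunk above.
            if (state == YarnApplicationState.FINISHED
                    || state == YarnApplicationState.FAILED
                    || state == YarnApplicationState.KILLED) {
                FinalApplicationStatus finalStatus = report.getFinalApplicationStatus();
                System.out.println("Launcher AM finished with status " + finalStatus);
            } else {
                System.out.println("Launcher AM not finished yet, state " + state);
            }
        } finally {
            yarnClient.stop();
        }
    }
}

If the report lookup throws (for example because the application has already been purged from the ResourceManager), the executor above falls back to the final status recorded in the action data file.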

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapperHelper.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapperHelper.java
index bb58ad5,9609fdc..e2a667e
--- a/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapperHelper.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapperHelper.java
@@@ -38,11 -39,12 +38,12 @@@ import org.apache.hadoop.fs.FileSystem
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.SequenceFile;
  import org.apache.hadoop.io.Text;
 +import org.apache.hadoop.mapred.Counters;
  import org.apache.hadoop.mapred.JobConf;
  import org.apache.hadoop.mapred.RunningJob;
 -import org.apache.hadoop.mapred.Counters;
  import org.apache.hadoop.security.UserGroupInformation;
  import org.apache.oozie.client.OozieClient;
+ import org.apache.oozie.client.WorkflowAction;
  import org.apache.oozie.service.HadoopAccessorException;
  import org.apache.oozie.service.HadoopAccessorService;
  import org.apache.oozie.service.Services;
@@@ -51,9 -53,10 +52,11 @@@ import org.apache.oozie.service.UserGro
  import org.apache.oozie.util.IOUtils;
  import org.apache.oozie.util.PropertiesUtils;
  
 +// TODO: we're no longer using Launcher Mapper -- give this class a better name
  public class LauncherMapperHelper {
  
+     public static final String OOZIE_ACTION_YARN_TAG = "oozie.action.yarn.tag";
+ 
      public static String getRecoveryId(Configuration launcherConf, Path actionDir, String recoveryId)
              throws HadoopAccessorException, IOException {
          String jobId = null;
@@@ -145,8 -148,21 +148,9 @@@
            launcherConf.setBoolean("oozie.hadoop-2.0.2-alpha.workaround.for.distributed.cache", true);
          }
  
 -        FileSystem fs =
 -          Services.get().get(HadoopAccessorService.class).createFileSystem(launcherConf.get("user.name"),
 -                                                                           actionDir.toUri(), launcherConf);
 -        fs.mkdirs(actionDir);
 -
 -        OutputStream os = fs.create(new Path(actionDir, LauncherMapper.ACTION_CONF_XML));
 -        try {
 -            actionConf.writeXml(os);
 -        } finally {
 -            IOUtils.closeSafely(os);
 -        }
 -
          launcherConf.setInputFormat(OozieLauncherInputFormat.class);
-         launcherConf.set("mapred.output.dir", new Path(actionDir, "output").toString());
+         launcherConf.setOutputFormat(OozieLauncherOutputFormat.class);
+         launcherConf.setOutputCommitter(OozieLauncherOutputCommitter.class);
      }
  
      public static void setupYarnRestartHandling(JobConf launcherJobConf, Configuration actionConf, String launcherTag,
@@@ -157,10 -173,10 +161,10 @@@
          String tag = getTag(launcherTag);
          // keeping the oozie.child.mapreduce.job.tags instead of mapreduce.job.tags to avoid killing launcher itself.
          // mapreduce.job.tags should only go to child job launch by launcher.
 -        actionConf.set(LauncherMainHadoopUtils.CHILD_MAPREDUCE_JOB_TAGS, tag);
 +        actionConf.set(LauncherMain.CHILD_MAPREDUCE_JOB_TAGS, tag);
      }
  
-     private static String getTag(String launcherTag) throws NoSuchAlgorithmException {
+     public static String getTag(String launcherTag) throws NoSuchAlgorithmException {
          MessageDigest digest = MessageDigest.getInstance("MD5");
          digest.update(launcherTag.getBytes(), 0, launcherTag.length());
          String md5 = "oozie-" + new BigInteger(1, digest.digest()).toString(16);
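
The getTag() helper made public above derives the YARN tag from the launcher tag with a plain MD5 digest and an "oozie-" prefix; child jobs later carry that tag via oozie.child.mapreduce.job.tags so the launcher itself is not matched. A self-contained sketch of the same digest step follows (the hunk is cut off before the return, so returning the prefixed digest is an assumption, and the input tag is made up).

import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class LauncherTagSketch {
    // Mirrors the digest logic shown in the hunk above; the input tag is hypothetical.
    static String deriveTag(String launcherTag) throws NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("MD5");
        digest.update(launcherTag.getBytes(), 0, launcherTag.length());
        return "oozie-" + new BigInteger(1, digest.digest()).toString(16);
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        System.out.println(deriveTag("0000000-161010112233445-oozie-oozi-W@mr-node"));
    }
}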

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
index 019c4d9,c36a675..89f30f1
--- a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
@@@ -314,80 -334,19 +314,86 @@@ public class MapReduceActionExecutor ex
      }
  
      @Override
 -    protected RunningJob getRunningJob(Context context, WorkflowAction action, JobClient jobClient) throws Exception{
 +    protected void injectCallback(Context context, Configuration conf) {
 +        // add callback for the MapReduce job
 +        String callback = context.getCallbackUrl("$jobStatus");
 +        if (conf.get("job.end.notification.url") != null) {
 +            LOG.warn("Overriding the action job end notification URI");
 +        }
 +        conf.set("job.end.notification.url", callback);
 +
 +        super.injectCallback(context, conf);
 +    }
  
 -        RunningJob runningJob;
 -        String jobId = getActualExternalId(action);
 +    @Override
 +    public void check(Context context, WorkflowAction action) throws ActionExecutorException {
 +        Map<String, String> actionData = Collections.emptyMap();
 +        JobConf jobConf = null;
 +
 +        try {
 +            FileSystem actionFs = context.getAppFileSystem();
 +            Element actionXml = XmlUtils.parseXml(action.getConf());
 +            jobConf = createBaseHadoopConf(context, actionXml);
 +            Path actionDir = context.getActionDir();
 +            actionData = LauncherMapperHelper.getActionData(actionFs, actionDir, jobConf);
 +        } catch (Exception e) {
 +            LOG.warn("Exception in check(). Message[{0}]", e.getMessage(), e);
 +            throw convertException(e);
 +        }
  
 -        runningJob = jobClient.getJob(JobID.forName(jobId));
 +        final String newId = actionData.get(LauncherMapper.ACTION_DATA_NEW_ID);
  
 -        return runningJob;
 +        // check the Hadoop job if newID is defined (which should be the case here) - otherwise perform the normal check()
 +        if (newId != null) {
 +            boolean jobCompleted;
 +            JobClient jobClient = null;
 +            boolean exception = false;
 +
 +            try {
 +                jobClient = createJobClient(context, jobConf);
 +                RunningJob runningJob = jobClient.getJob(JobID.forName(newId));
 +
 +                if (runningJob == null) {
 +                    context.setExternalStatus(FAILED);
 +                    throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "JA017",
 +                            "Unknown hadoop job [{0}] associated with action [{1}].  Failing this action!", newId,
 +                            action.getId());
 +                }
 +
 +                jobCompleted = runningJob.isComplete();
 +            } catch (Exception e) {
 +                LOG.warn("Exception in check(). Message[{0}]", e.getMessage(), e);
 +                exception = true;
 +                throw convertException(e);
 +            } finally {
 +                if (jobClient != null) {
 +                    try {
 +                        jobClient.close();
 +                    } catch (Exception e) {
 +                        if (exception) {
 +                            LOG.error("JobClient error (not re-throwing due to a previous error): ", e);
 +                        } else {
 +                            throw convertException(e);
 +                        }
 +                    }
 +                }
 +            }
 +
 +            // run original check() if the MR action is completed or there are errors - otherwise mark it as RUNNING
 +            if (jobCompleted || (!jobCompleted && actionData.containsKey(LauncherMapper.ACTION_DATA_ERROR_PROPS))) {
 +                super.check(context, action);
 +            } else {
 +                context.setExternalStatus(RUNNING);
 +                context.setExternalChildIDs(newId);
 +            }
 +        } else {
 +            super.check(context, action);
 +        }
      }
+ 
+     @Override
+     void injectActionCallback(Context context, Configuration actionConf) {
+         injectCallback(context, actionConf);
+     }
+ 
  }
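
The MapReduce-specific check() above additionally queries the child Hadoop job through the classic JobClient API before deciding whether to run the generic check. A trimmed standalone sketch of that lookup follows; the job id is a placeholder and the cluster configuration is assumed to be picked up from the classpath.

import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;

public class ChildJobCheckSketch {
    public static void main(String[] args) throws Exception {
        String newId = "job_1475229207999_0002"; // hypothetical child MR job id
        JobClient jobClient = new JobClient(new JobConf());
        try {
            RunningJob runningJob = jobClient.getJob(JobID.forName(newId));
            if (runningJob == null) {
                System.out.println("Unknown hadoop job " + newId);
            } else if (runningJob.isComplete()) {
                System.out.println("Child job finished, successful=" + runningJob.isSuccessful());
            } else {
                System.out.println("Child job still RUNNING");
            }
        } finally {
            jobClient.close();
        }
    }
}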

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/ShellActionExecutor.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
index 5f33bb2,8e8d7d3..32dadf0
--- a/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
@@@ -34,12 -28,16 +28,15 @@@ import org.apache.oozie.service.Configu
  import org.apache.oozie.service.Services;
  import org.apache.oozie.service.SparkConfigurationService;
  import org.jdom.Element;
- import org.jdom.JDOMException;
  import org.jdom.Namespace;
  
+ import java.util.ArrayList;
+ import java.util.List;
+ import java.util.Map;
+ 
  public class SparkActionExecutor extends JavaActionExecutor {
      public static final String SPARK_MAIN_CLASS_NAME = "org.apache.oozie.action.hadoop.SparkMain";
 -    public static final String TASK_USER_PRECEDENCE = "mapreduce.task.classpath.user.precedence"; // hadoop-2
 -    public static final String TASK_USER_CLASSPATH_PRECEDENCE = "mapreduce.user.classpath.first";  // hadoop-1
 +    public static final String TASK_USER_PRECEDENCE = "mapreduce.task.classpath.user.precedence";
      public static final String SPARK_MASTER = "oozie.spark.master";
      public static final String SPARK_MODE = "oozie.spark.mode";
      public static final String SPARK_OPTS = "oozie.spark.spark-opts";

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
index c3a09ac,6cee32a..b0fc83a
--- a/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
@@@ -18,15 -18,7 +18,13 @@@
  
  package org.apache.oozie.action.hadoop;
  
 +import java.io.IOException;
 +import java.io.StringReader;
- import java.net.URISyntaxException;
 +import java.util.ArrayList;
 +import java.util.List;
 +import java.util.StringTokenizer;
 +
  import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.mapred.Counters;
  import org.apache.hadoop.mapred.JobClient;
@@@ -35,14 -27,18 +33,12 @@@ import org.apache.hadoop.mapred.JobID
  import org.apache.hadoop.mapred.RunningJob;
  import org.apache.oozie.action.ActionExecutorException;
  import org.apache.oozie.client.WorkflowAction;
- import org.apache.oozie.service.HadoopAccessorException;
  import org.apache.oozie.util.XConfiguration;
- import org.apache.oozie.util.XmlUtils;
  import org.apache.oozie.util.XLog;
+ import org.apache.oozie.util.XmlUtils;
  import org.jdom.Element;
- import org.jdom.JDOMException;
  import org.jdom.Namespace;
  
 -import java.io.IOException;
 -import java.io.StringReader;
 -import java.util.ArrayList;
 -import java.util.List;
 -import java.util.StringTokenizer;
 -
  public class SqoopActionExecutor extends JavaActionExecutor {
  
    public static final String OOZIE_ACTION_EXTERNAL_STATS_WRITE = "oozie.action.external.stats.write";
@@@ -229,26 -225,6 +225,7 @@@
          }
      }
  
-     /**
-      * Get the stats and external child IDs
-      *  @param actionFs the FileSystem object
-      * @param action the Workflow action
-      * @param context executor context
-      *
-      */
-     @Override
-     protected void getActionData(FileSystem actionFs, WorkflowAction action, Context context)
-             throws HadoopAccessorException, JDOMException, IOException, URISyntaxException{
-         super.getActionData(actionFs, action, context);
-         readExternalChildIDs(action, context);
-     }
- 
-     @Override
-     protected boolean getCaptureOutput(WorkflowAction action) throws JDOMException {
-         return true;
-     }
- 
 +
      /**
       * Return the sharelib name for the action.
       *

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
index 64fa89a,cc61d3d..26deda8
--- a/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
@@@ -41,15 -41,11 +41,12 @@@ public class SubmitMRXCommand extends S
  
      static {
          SKIPPED_CONFS.add(WorkflowAppService.HADOOP_USER);
 -        SKIPPED_CONFS.add(XOozieClient.JT);
 +        SKIPPED_CONFS.add(XOozieClient.RM);
          SKIPPED_CONFS.add(XOozieClient.NN);
-         // a brillant mind made a change in Configuration that 'fs.default.name' key gets converted to 'fs.defaultFS'
-         // in Hadoop 0.23, we need skip that one too, keeping the old one because of Hadoop 1
-         SKIPPED_CONFS.add(XOozieClient.NN_2);
  
          DEPRECATE_MAP.put(XOozieClient.NN, XOozieClient.NN_2);
 -        DEPRECATE_MAP.put(XOozieClient.JT, XOozieClient.JT_2);
 +        DEPRECATE_MAP.put(XOozieClient.RM, "yarn.resourcemanager.address");
 +        DEPRECATE_MAP.put(XOozieClient.NN, "fs.defaultFS");
          DEPRECATE_MAP.put(WorkflowAppService.HADOOP_USER, "mapreduce.job.user.name");
      }
  

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
index 5845e17,5377127..0824503
--- a/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
+++ b/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
@@@ -30,15 -29,9 +30,16 @@@ import org.apache.hadoop.net.NetUtils
  import org.apache.hadoop.security.SecurityUtil;
  import org.apache.hadoop.security.UserGroupInformation;
  import org.apache.hadoop.security.token.Token;
 +import org.apache.hadoop.yarn.api.records.LocalResource;
 +import org.apache.hadoop.yarn.api.records.LocalResourceType;
 +import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 +import org.apache.hadoop.yarn.client.api.YarnClient;
 +import org.apache.hadoop.yarn.util.ConverterUtils;
 +import org.apache.hadoop.yarn.util.Records;
  import org.apache.oozie.ErrorCode;
 +import org.apache.oozie.action.ActionExecutor;
  import org.apache.oozie.action.hadoop.JavaActionExecutor;
+ import org.apache.oozie.util.IOUtils;
  import org.apache.oozie.util.ParamChecker;
  import org.apache.oozie.util.XConfiguration;
  import org.apache.oozie.util.XLog;

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/service/ShareLibService.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/java/org/apache/oozie/util/JobUtils.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/main/resources/oozie-default.xml
----------------------------------------------------------------------
diff --cc core/src/main/resources/oozie-default.xml
index 5f4645c,e71ebe3..59b359b
--- a/core/src/main/resources/oozie-default.xml
+++ b/core/src/main/resources/oozie-default.xml
@@@ -1782,6 -1790,42 +1790,17 @@@ will be the requeue interval for the ac
      </property>
  
      <property>
 -        <name>oozie.action.launcher.mapreduce.job.ubertask.enable</name>
 -        <value>true</value>
 -        <description>
 -            Enables Uber Mode for the launcher job in YARN/Hadoop 2 (no effect in Hadoop 1) for all action types by default.
 -            This can be overridden on a per-action-type basis by setting
 -            oozie.action.#action-type#.launcher.mapreduce.job.ubertask.enable in oozie-site.xml (where #action-type# is the action
 -            type; for example, "pig").  And that can be overridden on a per-action basis by setting
 -            oozie.launcher.mapreduce.job.ubertask.enable in an action's configuration section in a workflow.  In summary, the
 -            priority is this:
 -            1. action's configuration section in a workflow
 -            2. oozie.action.#action-type#.launcher.mapreduce.job.ubertask.enable in oozie-site
 -            3. oozie.action.launcher.mapreduce.job.ubertask.enable in oozie-site
 -        </description>
 -    </property>
 -
 -    <property>
 -        <name>oozie.action.shell.launcher.mapreduce.job.ubertask.enable</name>
 -        <value>false</value>
 -        <description>
 -            The Shell action may have issues with the $PATH environment when using Uber Mode, and so Uber Mode is disabled by
 -            default for it.  See oozie.action.launcher.mapreduce.job.ubertask.enable
 -        </description>
 -    </property>
 -
 -    <property>
+         <name>oozie.action.spark.setup.hadoop.conf.dir</name>
+         <value>false</value>
+         <description>
+             The Oozie action.xml (oozie.action.conf.xml) contains all of the Hadoop configuration as well as the
+             user-provided configuration. When this property is enabled, Oozie copies the action.xml out as Hadoop *-site
+             configuration files, so the user does not need to manage these files in the Spark sharelib. Users who want to
+             manage the Hadoop configuration themselves should leave this property disabled.
+         </description>
+     </property>
+ 
+     <property>
          <name>oozie.action.shell.setup.hadoop.conf.dir</name>
          <value>false</value>
          <description>

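Deployments that want Oozie to write out the Hadoop *-site files for Spark actions can enable the property above in oozie-site.xml; a minimal (hypothetical) override:

    <property>
        <name>oozie.action.spark.setup.hadoop.conf.dir</name>
        <value>true</value>
    </property>
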
http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
----------------------------------------------------------------------
diff --cc core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
index 8adc606,75301db..5d8bf34
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
@@@ -490,7 -543,73 +494,7 @@@ public class TestJavaActionExecutor ext
          assertEquals(WorkflowAction.Status.ERROR, context.getAction().getStatus());
      }
  
 -    public void testChildKill() throws Exception {
 -        if (HadoopShims.isYARN()) {
 -            final JobConf clusterConf = createJobConf();
 -            FileSystem fileSystem = FileSystem.get(clusterConf);
 -            Path confFile = new Path("/tmp/cluster-conf.xml");
 -            OutputStream out = fileSystem.create(confFile);
 -            clusterConf.writeXml(out);
 -            out.close();
 -            String confFileName = fileSystem.makeQualified(confFile).toString() + "#core-site.xml";
 -            final String actionXml = "<java>" +
 -                    "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
 -                    "<name-node>" + getNameNodeUri() + "</name-node>" +
 -                    "<main-class> " + SleepJob.class.getName() + " </main-class>" +
 -                    "<arg>-mt</arg>" +
 -                    "<arg>300000</arg>" +
 -                    "<archive>" + confFileName + "</archive>" +
 -                    "</java>";
 -            final Context context = createContext(actionXml, null);
 -            final RunningJob runningJob = submitAction(context);
 -            waitFor(60 * 1000, new Predicate() {
 -                @Override
 -                public boolean evaluate() throws Exception {
 -                    return runningJob.getJobStatus().getRunState() == 1;
 -                }
 -            });
 -            assertFalse(runningJob.isComplete());
 -            Thread.sleep(15000);
 -            UserGroupInformationService ugiService = Services.get().
 -                    get(UserGroupInformationService.class);
 -
 -            UserGroupInformation ugi = ugiService.getProxyUser(getTestUser());
 -            ugi.doAs(new PrivilegedExceptionAction<Object>() {
 -                @Override
 -                public Void run() throws Exception {
 -                    JavaActionExecutor ae = new JavaActionExecutor();
 -                    ae.kill(context, context.getAction());
 -
 -                    WorkflowJob wfJob = context.getWorkflow();
 -                    Configuration conf = null;
 -                    if (wfJob.getConf() != null) {
 -                        conf = new XConfiguration(new StringReader(wfJob.getConf()));
 -                    }
 -                    String launcherTag = LauncherMapperHelper.getActionYarnTag(conf, wfJob.getParentId(), context.getAction());
 -                    Configuration jobConf = ae.createBaseHadoopConf(context, XmlUtils.parseXml(actionXml));
 -                    jobConf.set(LauncherMainHadoopUtils.CHILD_MAPREDUCE_JOB_TAGS, LauncherMapperHelper.getTag(launcherTag));
 -                    jobConf.setLong(LauncherMainHadoopUtils.OOZIE_JOB_LAUNCH_TIME,
 -                            context.getAction().getStartTime().getTime());
 -                    Set<String> childSet = LauncherMainHadoopUtils.getChildJobs(jobConf);
 -                    assertEquals(1, childSet.size());
 -
 -                    JobClient jobClient = new JobClient(clusterConf);
 -                    for (String jobId : childSet) {
 -                        RunningJob childJob = jobClient.getJob(jobId);
 -                        assertEquals(JobStatus.State.KILLED.getValue(), childJob.getJobStatus().getRunState());
 -                    }
 -                    assertTrue(ae.isCompleted(context.getAction().getExternalStatus()));
 -                    return null;
 -                }
 -            });
 -
 -            assertEquals(WorkflowAction.Status.DONE, context.getAction().getStatus());
 -            assertEquals("KILLED", context.getAction().getExternalStatus());
 -            assertFalse(runningJob.isSuccessful());
 -        }
 -    }
 -
--        public void testExceptionSubmitException() throws Exception {
++    public void testExceptionSubmitException() throws Exception {
          String actionXml = "<java>" +
                  "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
                  "<name-node>" + getNameNodeUri() + "</name-node>" +

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellActionExecutor.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/test/java/org/apache/oozie/client/TestOozieCLI.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/test/java/org/apache/oozie/client/TestWorkflowXClient.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/test/java/org/apache/oozie/command/wf/TestActionCheckXCommand.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/test/java/org/apache/oozie/command/wf/TestActionStartXCommand.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/test/java/org/apache/oozie/service/TestConfigurationService.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/docs/src/site/twiki/DG_QuickStart.twiki
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/docs/src/site/twiki/ENG_Building.twiki
----------------------------------------------------------------------
diff --cc docs/src/site/twiki/ENG_Building.twiki
index cce219e,f6c88d6..a817b03
--- a/docs/src/site/twiki/ENG_Building.twiki
+++ b/docs/src/site/twiki/ENG_Building.twiki
@@@ -212,10 -212,12 +212,10 @@@ $ bin/mkdistro.sh [-DskipTests
  Running =mkdistro.sh= will create the binary distribution of Oozie. The following options are available to customise
  the versions of the dependencies:
  -Puber - Bundle required hadoop and hcatalog libraries in oozie war
 --P<profile> - default hadoop-2. Valid are hadoop-1, hadoop-2 or hadoop-3. Choose the correct hadoop
 -profile depending on the hadoop version used.
 --Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 2.4.0 for hadoop-2 and 3.0.0-SNAPSHOT for hadoop-3
 +-Dhadoop.version=<version> - default 2.4.0
  -Dhadoop.auth.version=<version> - defaults to hadoop version
  -Ddistcp.version=<version> - defaults to hadoop version
- -Dpig.version=<version> - default 0.12.1
+ -Dpig.version=<version> - default 0.16.0
  -Dpig.classifier=<classifier> - default none
  -Dsqoop.version=<version> - default 1.4.3
  -Dsqoop.classifier=<classifier> - default hadoop100

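Combining the remaining options, an illustrative invocation that builds the distribution against the default Hadoop line would be:

    $ bin/mkdistro.sh -DskipTests -Puber -Dhadoop.version=2.4.0
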
http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/pom.xml
----------------------------------------------------------------------
diff --cc pom.xml
index 9d86e71,704a2ee..ef22b39
--- a/pom.xml
+++ b/pom.xml
@@@ -77,32 -79,40 +79,37 @@@
          <oozie.test.config.file>${oozie.test.default.config.file}</oozie.test.config.file>
  
          <hadoop.version>2.4.0</hadoop.version>
 -        <hadoop.majorversion>2</hadoop.majorversion>
 -        <hadooplib.version>hadoop-${hadoop.majorversion}-${project.version}</hadooplib.version>
 -        <oozie.hadoop-utils.version>hadoop-${hadoop.majorversion}-${project.version}</oozie.hadoop-utils.version>
          <hbase.version>0.94.27</hbase.version>
  
-         <dropwizard.metrics.version>3.1.0</dropwizard.metrics.version>
+         <dropwizard.metrics.version>3.1.2</dropwizard.metrics.version>
  
          <clover.license>/home/jenkins/tools/clover/latest/lib/clover.license</clover.license>
  
 +
           <!-- Sharelib component versions -->
           <hive.version>0.13.1</hive.version>
+          <hive.jline.version>0.9.94</hive.jline.version>
 -         <pig.version>0.16.0</pig.version>
 -         <pig.classifier></pig.classifier>
 +         <pig.version>0.12.1</pig.version>
 +         <pig.classifier>h2</pig.classifier>
           <sqoop.version>1.4.3</sqoop.version>
           <spark.version>1.6.1</spark.version>
           <spark.guava.version>14.0.1</spark.guava.version>
+          <spark.scala.binary.version>2.10</spark.scala.binary.version>
 -         <sqoop.classifier>hadoop100</sqoop.classifier>
 +         <sqoop.classifier>hadoop200</sqoop.classifier>
           <streaming.version>${hadoop.version}</streaming.version>
 -         <distcp.version>${hadoop.version}</distcp.version>
           <hadoop.auth.version>${hadoop.version}</hadoop.auth.version>
  
           <!-- Tomcat version -->
           <tomcat.version>6.0.44</tomcat.version>
  
-          <openjpa.version>2.2.2</openjpa.version>
+          <openjpa.version>2.4.1</openjpa.version>
           <xerces.version>2.10.0</xerces.version>
           <curator.version>2.5.0</curator.version>
 -         <jackson.version>1.8.8</jackson.version>
 +         <jackson.version>1.9.13</jackson.version>
           <log4j.version>1.2.17</log4j.version>
+          <activemq.version>5.13.3</activemq.version>
+          <httpcore.version>4.3.3</httpcore.version>
+          <httpclient.version>4.3.6</httpclient.version>
      </properties>
  
      <modules>
@@@ -1562,8 -1636,8 +1613,9 @@@
              <plugin>
                  <groupId>org.codehaus.mojo</groupId>
                  <artifactId>findbugs-maven-plugin</artifactId>
 +                <version>3.0.1</version>
                  <configuration>
+                     <excludeSubProjects>false</excludeSubProjects>
                      <xmlOutput>true</xmlOutput>
                      <findbugsXmlOutput>true</findbugsXmlOutput>
                      <findbugsXmlWithMessages>true</findbugsXmlWithMessages>
@@@ -1728,29 -1878,14 +1831,27 @@@
              <activation>
                  <activeByDefault>false</activeByDefault>
              </activation>
 -            <properties>
 -                <hadoop.version>3.0.0-SNAPSHOT</hadoop.version>
 -                <hadoop.majorversion>3</hadoop.majorversion>
 -                <pig.classifier>h2</pig.classifier>
 -                <sqoop.classifier>hadoop200</sqoop.classifier>
 -                <jackson.version>1.9.13</jackson.version>
 -            </properties>
 +            <modules>
 +                <module>workflowgenerator</module>
 +            </modules>
 +            <build>
 +                <plugins>
 +                    <plugin>
 +                        <groupId>org.apache.maven.plugins</groupId>
 +                        <artifactId>maven-compiler-plugin</artifactId>
 +                        <configuration>
 +                            <fork>true</fork>
 +                            <source>1.6</source>
 +                            <target>1.6</target>
 +                            <compilerArguments>
 +                                <Xlint/>
 +                                <Xmaxwarns>9999</Xmaxwarns>
 +                            </compilerArguments>
 +                        </configuration>
 +                    </plugin>
 +                </plugins>
 +            </build>
- 
          </profile>
- 
          <profile>
              <id>loginServerExample</id>
              <activation>

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/release-log.txt
----------------------------------------------------------------------
diff --cc release-log.txt
index c75911e,13c11df..af44107
--- a/release-log.txt
+++ b/release-log.txt
@@@ -1,7 -1,100 +1,102 @@@
- -- Oozie 4.3.0 release (trunk - unreleased)
+ -- Oozie 4.4.0 release (trunk - unreleased)
  
 +OOZIE-2590 OYA: Create basic Oozie Launcher Application Master (rkanter)
 +OOZIE-2316 Drop support for Hadoop 1 and 0.23 (asasvari via rkanter)
+ 
+ -- Oozie 4.3.0 release
+ 
+ OOZIE-2673 Include XSD for shell-action:0.3 in documentation (abhishekbafna via rkanter)
+ OOZIE-2194 oozie job -kill doesn't work with spark action (abhishekbafna via rohini)
+ OOZIE-2501 ZK reentrant lock doesn't work for few cases (puru)
+ OOZIE-2582 Populating external child Ids for action failures (abhishekbafna via rohini)
+ OOZIE-2678 Oozie job -kill doesn't work with tez jobs (abhishekbafna via rohini)
+ OOZIE-2676 Make hadoop-2 as the default profile (gezapeti via rkanter)
+ OOZIE-2487 Temporary workaround for Java 8 Javadoc errors (abhishekbafna via rkanter)
+ OOZIE-1978 Forkjoin validation code is ridiculously slow in some cases (pbacsko via rkanter)
+ OOZIE-2682 Oozie test-patch script is not updating Jira with jenkins test report (abhishekbafna via rkanter)
+ OOZIE-2679 Decrease HttpClient library versions due to Hadoop incompatibility (abhishekbafna via rkanter)
+ OOZIE-2661 Coordinator Action status not updated when workflow job SUSPENDED (satishsaley via puru)
+ OOZIE-2273 MiniOozie does not work outside of Oozie (rkanter)
+ OOZIE-2667 Optimize queries for DB export (gezapeti via rkanter)
+ OOZIE-1793 Improve find bugs reporting for Oozie (rkanter)
+ OOZIE-2572 SLA DURATION miss not shown when job is running for longer than expected time (satishsaley via puru)
+ OOZIE-2525 SchemaChecker fails with NPE (rkanter)
+ OOZIE-2672 SLA periodic update does not remove job from map if job is removed from database (satishsaley via puru)
+ OOZIE-2498 Oozie CallerId configuration for downstream components (abhishekbafna via rohini)
+ OOZIE-2491 oozie acl cannot specify group, it doesn't work (abhishekbafna via rohini)
+ OOZIE-2569 Adding yarn-site, core-site, hdfs-site and mapred-site into spark launcher (abhishekbafna via rohini)
+ OOZIE-2675 Drop support for hadoop 0.23 (abhishekbafna via rohini)
+ OOZIE-2588 Support getting credentials for same cluster hcat when credentials config is empty (satishsaley via rohini)
+ OOZIE-2538 Update HttpClient versions to close security vulnerabilities (abhishekbafna via rkanter)
+ OOZIE-2037 Add TLSv1.1,TLSv1.2 (rkanter)
+ OOZIE-2500 -DtestJarSimple option mentioned in minioozie doc does not work (abhishekbafna via rkanter)
+ OOZIE-2552 Update ActiveMQ version for security and other fixes (asasvari via rkanter)
+ OOZIE-2571 Add spark.scala.binary.version Maven property so that Scala 2.11 can be used (jonathak via rkanter)
+ OOZIE-2530 Allow Hive to use a different jline version (poeppt via rkanter)
+ OOZIE-2660 Create documentation for DB Dump/Load functionality (gezapeti via rkanter)
+ OOZIE-2659 TestPauseTransitService is flaky (gezapeti via rkanter)
+ OOZIE-2488 Upgrade to latest OpenJPA version (abhishekbafna via jaydeepvishwakarma)
+ OOZIE-2648 Child jobs shouldn't send callbacks to Oozie (abhishekbafna via rkanter)
+ OOZIE-2584 Eliminate Thread.sleep() calls in TestMemoryLocks (pbacsko via rkanter)
+ OOZIE-2635 TimeZone.getTimeZone has performance issue. (satishsaley via rkanter)
+ OOZIE-2583 oozie throws EL Exception when reference variable name containing dot (abhishekbafna via jaydeepvishwakarma)
+ OOZIE-2653 Close active connection to hcat server in fs action (satishsaley via puru)
+ OOZIE-2617 Read properties files in action configurations (wutaklon via jaydeepvishwakarma)
+ OOZIE-2615 Flaky tests TestCoordActionsKillXCommand.testActionKillCommandActionNumbers and testActionKillCommandDate (pbacsko via rkanter)
+ OOZIE-2632 Provide database dump/load functionality to make database migration easier (gezapeti, rjustice via rkanter)
+ OOZIE-2243 Kill Command does not kill the child job for java action (jaydeepvishwakarma)
+ OOZIE-2649 Can't override sub-workflow configuration property if defined in parent workflow XML (asasvari via rkanter)
+ OOZIE-2656 OozieShareLibCLI uses op system username instead of Kerberos to upload jars (gezapeti via rkanter)
+ OOZIE-1173 Refactor: use ParamChecker in XOozieClient (abhishekbafna via jaydeepvishwakarma)
+ OOZIE-2657 Clean up redundant access modifiers from oozie interfaces (abhishekbafna via jaydeepvishwakarma)
+ OOZIE-2517 Add support for startCreatedTime and endCreatedTime filters for coord and bundles (abhishekbafna via jaydeepvishwakarma)
+ OOZIE-2589 CompletedActionXCommand is hardcoded to wrong priority (tm_linfly via rkanter)
+ OOZIE-2081 WorkflowJob notification to include coordinator action id (seoeun25 via rkanter)
+ OOZIE-2036 Drop support for Java 1.6 (gezapeti via jaydeepvishwakarma)
+ OOZIE-2512 ShareLibservice returns incorrect path for jar (satishsaley via puru)
+ OOZIE-2508 Documentation change for Coord action rerun [OOZIE-1735] (satishsaley via puru)
+ OOZIE-2628 Fix Workflow EL function return types in Documentation (abhishekbafna via jaydeepvishwakarma)
+ OOZIE-2623 Oozie should use a dummy OutputFormat (satishsaley via rohini)
+ OOZIE-2625 Drop workflowgenerator (rkanter)
+ OOZIE-2602 Upgrade oozie to pig 0.16.0 (nperiwal via jaydeepvishwakarma)
+ OOZIE-2493 TestDistcpMain deletes action.xml from wrong filesystem (abhishekbafna via rkanter)
+ OOZIE-2496 Testcase failure due to environment specific line.separator value (abhishekbafna via rkanter)
+ OOZIE-2574 Oozie to support replication-enabled mysql urls (abhishekbafna via rkanter)
+ OOZIE-2433 oozie restart required if oozie metrics to graphing tool broken (nperiwal via jaydeepvishwakarma)
+ OOZIE-2244 Oozie should mask passwords in the logs when logging command arguments (venkatnrangan via jaydeepvishwakarma)
+ OOZIE-2516 Update web service documentation for jobs API (abhishekbafna via rkanter)
+ OOZIE-2497 Some tests fail on windows due to hard coded URIs (abhishekbafna via rkanter)
+ OOZIE-2349 Method getCoordJobInfo(String jobId, String filter, int offset, int length, boolean desc) is not present in LocalOozieClientCoord (nperiwal via rkanter)
+ OOZIE-2172 ZooKeeper Security Tests failed with JVM IBM JAVA (GauravPande via rkanter)
+ OOZIE-2555 Oozie SSL enable setup does not return port for admin -servers (abhishekbafna via rkanter)
+ OOZIE-2440 Exponential re-try policy for workflow action (satishsaley via jaydeepvishwakarma)
+ OOZIE-2539 Incorrect property key is used for 'hive log4j configuration file for execution mode' (abhishekbafna via jaydeepvishwakarma)
+ OOZIE-2565 [Oozie web Console] Make the timezones in settings tab to be sorted by default (meetchandan via jaydeepvishwakarma)
+ OOZIE-2520 SortBy filter for ordering the jobs query results (abhishekbafna via jaydeepvishwakarma)
+ OOZIE-2506 Add logs into RecoverService for logging information about queued commands (abhishekbafna via jaydeepvishwakarma) 
+ OOZIE-2515 Duplicate information for "Changing endtime/pausetime of a Bundle Job" in CommandLineTool wiki (abhishekbafna via jaydeepvishwakarma)
+ OOZIE-2390 Rerun with failed option removing completed output data (jaydeepvishwakarma)
+ OOZIE-2462 When calling ParamChecker.notNull() in CoordActionsIgnoreXCommand.java, "Action" should be passed instead of "Action cannot be null" (mballur via jaydeepvishwakarma)
+ OOZIE-2507 Expose monitoring via JMX beans in Oozie (fdenes via rkanter)
+ OOZIE-2581 Oozie should reset SecurityManager in finally block (satishsaley via rohini)
+ OOZIE-2579 Bulk kill tests in TestBulkWorkflowXCommand might fail because of a race condition (pbacsko via rkanter)
+ OOZIE-2587 Disable SchedulerService on certain tests (pbacsko via rkanter)
+ OOZIE-2603 Give thread pools a meaningful name in CallableQueueService and SchedulerService (pbacsko via rkanter)
+ OOZIE-2436 Fork/join workflow fails with "oozie.action.yarn.tag must not be null" (puru)
+ OOZIE-2578 Oozie example distcp job fails to run within an encrypted zone with checksum match error (pbacsko via rkanter)
+ OOZIE-2362 SQL injection in BulkJPAExecutor (pbacsko via rkanter)
+ OOZIE-2577 Flaky tests TestCoordActionInputCheckXCommand.testTimeout and testTimeoutWithException (pbacsko via rkanter)
+ OOZIE-2570 remove -PtestPatchCompile from patch testing as there is no such profile (gezapeti via rkanter)
+ OOZIE-2504 Create a log4j.properties under HADOOP_CONF_DIR in Shell Action (harsh)
+ OOZIE-2567 HCat connection is not closed while getting hcat cred (puru)
+ OOZIE-2547 Add mapreduce.job.cache.files to spark action (satishsaley via rohini)
+ OOZIE-2550 Flaky tests in TestZKUUIDService.java (pbacsko via rkanter)
+ OOZIE-2445 Doc for - Specifying coordinator input datasets in more logical ways (puru)
+ OOZIE-2541 Possible resource leak in Hive2Credentials (pbacsko via rkanter)
+ OOZIE-2563 Pass spark-defaults.conf to spark action (satishsaley via rohini)
+ OOZIE-2556 TestAbandonedCoordChecker.testCatchupJob is flaky (puru)
+ OOZIE-2522 There can be multiple coord submit from bundle in case of ZK glitch (puru)
+ OOZIE-2553 Cred tag is required for all actions in the workflow even if an action does not require it (me.venkatr via rohini)
  OOZIE-2503 show ChildJobURLs to spark action (satishsaley via puru)
  OOZIE-2551 Feature request: epoch timestamp generation (jtolar via puru)
  OOZIE-2542 Option to disable OpenJPA BrokerImpl finalization (puru)

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/sharelib/distcp/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/sharelib/hcatalog/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/sharelib/hive/pom.xml
----------------------------------------------------------------------
diff --cc sharelib/hive/pom.xml
index ba49403,b339b51..f2e4ac0
--- a/sharelib/hive/pom.xml
+++ b/sharelib/hive/pom.xml
@@@ -142,10 -150,15 +142,15 @@@
              <scope>provided</scope>
          </dependency>
          <dependency>
 -            <groupId>org.apache.oozie</groupId>
 -            <artifactId>oozie-hadoop-utils</artifactId>
 +            <groupId>org.apache.hadoop</groupId>
 +            <artifactId>hadoop-client</artifactId>
              <scope>provided</scope>
          </dependency>
+         <dependency>
+             <groupId>jline</groupId>
+             <artifactId>jline</artifactId>
+             <version>${hive.jline.version}</version>
+         </dependency>
      </dependencies>
  
      <build>

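Since jline is now resolved through the hive.jline.version property, a different release can be swapped in at build time without editing the POM, for example (version shown is hypothetical):

    $ mvn clean install -DskipTests -Dhive.jline.version=2.12
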
http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/sharelib/hive2/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
----------------------------------------------------------------------
diff --cc sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
index 5e71f12,b023b79..72fadcc
--- a/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
+++ b/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
@@@ -192,7 -191,7 +191,6 @@@ public class TestHive2ActionExecutor ex
              "<query>" + query + "</query>" + "</hive2>";
      }
  
--    @SuppressWarnings("deprecation")
      public void testHive2Action() throws Exception {
          setupHiveServer2();
          Path inputDir = new Path(getFsTestCaseDir(), INPUT_DIRNAME);
@@@ -259,9 -266,51 +251,44 @@@
              assertTrue(fs.exists(outputDir));
              assertTrue(fs.isDirectory(outputDir));
          }
+         // Negative testcase with incorrect hive-query.
+         {
+             String query = getHive2BadScript(inputDir.toString(), outputDir.toString());
+             Writer dataWriter = new OutputStreamWriter(fs.create(new Path(inputDir, DATA_FILENAME)));
+             dataWriter.write(SAMPLE_DATA_TEXT);
+             dataWriter.close();
+             Context context = createContext(getQueryActionXml(query));
 -            final RunningJob launcherJob = submitAction(context, Namespace.getNamespace("uri:oozie:hive2-action:0.2"));
 -            String launcherId = context.getAction().getExternalId();
 -            waitFor(200 * 1000, new Predicate() {
 -                @Override
 -                public boolean evaluate() throws Exception {
 -                    return launcherJob.isComplete();
 -                }
 -            });
 -            assertTrue(launcherJob.isSuccessful());
++            final String launcherId = submitAction(context, Namespace.getNamespace("uri:oozie:hive2-action:0.2"));
++            waitUntilYarnAppDoneAndAssertSuccess(launcherId);
+             Configuration conf = new XConfiguration();
+             conf.set("user.name", getTestUser());
+             Map<String, String> actionData = LauncherMapperHelper.getActionData(getFileSystem(), context.getActionDir(),
+                     conf);
+             assertFalse(LauncherMapperHelper.hasIdSwap(actionData));
+             Hive2ActionExecutor ae = new Hive2ActionExecutor();
+             ae.check(context, context.getAction());
+             assertTrue(launcherId.equals(context.getAction().getExternalId()));
+             assertEquals("FAILED/KILLED", context.getAction().getExternalStatus());
+             ae.end(context, context.getAction());
+             assertEquals(WorkflowAction.Status.ERROR, context.getAction().getStatus());
+             assertNull(context.getExternalChildIDs());
+         }
+     }
+ 
+     private String getHive2BadScript(String inputPath, String outputPath) {
+         StringBuilder buffer = new StringBuilder(NEW_LINE);
+         buffer.append("set -v;").append(NEW_LINE);
+         buffer.append("DROP TABLE IF EXISTS test;").append(NEW_LINE);
+         buffer.append("CREATE EXTERNAL TABLE test (a INT) STORED AS");
+         buffer.append(NEW_LINE).append("TEXTFILE LOCATION '");
+         buffer.append(inputPath).append("';").append(NEW_LINE);
+         buffer.append("INSERT OVERWRITE DIRECTORY '");
+         buffer.append(outputPath).append("'").append(NEW_LINE);
+         buffer.append("SELECT (a-1) FROM test-bad;").append(NEW_LINE);
+         return buffer.toString();
      }
  
 -    private RunningJob submitAction(Context context, Namespace ns) throws Exception {
 +    private String submitAction(Context context, Namespace ns) throws Exception {
          Hive2ActionExecutor ae = new Hive2ActionExecutor();
  
          WorkflowAction action = context.getAction();

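The test above now drives the launcher through waitUntilYarnAppDoneAndAssertSuccess(launcherId) instead of polling a MapReduce RunningJob. A rough sketch of what such a helper typically does (assumed here, not the exact Oozie test utility): poll the YARN application report until a terminal state is reached, then assert that the application succeeded.

    import java.util.EnumSet;
    import org.apache.hadoop.yarn.api.records.ApplicationReport;
    import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
    import org.apache.hadoop.yarn.api.records.YarnApplicationState;
    import org.apache.hadoop.yarn.client.api.YarnClient;
    import org.apache.hadoop.yarn.util.ConverterUtils;

    public final class YarnAppWaiter {
        private static final EnumSet<YarnApplicationState> TERMINAL =
                EnumSet.of(YarnApplicationState.FINISHED, YarnApplicationState.FAILED, YarnApplicationState.KILLED);

        // Assumed sketch of a "wait until done and assert success" helper.
        public static void waitAndAssertSuccess(YarnClient yarnClient, String appId) throws Exception {
            ApplicationReport report = yarnClient.getApplicationReport(ConverterUtils.toApplicationId(appId));
            while (!TERMINAL.contains(report.getYarnApplicationState())) {
                Thread.sleep(1000);
                report = yarnClient.getApplicationReport(ConverterUtils.toApplicationId(appId));
            }
            if (report.getFinalApplicationStatus() != FinalApplicationStatus.SUCCEEDED) {
                throw new AssertionError("Application " + appId + " ended with " + report.getFinalApplicationStatus());
            }
        }
    }
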
http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/sharelib/oozie/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/oozie/blob/c49f382b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
----------------------------------------------------------------------
diff --cc sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
index d17c431,30d68e2..0815318
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
@@@ -41,15 -41,14 +41,17 @@@ public class JavaMain extends LauncherM
          Configuration actionConf = loadActionConf();
  
          setYarnTag(actionConf);
+         setApplicationTags(actionConf, TEZ_APPLICATION_TAGS);
+         setApplicationTags(actionConf, SPARK_YARN_TAGS);
  
 -        LauncherMainHadoopUtils.killChildYarnJobs(actionConf);
 +        LauncherMain.killChildYarnJobs(actionConf);
  
          Class<?> klass = actionConf.getClass(JAVA_MAIN_CLASS, Object.class);
 -        System.out.println("Main class        : " + klass.getName());
 -        LauncherMapper.printArgs("Arguments         :", args);
 +        System.out.println("Java action main class        : " + klass.getName());
 +        System.out.println("Java action arguments         :");
 +        for (String arg : args) {
 +            System.out.println("                    " + arg);
 +        }
          System.out.println();
          Method mainMethod = klass.getMethod("main", String[].class);
          try {