You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@oozie.apache.org by pb...@apache.org on 2016/12/13 13:19:32 UTC
[01/48] oozie git commit: Merge branch 'apache_forked_master' into
apache_forked_oya [Forced Update!]
Repository: oozie
Updated Branches:
refs/heads/oya 34a96f774 -> a1537e1bb (forced update)
Merge branch 'apache_forked_master' into apache_forked_oya
Change-Id: I447eeadb5d4b7469999d057544aff4f33551c7b9
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/b99933b7
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/b99933b7
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/b99933b7
Branch: refs/heads/oya
Commit: b99933b7c9253c1d3f5e8b1e933c04b5fdb677d8
Parents: 4a9542c 1b7da69
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Tue Nov 8 16:37:43 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Tue Nov 8 16:37:43 2016 +0100
----------------------------------------------------------------------
bin/findbugs-diff-0.1.0-all.jar.md5 | 1 +
bin/test-patch-11-findbugs-diff | 325 +++++++++++++++++
.../java/org/apache/oozie/cli/OozieCLI.java | 7 +-
.../org/apache/oozie/client/OozieClient.java | 88 +++--
core/pom.xml | 52 ++-
.../org/apache/oozie/CoordinatorEngine.java | 1 +
.../main/java/org/apache/oozie/DagEngine.java | 1 +
.../jpa/WorkflowsJobGetJPAExecutor.java | 8 +-
.../oozie/service/InstrumentationService.java | 6 +-
.../apache/oozie/store/StoreStatusFilter.java | 33 ++
.../org/apache/oozie/util/Instrumentation.java | 8 +-
.../org/apache/oozie/util/JobsFilterUtils.java | 1 +
.../org/apache/oozie/util/PasswordMasker.java | 121 +++++++
.../action/hadoop/TestFsActionExecutor.java | 3 +
.../org/apache/oozie/client/TestOozieCLI.java | 10 +-
.../command/coord/TestCoordRerunXCommand.java | 3 +
.../oozie/dependency/TestHCatURIHandler.java | 3 +
.../jpa/TestBundleJobInfoGetJPAExecutor.java | 94 +++++
.../jpa/TestCoordJobInfoGetJPAExecutor.java | 95 ++++-
.../jpa/TestWorkflowsJobGetJPAExecutor.java | 73 ++++
.../org/apache/oozie/test/MiniHCatServer.java | 10 -
.../apache/oozie/util/TestPasswordMasker.java | 92 +++++
.../test/resources/instrumentation-os-env.json | 47 +++
.../instrumentation-system-properties.json | 88 +++++
distro/pom.xml | 1 -
docs/src/site/twiki/DG_CommandLineTool.twiki | 354 +++++++++++--------
docs/src/site/twiki/DG_QuickStart.twiki | 6 +-
docs/src/site/twiki/ENG_Building.twiki | 2 +-
docs/src/site/twiki/WebServicesAPI.twiki | 1 +
.../src/site/twiki/WorkflowFunctionalSpec.twiki | 2 +-
examples/src/main/apps/hcatalog/README | 29 +-
examples/src/main/apps/hcatalog/job.properties | 2 +-
pom.xml | 98 ++++-
release-log.txt | 15 +
sharelib/distcp/pom.xml | 1 -
sharelib/hcatalog/pom.xml | 1 -
sharelib/hive/pom.xml | 1 -
.../oozie/action/hadoop/TestHiveMain.java | 4 +-
sharelib/hive2/pom.xml | 60 ++++
.../action/hadoop/TestHive2ActionExecutor.java | 1 +
sharelib/oozie/pom.xml | 1 -
.../hadoop/OozieLauncherOutputCommitter.java | 14 +
sharelib/pig/pom.xml | 25 +-
sharelib/spark/pom.xml | 5 +-
.../apache/oozie/action/hadoop/SparkMain.java | 196 +++++++---
.../oozie/action/hadoop/TestSparkMain.java | 42 ++-
sharelib/sqoop/pom.xml | 1 -
sharelib/streaming/pom.xml | 32 ++
48 files changed, 1751 insertions(+), 313 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/core/pom.xml
----------------------------------------------------------------------
diff --cc core/pom.xml
index c2c005e,163bb3b..8783d1f
--- a/core/pom.xml
+++ b/core/pom.xml
@@@ -80,6 -92,6 +92,22 @@@
<dependency>
<groupId>org.apache.hadoop</groupId>
++ <artifactId>hadoop-yarn-api</artifactId>
++ </dependency>
++
++ <dependency>
++ <groupId>org.apache.hadoop</groupId>
++ <artifactId>hadoop-yarn-common</artifactId>
++ </dependency>
++
++ <dependency>
++ <groupId>org.apache.commons</groupId>
++ <artifactId>commons-lang3</artifactId>
++ <scope>test</scope>
++ </dependency>
++
++ <dependency>
++ <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
</dependency>
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/core/src/test/java/org/apache/oozie/client/TestOozieCLI.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/docs/src/site/twiki/DG_QuickStart.twiki
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/docs/src/site/twiki/ENG_Building.twiki
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/pom.xml
----------------------------------------------------------------------
diff --cc pom.xml
index ce0bbde,a3db3da..7ea789d
--- a/pom.xml
+++ b/pom.xml
@@@ -78,25 -78,31 +78,31 @@@
</oozie.test.default.config.file>
<oozie.test.config.file>${oozie.test.default.config.file}</oozie.test.config.file>
-- <hadoop.version>2.4.0</hadoop.version>
++ <hadoop.version>2.6.0</hadoop.version>
+ <hadoop.majorversion>2</hadoop.majorversion>
+ <hadooplib.version>hadoop-${hadoop.majorversion}-${project.version}</hadooplib.version>
+ <oozie.hadoop-utils.version>hadoop-${hadoop.majorversion}-${project.version}</oozie.hadoop-utils.version>
<hbase.version>0.94.27</hbase.version>
<dropwizard.metrics.version>3.1.2</dropwizard.metrics.version>
<clover.license>/home/jenkins/tools/clover/latest/lib/clover.license</clover.license>
+
<!-- Sharelib component versions -->
- <hive.version>0.13.1</hive.version>
- <hive.jline.version>0.9.94</hive.jline.version>
- <pig.version>0.12.1</pig.version>
+ <hive.version>1.2.0</hive.version>
+ <hive.jline.version>2.12</hive.jline.version>
- <hive.classifier>core</hive.classifier>
+ <pig.version>0.16.0</pig.version>
- <pig.classifier></pig.classifier>
+ <pig.classifier>h2</pig.classifier>
++ <hive.classifier>core</hive.classifier>
<sqoop.version>1.4.3</sqoop.version>
<spark.version>1.6.1</spark.version>
+ <spark.streaming.kafka.version>1.6.1</spark.streaming.kafka.version>
+ <spark.bagel.version>1.6.1</spark.bagel.version>
<spark.guava.version>14.0.1</spark.guava.version>
<spark.scala.binary.version>2.10</spark.scala.binary.version>
- <sqoop.classifier>hadoop100</sqoop.classifier>
+ <sqoop.classifier>hadoop200</sqoop.classifier>
<streaming.version>${hadoop.version}</streaming.version>
- <distcp.version>${hadoop.version}</distcp.version>
<hadoop.auth.version>${hadoop.version}</hadoop.auth.version>
<!-- Tomcat version -->
@@@ -332,6 -348,6 +340,18 @@@
<dependency>
<groupId>org.apache.hadoop</groupId>
++ <artifactId>hadoop-yarn-api</artifactId>
++ <version>${hadoop.version}</version>
++ </dependency>
++
++ <dependency>
++ <groupId>org.apache.hadoop</groupId>
++ <artifactId>hadoop-yarn-common</artifactId>
++ <version>${hadoop.version}</version>
++ </dependency>
++
++ <dependency>
++ <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<version>${hadoop.auth.version}</version>
<scope>compile</scope>
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/release-log.txt
----------------------------------------------------------------------
diff --cc release-log.txt
index af44107,70ffaa6..e850a24
--- a/release-log.txt
+++ b/release-log.txt
@@@ -1,10 -1,24 +1,25 @@@
-- Oozie 4.4.0 release (trunk - unreleased)
+OOZIE-2590 OYA: Create basic Oozie Launcher Application Master (rkanter)
+OOZIE-2316 Drop support for Hadoop 1 and 0.23 (asasvari via rkanter)
+ OOZIE-1459 Remove the version in the child poms for maven-antrun-plugin (Jan Hentschel via rkanter)
+ OOZIE-2225 Add wild card filter for gathering jobs (sai-krish via rkanter)
+ OOZIE-2536 Hadoop's cleanup of local directory in uber mode causing failures (satishsaley via rohini)
+ OOZIE-1986 Add FindBugs report to pre-commit build (andras.piros via rkanter)
+ OOZIE-2634 Queue dump command message is confusing when the queue is empty (andras.piros via rkanter)
-
-- Oozie 4.3.0 release
+ OOZIE-2719 Test case failure (abhishekbafna via jaydeepvishwakarma)
+ OOZIE-2674 Improve oozie commands documentation (abhishekbafna via rkanter)
+ OOZIE-2710 Oozie HCatalog example workflow fails (abhishekbafna via shwethags)
+ OOZIE-2705 Oozie Spark action ignores spark.executor.extraJavaOptions and spark.driver.extraJavaOptions (gezapeti via rkanter)
+ OOZIE-2621 Use hive-exec-<version>-core instead of hive-exec in oozie-core (gezapeti via rkanter)
+ OOZIE-2613 Upgrade hive version from 0.13.1 to 1.2.0 (abhishekbafna via rkanter)
+ OOZIE-2658 --driver-class-path can overwrite the classpath in SparkMain (gezapeti via rkanter)
+ OOZIE-1814 Oozie should mask any passwords in logs and REST interfaces (andras.piros via rkanter)
+ OOZIE-2622 ExtJS 2.2 is no longer available (rkanter)
+ OOZIE-2606 Set spark.yarn.jars to fix Spark 2.0 with Oozie (satishsaley via rohini)
OOZIE-2673 Include XSD for shell-action:0.3 in documentation (abhishekbafna via rkanter)
OOZIE-2194 oozie job -kill doesn't work with spark action (abhishekbafna via rohini)
OOZIE-2501 ZK reentrant lock doesn't work for few cases (puru)
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/sharelib/distcp/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/sharelib/hcatalog/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/sharelib/hive/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveMain.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/sharelib/hive2/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/sharelib/oozie/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/sharelib/pig/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/sharelib/spark/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b99933b7/sharelib/sqoop/pom.xml
----------------------------------------------------------------------
[12/48] oozie git commit: OOZIE-2723 JSON.org license is now CatX
(abhishekbafna via shwethags)
Posted by pb...@apache.org.
OOZIE-2723 JSON.org license is now CatX (abhishekbafna via shwethags)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/586f5227
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/586f5227
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/586f5227
Branch: refs/heads/oya
Commit: 586f522721804a84bb40ad4aa528530008c95e36
Parents: 86009e9
Author: Shwetha GS <ss...@hortonworks.com>
Authored: Wed Nov 16 09:54:17 2016 +0530
Committer: Shwetha GS <ss...@hortonworks.com>
Committed: Wed Nov 16 09:54:17 2016 +0530
----------------------------------------------------------------------
NOTICE.txt | 3 -
RELEASE_NOTES.txt | 4 +
bin/create-release-artifact | 2 +
core/pom.xml | 5 -
.../apache/oozie/util/ELConstantsFunctions.java | 6 +-
.../client/TestOozieClientWithFakeServlet.java | 4 +-
docs/src/site/twiki/DG_QuickStart.twiki | 2 +
pom.xml | 6 -
release-log.txt | 1 +
sharelib/hive/pom.xml | 4 +
sharelib/hive2/pom.xml | 10 +
sharelib/pig/pom.xml | 4 +
src/main/assemblies/distro-jetty.xml | 7 +-
src/main/assemblies/distro-tomcat.xml | 7 +-
webapp/src/main/webapp/index.jsp | 1 -
webapp/src/main/webapp/json2.js | 498 -------------------
16 files changed, 40 insertions(+), 524 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/NOTICE.txt
----------------------------------------------------------------------
diff --git a/NOTICE.txt b/NOTICE.txt
index e2a937c..b70f114 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -9,9 +9,6 @@ Copyright 2011 The Apache Software Foundation
This product includes software developed by The Apache Software
Foundation (http://www.apache.org/).
-This product includes org.json (http://www.json.org/java/index.html),
-Copyright (c) 2002 JSON.org
-
Detailed License information for all components can be found in the
documentation in the ooziedocs.war at index.html##LicenseInfo
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/RELEASE_NOTES.txt
----------------------------------------------------------------------
diff --git a/RELEASE_NOTES.txt b/RELEASE_NOTES.txt
new file mode 100644
index 0000000..6af49f6
--- /dev/null
+++ b/RELEASE_NOTES.txt
@@ -0,0 +1,4 @@
+These notes are for Oozie 4.3.0 release.
+
+Oozie no longer includes JSON.org dependencies. If Hive or Hive2 action in workflow needs JSON.org dependencies, then they need to be
+added manually into respective sharelib.
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/bin/create-release-artifact
----------------------------------------------------------------------
diff --git a/bin/create-release-artifact b/bin/create-release-artifact
index 1f1bcd2..cee5fa6 100755
--- a/bin/create-release-artifact
+++ b/bin/create-release-artifact
@@ -58,6 +58,8 @@ run tar czf oozie-${releaseVersion}.tar.gz oozie-$releaseVersion
run cp oozie-$releaseVersion/release-log.txt .
+run cp oozie-$releaseVersion/RELEASE_NOTES.txt .
+
run cd oozie-$releaseVersion
run mvn apache-rat:check
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index 163bb3b..1dd1045 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -117,11 +117,6 @@
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <scope>compile</scope>
- </dependency>
- <dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<scope>compile</scope>
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/core/src/main/java/org/apache/oozie/util/ELConstantsFunctions.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/util/ELConstantsFunctions.java b/core/src/main/java/org/apache/oozie/util/ELConstantsFunctions.java
index 4524623..87db66e 100644
--- a/core/src/main/java/org/apache/oozie/util/ELConstantsFunctions.java
+++ b/core/src/main/java/org/apache/oozie/util/ELConstantsFunctions.java
@@ -20,7 +20,7 @@ package org.apache.oozie.util;
import org.apache.hadoop.conf.Configuration;
import org.jdom.Element;
-import org.json.JSONObject;
+import org.json.simple.JSONValue;
import java.text.SimpleDateFormat;
import java.util.Map;
@@ -191,8 +191,8 @@ public class ELConstantsFunctions {
}
public static String toJsonStr(Map<String, String> map) {
- JSONObject json = new JSONObject(map);
- return XmlUtils.escapeCharsForXML(json.toString());
+ String json = JSONValue.toJSONString(map);
+ return XmlUtils.escapeCharsForXML(json);
}
public static String toPropertiesStr(Map<String, String> map) {
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/core/src/test/java/org/apache/oozie/client/TestOozieClientWithFakeServlet.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/client/TestOozieClientWithFakeServlet.java b/core/src/test/java/org/apache/oozie/client/TestOozieClientWithFakeServlet.java
index 179b205..054d949 100644
--- a/core/src/test/java/org/apache/oozie/client/TestOozieClientWithFakeServlet.java
+++ b/core/src/test/java/org/apache/oozie/client/TestOozieClientWithFakeServlet.java
@@ -34,7 +34,7 @@ import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.CoordinatorAction.Status;
import org.apache.oozie.client.rest.BulkResponseImpl;
import org.apache.oozie.client.rest.JsonTags;
-import org.json.JSONArray;
+import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.junit.Test;
@@ -150,7 +150,7 @@ public class TestOozieClientWithFakeServlet {
ByteArrayInputStream result = new ByteArrayInputStream("".getBytes());
if (check) {
JSONArray array = new JSONArray();
- array.put(2L);
+ array.add(2L);
String s = JSONValue.toJSONString(array);
result = new ByteArrayInputStream(s.getBytes());
check = false;
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/docs/src/site/twiki/DG_QuickStart.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/DG_QuickStart.twiki b/docs/src/site/twiki/DG_QuickStart.twiki
index 6dec179..76c8c64 100644
--- a/docs/src/site/twiki/DG_QuickStart.twiki
+++ b/docs/src/site/twiki/DG_QuickStart.twiki
@@ -83,6 +83,8 @@ The Java 1.7+ =bin= directory should be in the command path.
*NOTE:* The ExtJS library is not bundled with Oozie because it uses a different license.
+*NOTE:* Oozie UI browser compatibility Chrome (all), Firefox (3.5), Internet Explorer (8.0), Opera (10.5).
+
*NOTE:* It is recommended to use a Oozie Unix user for the Oozie server.
Expand the Oozie distribution =tar.gz=.
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index acedc6e..551a622 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1409,12 +1409,6 @@
</dependency>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>${json.version}</version>
- </dependency>
-
- <dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>${hive.version}</version>
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 9dd6adb..add46f9 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -10,6 +10,7 @@ OOZIE-2634 Queue dump command message is confusing when the queue is empty (andr
-- Oozie 4.3.0 release
+OOZIE-2723 JSON.org license is now CatX (abhishekbafna via shwethags)
OOZIE-2725 Upgrade Tomcat to 6.0.47 for the latest security fixes (rkanter via shwethags)
OOZIE-2724 coord:current resolves monthly/yearly dependencies incorrectly (satishsaley via shwethags)
OOZIE-2719 Test case failure (abhishekbafna via jaydeepvishwakarma)
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/sharelib/hive/pom.xml
----------------------------------------------------------------------
diff --git a/sharelib/hive/pom.xml b/sharelib/hive/pom.xml
index c5e9f12..2b52a82 100644
--- a/sharelib/hive/pom.xml
+++ b/sharelib/hive/pom.xml
@@ -91,6 +91,10 @@
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.json</groupId>
+ <artifactId>json</artifactId>
+ </exclusion>
</exclusions>
</dependency>
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/sharelib/hive2/pom.xml
----------------------------------------------------------------------
diff --git a/sharelib/hive2/pom.xml b/sharelib/hive2/pom.xml
index e81bfbe..4ef219a 100644
--- a/sharelib/hive2/pom.xml
+++ b/sharelib/hive2/pom.xml
@@ -91,10 +91,20 @@
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.json</groupId>
+ <artifactId>json</artifactId>
+ </exclusion>
</exclusions>
</dependency>
<dependency>
+ <groupId>org.json</groupId>
+ <artifactId>json</artifactId>
+ <version>${json.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-beeline</artifactId>
<scope>compile</scope>
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/sharelib/pig/pom.xml
----------------------------------------------------------------------
diff --git a/sharelib/pig/pom.xml b/sharelib/pig/pom.xml
index 67e5bf2..da246f9 100644
--- a/sharelib/pig/pom.xml
+++ b/sharelib/pig/pom.xml
@@ -138,6 +138,10 @@
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.json</groupId>
+ <artifactId>json</artifactId>
+ </exclusion>
</exclusions>
</dependency>
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/src/main/assemblies/distro-jetty.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distro-jetty.xml b/src/main/assemblies/distro-jetty.xml
index a4bee03..ba23a50 100644
--- a/src/main/assemblies/distro-jetty.xml
+++ b/src/main/assemblies/distro-jetty.xml
@@ -37,10 +37,11 @@
<directory>${basedir}/../</directory>
<outputDirectory>/</outputDirectory>
<includes>
- <include>license.txt</include>
- <include>notice.txt</include>
- <include>readme.txt</include>
+ <include>LICENSE.txt</include>
+ <include>NOTICE.txt</include>
+ <include>README.txt</include>
<include>release-log.txt</include>
+ <include>RELEASE_NOTES.txt</include>
</includes>
</fileSet>
<fileSet>
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/src/main/assemblies/distro-tomcat.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distro-tomcat.xml b/src/main/assemblies/distro-tomcat.xml
index d7018a3..ba4f56b 100644
--- a/src/main/assemblies/distro-tomcat.xml
+++ b/src/main/assemblies/distro-tomcat.xml
@@ -37,10 +37,11 @@
<directory>${basedir}/../</directory>
<outputDirectory>/</outputDirectory>
<includes>
- <include>license.txt</include>
- <include>notice.txt</include>
- <include>readme.txt</include>
+ <include>LICENSE.txt</include>
+ <include>NOTICE.txt</include>
+ <include>README.txt</include>
<include>release-log.txt</include>
+ <include>RELEASE_NOTES.txt</include>
</includes>
</fileSet>
<fileSet>
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/webapp/src/main/webapp/index.jsp
----------------------------------------------------------------------
diff --git a/webapp/src/main/webapp/index.jsp b/webapp/src/main/webapp/index.jsp
index feedde9..61d32ae 100644
--- a/webapp/src/main/webapp/index.jsp
+++ b/webapp/src/main/webapp/index.jsp
@@ -35,7 +35,6 @@
<script type="text/javascript" src="ext-2.2/adapter/ext/ext-base.js"></script>
<script type="text/javascript" src="ext-2.2/ext-all.js"></script>
<script type="text/javascript" src="ext-2.2/examples/grid/RowExpander.js"></script>
- <script type="text/javascript" src="json2.js"></script>
<script type="text/javascript" src="oozie-console.js"></script>
</head>
http://git-wip-us.apache.org/repos/asf/oozie/blob/586f5227/webapp/src/main/webapp/json2.js
----------------------------------------------------------------------
diff --git a/webapp/src/main/webapp/json2.js b/webapp/src/main/webapp/json2.js
deleted file mode 100644
index 32b21db..0000000
--- a/webapp/src/main/webapp/json2.js
+++ /dev/null
@@ -1,498 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/*
- http://www.JSON.org/json2.js
- 2008-11-19
-
- Public Domain.
-
- NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
-
- See http://www.JSON.org/js.html
-
- This file creates a global JSON object containing two methods: stringify
- and parse.
-
- JSON.stringify(value, replacer, space)
- value any JavaScript value, usually an object or array.
-
- replacer an optional parameter that determines how object
- values are stringified for objects. It can be a
- function or an array of strings.
-
- space an optional parameter that specifies the indentation
- of nested structures. If it is omitted, the text will
- be packed without extra whitespace. If it is a number,
- it will specify the number of spaces to indent at each
- level. If it is a string (such as '\t' or ' '),
- it contains the characters used to indent at each level.
-
- This method produces a JSON text from a JavaScript value.
-
- When an object value is found, if the object contains a toJSON
- method, its toJSON method will be called and the result will be
- stringified. A toJSON method does not serialize: it returns the
- value represented by the name/value pair that should be serialized,
- or undefined if nothing should be serialized. The toJSON method
- will be passed the key associated with the value, and this will be
- bound to the object holding the key.
-
- For example, this would serialize Dates as ISO strings.
-
- Date.prototype.toJSON = function (key) {
- function f(n) {
- // Format integers to have at least two digits.
- return n < 10 ? '0' + n : n;
- }
-
- return this.getUTCFullYear() + '-' +
- f(this.getUTCMonth() + 1) + '-' +
- f(this.getUTCDate()) + 'T' +
- f(this.getUTCHours()) + ':' +
- f(this.getUTCMinutes()) + ':' +
- f(this.getUTCSeconds()) + 'Z';
- };
-
- You can provide an optional replacer method. It will be passed the
- key and value of each member, with this bound to the containing
- object. The value that is returned from your method will be
- serialized. If your method returns undefined, then the member will
- be excluded from the serialization.
-
- If the replacer parameter is an array of strings, then it will be
- used to select the members to be serialized. It filters the results
- such that only members with keys listed in the replacer array are
- stringified.
-
- Values that do not have JSON representations, such as undefined or
- functions, will not be serialized. Such values in objects will be
- dropped; in arrays they will be replaced with null. You can use
- a replacer function to replace those with JSON values.
- JSON.stringify(undefined) returns undefined.
-
- The optional space parameter produces a stringification of the
- value that is filled with line breaks and indentation to make it
- easier to read.
-
- If the space parameter is a non-empty string, then that string will
- be used for indentation. If the space parameter is a number, then
- the indentation will be that many spaces.
-
- Example:
-
- text = JSON.stringify(['e', {pluribus: 'unum'}]);
- // text is '["e",{"pluribus":"unum"}]'
-
-
- text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
- // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
-
- text = JSON.stringify([new Date()], function (key, value) {
- return this[key] instanceof Date ?
- 'Date(' + this[key] + ')' : value;
- });
- // text is '["Date(---current time---)"]'
-
-
- JSON.parse(text, reviver)
- This method parses a JSON text to produce an object or array.
- It can throw a SyntaxError exception.
-
- The optional reviver parameter is a function that can filter and
- transform the results. It receives each of the keys and values,
- and its return value is used instead of the original value.
- If it returns what it received, then the structure is not modified.
- If it returns undefined then the member is deleted.
-
- Example:
-
- // Parse the text. Values that look like ISO date strings will
- // be converted to Date objects.
-
- myData = JSON.parse(text, function (key, value) {
- var a;
- if (typeof value === 'string') {
- a =
- /^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
- if (a) {
- return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
- +a[5], +a[6]));
- }
- }
- return value;
- });
-
- myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
- var d;
- if (typeof value === 'string' &&
- value.slice(0, 5) === 'Date(' &&
- value.slice(-1) === ')') {
- d = new Date(value.slice(5, -1));
- if (d) {
- return d;
- }
- }
- return value;
- });
-
-
- This is a reference implementation. You are free to copy, modify, or
- redistribute.
-
- This code should be minified before deployment.
- See http://javascript.crockford.com/jsmin.html
-
- USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
- NOT CONTROL.
- */
-
-/*jslint evil: true */
-
-/*global JSON */
-
-/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply,
- call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
- getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
- lastIndex, length, parse, prototype, push, replace, slice, stringify,
- test, toJSON, toString, valueOf
- */
-
-// Create a JSON object only if one does not already exist. We create the
-// methods in a closure to avoid creating global variables.
-
-if (!this.JSON) {
- JSON = {};
-}
-(function () {
-
- function f(n) {
- // Format integers to have at least two digits.
- return n < 10 ? '0' + n : n;
- }
-
- if (typeof Date.prototype.toJSON !== 'function') {
-
- Date.prototype.toJSON = function (key) {
-
- return this.getUTCFullYear() + '-' +
- f(this.getUTCMonth() + 1) + '-' +
- f(this.getUTCDate()) + 'T' +
- f(this.getUTCHours()) + ':' +
- f(this.getUTCMinutes()) + ':' +
- f(this.getUTCSeconds()) + 'Z';
- };
-
- String.prototype.toJSON =
- Number.prototype.toJSON =
- Boolean.prototype.toJSON = function (key) {
- return this.valueOf();
- };
- }
-
- var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
- escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
- gap,
- indent,
- meta = { // table of character substitutions
- '\b': '\\b',
- '\t': '\\t',
- '\n': '\\n',
- '\f': '\\f',
- '\r': '\\r',
- '"' : '\\"',
- '\\': '\\\\'
- },
- rep;
-
-
- function quote(string) {
-
- // If the string contains no control characters, no quote characters, and no
- // backslash characters, then we can safely slap some quotes around it.
- // Otherwise we must also replace the offending characters with safe escape
- // sequences.
-
- escapable.lastIndex = 0;
- return escapable.test(string) ?
- '"' + string.replace(escapable, function (a) {
- var c = meta[a];
- return typeof c === 'string' ? c :
- '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
- }) + '"' :
- '"' + string + '"';
- }
-
-
- function str(key, holder) {
-
- // Produce a string from holder[key].
-
- var i, // The loop counter.
- k, // The member key.
- v, // The member value.
- length,
- mind = gap,
- partial,
- value = holder[key];
-
- // If the value has a toJSON method, call it to obtain a replacement value.
-
- if (value && typeof value === 'object' &&
- typeof value.toJSON === 'function') {
- value = value.toJSON(key);
- }
-
- // If we were called with a replacer function, then call the replacer to
- // obtain a replacement value.
-
- if (typeof rep === 'function') {
- value = rep.call(holder, key, value);
- }
-
- // What happens next depends on the value's type.
-
- switch (typeof value) {
- case 'string':
- return quote(value);
-
- case 'number':
-
- // JSON numbers must be finite. Encode non-finite numbers as null.
-
- return isFinite(value) ? String(value) : 'null';
-
- case 'boolean':
- case 'null':
-
- // If the value is a boolean or null, convert it to a string. Note:
- // typeof null does not produce 'null'. The case is included here in
- // the remote chance that this gets fixed someday.
-
- return String(value);
-
- // If the type is 'object', we might be dealing with an object or an array or
- // null.
-
- case 'object':
-
- // Due to a specification blunder in ECMAScript, typeof null is 'object',
- // so watch out for that case.
-
- if (!value) {
- return 'null';
- }
-
- // Make an array to hold the partial results of stringifying this object value.
-
- gap += indent;
- partial = [];
-
- // Is the value an array?
-
- if (Object.prototype.toString.apply(value) === '[object Array]') {
-
- // The value is an array. Stringify every element. Use null as a placeholder
- // for non-JSON values.
-
- length = value.length;
- for (i = 0; i < length; i += 1) {
- partial[i] = str(i, value) || 'null';
- }
-
- // Join all of the elements together, separated with commas, and wrap them in
- // brackets.
-
- v = partial.length === 0 ? '[]' :
- gap ? '[\n' + gap +
- partial.join(',\n' + gap) + '\n' +
- mind + ']' :
- '[' + partial.join(',') + ']';
- gap = mind;
- return v;
- }
-
- // If the replacer is an array, use it to select the members to be stringified.
-
- if (rep && typeof rep === 'object') {
- length = rep.length;
- for (i = 0; i < length; i += 1) {
- k = rep[i];
- if (typeof k === 'string') {
- v = str(k, value);
- if (v) {
- partial.push(quote(k) + (gap ? ': ' : ':') + v);
- }
- }
- }
- }
- else {
-
- // Otherwise, iterate through all of the keys in the object.
-
- for (k in value) {
- if (Object.hasOwnProperty.call(value, k)) {
- v = str(k, value);
- if (v) {
- partial.push(quote(k) + (gap ? ': ' : ':') + v);
- }
- }
- }
- }
-
- // Join all of the member texts together, separated with commas,
- // and wrap them in braces.
-
- v = partial.length === 0 ? '{}' :
- gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' +
- mind + '}' : '{' + partial.join(',') + '}';
- gap = mind;
- return v;
- }
- }
-
- // If the JSON object does not yet have a stringify method, give it one.
-
- if (typeof JSON.stringify !== 'function') {
- JSON.stringify = function (value, replacer, space) {
-
- // The stringify method takes a value and an optional replacer, and an optional
- // space parameter, and returns a JSON text. The replacer can be a function
- // that can replace values, or an array of strings that will select the keys.
- // A default replacer method can be provided. Use of the space parameter can
- // produce text that is more easily readable.
-
- var i;
- gap = '';
- indent = '';
-
- // If the space parameter is a number, make an indent string containing that
- // many spaces.
-
- if (typeof space === 'number') {
- for (i = 0; i < space; i += 1) {
- indent += ' ';
- }
-
- // If the space parameter is a string, it will be used as the indent string.
-
- }
- else if (typeof space === 'string') {
- indent = space;
- }
-
- // If there is a replacer, it must be a function or an array.
- // Otherwise, throw an error.
-
- rep = replacer;
- if (replacer && typeof replacer !== 'function' &&
- (typeof replacer !== 'object' ||
- typeof replacer.length !== 'number')) {
- throw new Error('JSON.stringify');
- }
-
- // Make a fake root object containing our value under the key of ''.
- // Return the result of stringifying the value.
-
- return str('', {'': value});
- };
- }
-
-
- // If the JSON object does not yet have a parse method, give it one.
-
- if (typeof JSON.parse !== 'function') {
- JSON.parse = function (text, reviver) {
-
- // The parse method takes a text and an optional reviver function, and returns
- // a JavaScript value if the text is a valid JSON text.
-
- var j;
-
- function walk(holder, key) {
-
- // The walk method is used to recursively walk the resulting structure so
- // that modifications can be made.
-
- var k, v, value = holder[key];
- if (value && typeof value === 'object') {
- for (k in value) {
- if (Object.hasOwnProperty.call(value, k)) {
- v = walk(value, k);
- if (v !== undefined) {
- value[k] = v;
- }
- else {
- delete value[k];
- }
- }
- }
- }
- return reviver.call(holder, key, value);
- }
-
-
- // Parsing happens in four stages. In the first stage, we replace certain
- // Unicode characters with escape sequences. JavaScript handles many characters
- // incorrectly, either silently deleting them, or treating them as line endings.
-
- cx.lastIndex = 0;
- if (cx.test(text)) {
- text = text.replace(cx, function (a) {
- return '\\u' +
- ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
- });
- }
-
- // In the second stage, we run the text against regular expressions that look
- // for non-JSON patterns. We are especially concerned with '()' and 'new'
- // because they can cause invocation, and '=' because it can cause mutation.
- // But just to be safe, we want to reject all unexpected forms.
-
- // We split the second stage into 4 regexp operations in order to work around
- // crippling inefficiencies in IE's and Safari's regexp engines. First we
- // replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
- // replace all simple value tokens with ']' characters. Third, we delete all
- // open brackets that follow a colon or comma or that begin the text. Finally,
- // we look to see that the remaining characters are only whitespace or ']' or
- // ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.
-
- if (/^[\],:{}\s]*$/.
- test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@').
- replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']').
- replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
-
- // In the third stage we use the eval function to compile the text into a
- // JavaScript structure. The '{' operator is subject to a syntactic ambiguity
- // in JavaScript: it can begin a block or an object literal. We wrap the text
- // in parens to eliminate the ambiguity.
-
- j = eval('(' + text + ')');
-
- // In the optional fourth stage, we recursively walk the new structure, passing
- // each name/value pair to a reviver function for possible transformation.
-
- return typeof reviver === 'function' ?
- walk({'': j}, '') : j;
- }
-
- // If the text is not JSON parseable, then a SyntaxError is thrown.
-
- throw new SyntaxError('JSON.parse');
- };
- }
-})();
[48/48] oozie git commit: Merge branch 'apache_forked_master' into
apache_forked_oya
Posted by pb...@apache.org.
Merge branch 'apache_forked_master' into apache_forked_oya
Change-Id: Id369937bb6ae485eeaf620c7002cd546b5b98471
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/a1537e1b
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/a1537e1b
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/a1537e1b
Branch: refs/heads/oya
Commit: a1537e1bb7cf381b822740377c16f258b24a5453
Parents: 876aceb fe182fd
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Tue Dec 13 12:05:50 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Tue Dec 13 12:05:50 2016 +0100
----------------------------------------------------------------------
bin/mkdistro.sh | 2 +-
bin/test-patch-11-findbugs-diff | 28 +-
bin/test-patch-20-tests | 4 +
.../java/org/apache/oozie/cli/OozieCLI.java | 22 +-
core/pom.xml | 5 -
core/src/main/conf/oozie-env.sh | 5 +-
.../oozie/action/hadoop/JavaActionExecutor.java | 8 +
.../apache/oozie/coord/CoordELFunctions.java | 7 +-
.../jpa/WorkflowsJobGetJPAExecutor.java | 1 +
.../apache/oozie/store/StoreStatusFilter.java | 1 +
.../java/org/apache/oozie/util/ConfigUtils.java | 12 +-
.../apache/oozie/util/ELConstantsFunctions.java | 6 +-
.../org/apache/oozie/util/Instrumentation.java | 8 +-
core/src/main/resources/oozie-default.xml | 93 +++-
.../action/hadoop/TestJavaActionExecutor.java | 1 +
.../client/TestOozieClientWithFakeServlet.java | 4 +-
.../oozie/coord/TestCoordELFunctions.java | 36 ++
.../jpa/TestBundleJobInfoGetJPAExecutor.java | 19 +
.../jpa/TestCoordJobInfoGetJPAExecutor.java | 19 +
.../jpa/TestWorkflowsJobGetJPAExecutor.java | 19 +
.../TestCoordMaterializeTriggerService.java | 55 +-
.../java/org/apache/oozie/test/XTestCase.java | 2 +
distro/pom.xml | 42 +-
distro/src/main/bin/oozie-jetty-server.sh | 226 +++++++++
distro/src/main/bin/oozie-setup.sh | 182 ++-----
distro/src/main/bin/oozie-sys.sh | 33 +-
distro/src/main/bin/oozied.sh | 87 +---
distro/src/main/tomcat/ROOT/WEB-INF/web.xml | 25 -
distro/src/main/tomcat/ROOT/favicon.ico | Bin 1150 -> 0 bytes
distro/src/main/tomcat/ROOT/index.html | 25 -
distro/src/main/tomcat/logging.properties | 64 ---
distro/src/main/tomcat/server.xml | 150 ------
distro/src/main/tomcat/ssl-server.xml | 152 ------
distro/src/main/tomcat/ssl-web.xml | 295 -----------
docs/src/site/twiki/DG_QuickStart.twiki | 7 +-
docs/src/site/twiki/ENG_Building.twiki | 3 +-
pom.xml | 487 +++++++++++++++++-
release-log.txt | 16 +-
server/pom.xml | 257 ++++++++++
server/src/main/assemblies/empty.xml | 21 +
.../oozie/server/EmbeddedOozieServer.java | 206 ++++++++
.../org/apache/oozie/server/FilterMapper.java | 61 +++
.../oozie/server/HttpConfigurationWrapper.java | 63 +++
.../org/apache/oozie/server/JspHandler.java | 158 ++++++
.../oozie/server/SSLServerConnectorFactory.java | 136 +++++
.../org/apache/oozie/server/ServletMapper.java | 95 ++++
.../oozie/server/WebRootResourceLocator.java | 39 ++
.../ConstraintSecurityHandlerProvider.java | 47 ++
.../oozie/server/guice/JettyServerProvider.java | 48 ++
.../oozie/server/guice/JspHandlerProvider.java | 47 ++
.../oozie/server/guice/OozieGuiceModule.java | 45 ++
.../server/guice/RewriteHandlerProvider.java | 44 ++
.../oozie/server/guice/ServicesProvider.java | 39 ++
server/src/main/resources/checkstyle-header.txt | 17 +
server/src/main/resources/checkstyle.xml | 41 ++
.../oozie/server/TestEmbeddedOozieServer.java | 119 +++++
.../org/apache/oozie/server/TestJspHandler.java | 94 ++++
.../server/TestSSLServerConnectorFactory.java | 137 +++++
sharelib/hive/pom.xml | 34 ++
sharelib/pig/pom.xml | 34 ++
.../hadoop/TestMapReduceActionExecutor.java | 23 +-
src/main/assemblies/distro.xml | 60 +--
webapp/src/main/webapp/403.html | 31 ++
webapp/src/main/webapp/404.html | 31 ++
webapp/src/main/webapp/index.jsp | 1 -
webapp/src/main/webapp/json2.js | 498 -------------------
66 files changed, 2987 insertions(+), 1590 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/core/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
index b8c870c,4beeb96..371f171
--- a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
@@@ -25,7 -25,7 +25,8 @@@ import java.net.ConnectException
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
+import java.nio.ByteBuffer;
+ import java.security.PrivilegedExceptionAction;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
@@@ -46,14 -46,14 +47,18 @@@ import org.apache.hadoop.fs.FileStatus
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AccessControlException;
-import org.apache.oozie.hadoop.utils.HadoopShims;
+import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.TaskLog;
+import org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+ import org.apache.hadoop.mapred.JobID;
+ import org.apache.hadoop.mapred.RunningJob;
+ import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
+ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.DiskChecker;
@@@ -85,8 -71,8 +90,9 @@@ import org.apache.oozie.service.HadoopA
import org.apache.oozie.service.Services;
import org.apache.oozie.service.ShareLibService;
import org.apache.oozie.service.URIHandlerService;
+ import org.apache.oozie.service.UserGroupInformationService;
import org.apache.oozie.service.WorkflowAppService;
+import org.apache.oozie.util.ClasspathUtils;
import org.apache.oozie.util.ELEvaluationException;
import org.apache.oozie.util.ELEvaluator;
import org.apache.oozie.util.JobUtils;
@@@ -1515,18 -1593,41 +1521,20 @@@ public class JavaActionExecutor extend
@Override
public void kill(Context context, WorkflowAction action) throws ActionExecutorException {
- JobClient jobClient = null;
- boolean exception = false;
+ YarnClient yarnClient = null;
try {
Element actionXml = XmlUtils.parseXml(action.getConf());
- final JobConf jobConf = createBaseHadoopConf(context, actionXml);
- WorkflowJob wfJob = context.getWorkflow();
- Configuration conf = null;
- if ( wfJob.getConf() != null ) {
- conf = new XConfiguration(new StringReader(wfJob.getConf()));
- }
- String launcherTag = LauncherMapperHelper.getActionYarnTag(conf, wfJob.getParentId(), action);
- jobConf.set(LauncherMainHadoopUtils.CHILD_MAPREDUCE_JOB_TAGS, LauncherMapperHelper.getTag(launcherTag));
- jobConf.set(LauncherMainHadoopUtils.OOZIE_JOB_LAUNCH_TIME, Long.toString(action.getStartTime().getTime()));
- UserGroupInformation ugi = Services.get().get(UserGroupInformationService.class)
- .getProxyUser(context.getWorkflow().getUser());
- ugi.doAs(new PrivilegedExceptionAction<Void>() {
- @Override
- public Void run() throws Exception {
- LauncherMainHadoopUtils.killChildYarnJobs(jobConf);
- return null;
- }
- });
- jobClient = createJobClient(context, jobConf);
- RunningJob runningJob = getRunningJob(context, action, jobClient);
- if (runningJob != null) {
- runningJob.killJob();
- }
++
+ JobConf jobConf = createBaseHadoopConf(context, actionXml);
+ yarnClient = createYarnClient(context, jobConf);
+ yarnClient.killApplication(ConverterUtils.toApplicationId(action.getExternalId()));
++
context.setExternalStatus(KILLED);
context.setExecutionData(KILLED, null);
- }
- catch (Exception ex) {
- exception = true;
+ } catch (Exception ex) {
+ LOG.error("Error: ", ex);
throw convertException(ex);
- }
- finally {
+ } finally {
try {
FileSystem actionFs = context.getAppFileSystem();
cleanUpActionDir(actionFs, context);
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/core/src/main/resources/oozie-default.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
----------------------------------------------------------------------
diff --cc core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
index bfc8ab4,8965cdf..aa485ef
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
@@@ -489,7 -543,68 +489,8 @@@ public class TestJavaActionExecutor ext
assertEquals(WorkflowAction.Status.ERROR, context.getAction().getStatus());
}
- public void testChildKill() throws Exception {
- final JobConf clusterConf = createJobConf();
- FileSystem fileSystem = FileSystem.get(clusterConf);
- Path confFile = new Path("/tmp/cluster-conf.xml");
- OutputStream out = fileSystem.create(confFile);
- clusterConf.writeXml(out);
- out.close();
- String confFileName = fileSystem.makeQualified(confFile).toString() + "#core-site.xml";
- final String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
- "<name-node>" + getNameNodeUri() + "</name-node>" +
- "<main-class> " + SleepJob.class.getName() + " </main-class>" +
- "<arg>-mt</arg>" +
- "<arg>300000</arg>" +
- "<archive>" + confFileName + "</archive>" +
- "</java>";
- final Context context = createContext(actionXml, null);
- final RunningJob runningJob = submitAction(context);
- waitFor(60 * 1000, new Predicate() {
- @Override
- public boolean evaluate() throws Exception {
- return runningJob.getJobStatus().getRunState() == 1;
- }
- });
- assertFalse(runningJob.isComplete());
- Thread.sleep(15000);
- JavaActionExecutor ae = new JavaActionExecutor();
- ae.kill(context, context.getAction());
-
- WorkflowJob wfJob = context.getWorkflow();
- Configuration conf = null;
- if (wfJob.getConf() != null) {
- conf = new XConfiguration(new StringReader(wfJob.getConf()));
- }
- String launcherTag = LauncherMapperHelper.getActionYarnTag(conf, wfJob.getParentId(), context.getAction());
- final Configuration jobConf = ae.createBaseHadoopConf(context, XmlUtils.parseXml(actionXml));
- jobConf.set(LauncherMainHadoopUtils.CHILD_MAPREDUCE_JOB_TAGS, LauncherMapperHelper.getTag(launcherTag));
- jobConf.setLong(LauncherMainHadoopUtils.OOZIE_JOB_LAUNCH_TIME, context.getAction().getStartTime().getTime());
-
- UserGroupInformationService ugiService = Services.get().get(UserGroupInformationService.class);
- UserGroupInformation ugi = ugiService.getProxyUser(getTestUser());
- Set<String> childSet = ugi.doAs(new PrivilegedExceptionAction<Set<String>>() {
- @Override
- public Set<String> run() throws Exception {
- Set<String> childSet = LauncherMainHadoopUtils.getChildJobs(jobConf);
- return childSet;
- }
- });
- assertEquals(1, childSet.size());
-
- JobClient jobClient = new JobClient(clusterConf);
- for (String jobId : childSet) {
- RunningJob childJob = jobClient.getJob(jobId);
- assertEquals(JobStatus.State.KILLED.getValue(), childJob.getJobStatus().getRunState());
- }
- assertTrue(ae.isCompleted(context.getAction().getExternalStatus()));
- assertEquals(WorkflowAction.Status.DONE, context.getAction().getStatus());
- assertEquals("KILLED", context.getAction().getExternalStatus());
- assertFalse(runningJob.isSuccessful());
- }
+ public void testExceptionSubmitException() throws Exception {
+
- public void testExceptionSubmitException() throws Exception {
String actionXml = "<java>" +
"<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/docs/src/site/twiki/DG_QuickStart.twiki
----------------------------------------------------------------------
diff --cc docs/src/site/twiki/DG_QuickStart.twiki
index 88f0533,2b9dc3b..2454985
--- a/docs/src/site/twiki/DG_QuickStart.twiki
+++ b/docs/src/site/twiki/DG_QuickStart.twiki
@@@ -41,14 -41,18 +41,14 @@@ suitable when same oozie package needs
2. Build with -Puber which will bundle the required libraries in the oozie war. Further, the following options are
available to customise the versions of the dependencies:
--P<profile> - default hadoop-2. Valid are hadoop-1, hadoop-2 or hadoop-3. Choose the correct hadoop
-profile depending on the hadoop version used.
--Ptez - Bundle tez jars in hive and pig sharelibs. Useful if you want to use tez
-+as the execution engine for those applications.
--Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 2.4.0 for hadoop-2 and 3.0.0-SNAPSHOT for hadoop-3
+-Dhadoop.version=<version> - default 2.4.0
-Dhadoop.auth.version=<version> - defaults to hadoop version
-Ddistcp.version=<version> - defaults to hadoop version
--Dpig.version=<version> - default 0.16.0
--Dpig.classifier=<classifier> - default none
+-Dpig.version=<version> - default 0.12.1
+-Dpig.classifier=<classifier> - default h2
-Dsqoop.version=<version> - default 1.4.3
- -Dsqoop.classifier=<classifier> - default hadoop200
- -Dtomcat.version=<version> - default 6.0.44
+ -Dsqoop.classifier=<classifier> - default hadoop100
+ -Dtomcat.version=<version> - default 6.0.47
-Dopenjpa.version=<version> - default 2.2.2
-Dxerces.version=<version> - default 2.10.0
-Dcurator.version=<version> - default 2.5.0
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/docs/src/site/twiki/ENG_Building.twiki
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/pom.xml
----------------------------------------------------------------------
diff --cc pom.xml
index 7ea789d,0cb5c78..403b626
--- a/pom.xml
+++ b/pom.xml
@@@ -101,12 -100,14 +101,13 @@@
<spark.bagel.version>1.6.1</spark.bagel.version>
<spark.guava.version>14.0.1</spark.guava.version>
<spark.scala.binary.version>2.10</spark.scala.binary.version>
- <sqoop.classifier>hadoop100</sqoop.classifier>
+ <sqoop.classifier>hadoop200</sqoop.classifier>
<streaming.version>${hadoop.version}</streaming.version>
- <distcp.version>${hadoop.version}</distcp.version>
<hadoop.auth.version>${hadoop.version}</hadoop.auth.version>
+ <tez.version>0.8.4</tez.version>
+
+ <jetty.version>9.2.19.v20160908</jetty.version>
- <!-- Tomcat version -->
- <tomcat.version>6.0.44</tomcat.version>
<jline.version>0.9.94</jline.version>
<openjpa.version>2.4.1</openjpa.version>
<xerces.version>2.10.0</xerces.version>
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/release-log.txt
----------------------------------------------------------------------
diff --cc release-log.txt
index e850a24,5aa5352..9660968
--- a/release-log.txt
+++ b/release-log.txt
@@@ -1,15 -1,26 +1,27 @@@
-- Oozie 4.4.0 release (trunk - unreleased)
+OOZIE-2590 OYA: Create basic Oozie Launcher Application Master (rkanter)
+OOZIE-2316 Drop support for Hadoop 1 and 0.23 (asasvari via rkanter)
+ OOZIE-2616 Add Tez profile for Hive and Pig sharelibs (poeppt via rkanter)
+ OOZIE-2741 Remove Tomcat (asasvari via rkanter)
+ OOZIE-2745 test-patch should also list the failed tests (gezapeti via rkanter)
+ OOZIE-2740 oozie help misspelled coordinator (coordiantor) and retrieved (retreived) (gsohn via rkanter)
+ OOZIE-2690 OOZIE NPE while executing kill() (abhishekbafna via jaydeepvishwakarma)
+ OOZIE-2737 testConfigDefaultPropsToAction is flaky (satishsaley via rohini)
+ OOZIE-2666 Support embedding Jetty into Oozie (asasvari via rkanter)
OOZIE-1459 Remove the version in the child poms for maven-antrun-plugin (Jan Hentschel via rkanter)
- OOZIE-2225 Add wild card filter for gathering jobs (sai-krish via rkanter)
- OOZIE-2536 Hadoop's cleanup of local directory in uber mode causing failures (satishsaley via rohini)
+ OOZIE-2225 Add wild card filter for gathering jobs (sai-krish,pbacsko via rkanter,rohini)
OOZIE-1986 Add FindBugs report to pre-commit build (andras.piros via rkanter)
OOZIE-2634 Queue dump command message is confusing when the queue is empty (andras.piros via rkanter)
-
-- Oozie 4.3.0 release
+ OOZIE-2742 Unable to kill applications based on tag (satishsaley via rohini)
+ OOZIE-2720 Test failure - TestCoordMaterializeTriggerService#testMaxMatThrottleNotPicked (gezapeti via rohini)
+ OOZIE-2536 Hadoop's cleanup of local directory in uber mode causing failures (satishsaley via rohini)
+ OOZIE-2723 JSON.org license is now CatX (rkanter, abhishekbafna via shwethags)
+ OOZIE-2725 Upgrade Tomcat to 6.0.47 for the latest security fixes (rkanter via shwethags)
+ OOZIE-2724 coord:current resolves monthly/yearly dependencies incorrectly (satishsaley via shwethags)
OOZIE-2719 Test case failure (abhishekbafna via jaydeepvishwakarma)
OOZIE-2674 Improve oozie commads documentation (abhishekbafna via rkanter)
OOZIE-2710 Oozie HCatalog example workflow fails (abhishekbafna via shwethags)
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/sharelib/hive/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/sharelib/pig/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/a1537e1b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
----------------------------------------------------------------------
diff --cc sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
index 88f9a7c,c7860be..ac7af7c
--- a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
+++ b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
@@@ -39,39 -68,11 +39,40 @@@ import java.util.jar.JarOutputStream
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.filecache.DistributedCache;
+import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobID;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapreduce.JobStatus;
+import org.apache.hadoop.streaming.StreamJob;
+import org.apache.oozie.WorkflowActionBean;
+import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.action.ActionExecutorException;
+import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.WorkflowAction;
++import org.apache.oozie.client.WorkflowAction.Status;
+import org.apache.oozie.command.wf.StartXCommand;
+import org.apache.oozie.command.wf.SubmitXCommand;
+import org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor;
+import org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQuery;
+import org.apache.oozie.service.HadoopAccessorService;
+import org.apache.oozie.service.Services;
+import org.apache.oozie.service.WorkflowAppService;
+import org.apache.oozie.util.ClassUtils;
+import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.PropertiesUtils;
+import org.apache.oozie.util.XConfiguration;
+import org.apache.oozie.util.XmlUtils;
+import org.jdom.Element;
public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
[08/48] oozie git commit: OOZIE-2666 Support embedding Jetty into
Oozie (asasvari via rkanter)
Posted by pb...@apache.org.
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/SSLServerConnectorFactory.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/SSLServerConnectorFactory.java b/server/src/main/java/org/apache/oozie/server/SSLServerConnectorFactory.java
new file mode 100644
index 0000000..2797cf4
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/SSLServerConnectorFactory.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server;
+
+
+import com.google.common.base.Preconditions;
+import com.google.inject.Inject;
+import org.apache.hadoop.conf.Configuration;
+import org.eclipse.jetty.http.HttpVersion;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.SecureRequestCustomizer;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.SslConnectionFactory;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Arrays;
+
+/**
+ * Factory that is used to configure SSL settings for the Oozie server.
+ */
+class SSLServerConnectorFactory {
+ private static final Logger LOG = LoggerFactory.getLogger(SSLServerConnectorFactory.class);
+ public static final String OOZIE_HTTPS_TRUSTSTORE_FILE = "oozie.https.truststore.file";
+ public static final String OOZIE_HTTPS_TRUSTSTORE_PASS = "oozie.https.truststore.pass";
+ public static final String OOZIE_HTTPS_KEYSTORE_PASS = "oozie.https.keystore.pass";
+ public static final String OOZIE_HTTPS_KEYSTORE_FILE = "oozie.https.keystore.file";
+
+ private SslContextFactory sslContextFactory;
+ private Configuration conf;
+
+ @Inject
+ public SSLServerConnectorFactory(final SslContextFactory sslContextFactory) {
+ this.sslContextFactory = Preconditions.checkNotNull(sslContextFactory, "sslContextFactory is null");
+ }
+
+ /**
+ * Construct a ServerConnector object with SSL settings
+ *
+ * @param oozieHttpsPort Oozie HTTPS port
+ * @param conf Oozie configuration
+ * @param server jetty Server which the connector is attached to
+ *
+ * @return ServerConnector
+ */
+ public ServerConnector createSecureServerConnector(int oozieHttpsPort, Configuration conf, Server server) {
+ this.conf = Preconditions.checkNotNull(conf, "conf is null");
+ Preconditions.checkNotNull(server, "server is null");
+ Preconditions.checkState(oozieHttpsPort >= 1 && oozieHttpsPort <= 65535,
+ String.format("Invalid port number specified: \'%d\'. It should be between 1 and 65535.", oozieHttpsPort));
+
+ setIncludeProtocols();
+ setCipherSuites();
+ setTrustStorePath();
+ setTrustStorePass();
+
+ setKeyStoreFile();
+ setKeystorePass();
+
+ HttpConfiguration httpsConfiguration = getHttpsConfiguration();
+ ServerConnector secureServerConnector = new ServerConnector(server,
+ new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.asString()),
+ new HttpConnectionFactory(httpsConfiguration));
+
+ secureServerConnector.setPort(oozieHttpsPort);
+
+ LOG.info(String.format("Secure server connector created, listenning on port %d", oozieHttpsPort));
+ return secureServerConnector;
+ }
+
+ private void setCipherSuites() {
+ String excludeCipherList = conf.get("oozie.https.exclude.cipher.suites");
+ String[] excludeCipherSuites = excludeCipherList.split(",");
+ sslContextFactory.setExcludeCipherSuites(excludeCipherSuites);
+
+ LOG.info(String.format("SSL context - excluding cipher suites: %s", Arrays.toString(excludeCipherSuites)));
+ }
+
+ private void setIncludeProtocols() {
+ String enabledProtocolsList = conf.get("oozie.https.include.protocols");
+ String[] enabledProtocols = enabledProtocolsList.split(",");
+ sslContextFactory.setIncludeProtocols(enabledProtocols);
+
+ LOG.info(String.format("SSL context - including protocols: %s", Arrays.toString(enabledProtocols)));
+ }
+
+ private void setTrustStorePath() {
+ String trustStorePath = conf.get(OOZIE_HTTPS_TRUSTSTORE_FILE);
+ Preconditions.checkNotNull(trustStorePath, "trustStorePath is null");
+ sslContextFactory.setTrustStorePath(trustStorePath);
+ }
+
+ private void setTrustStorePass() {
+ String trustStorePass = conf.get(OOZIE_HTTPS_TRUSTSTORE_PASS);
+ Preconditions.checkNotNull(trustStorePass, "setTrustStorePass is null");
+ sslContextFactory.setTrustStorePassword(trustStorePass);
+ }
+
+ private void setKeystorePass() {
+ String keystorePass = conf.get(OOZIE_HTTPS_KEYSTORE_PASS);
+ Preconditions.checkNotNull(keystorePass, "keystorePass is null");
+ sslContextFactory.setKeyManagerPassword(keystorePass);
+ }
+
+ private void setKeyStoreFile() {
+ String keystoreFile = conf.get(OOZIE_HTTPS_KEYSTORE_FILE);
+ Preconditions.checkNotNull(keystoreFile, "keystoreFile is null");
+ sslContextFactory.setKeyStorePath(keystoreFile);
+ }
+
+ private HttpConfiguration getHttpsConfiguration() {
+ HttpConfiguration https = new HttpConfigurationWrapper(conf).getDefaultHttpConfiguration();
+ https.setSecureScheme("https");
+ https.addCustomizer(new SecureRequestCustomizer());
+ return https;
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/ServletMapper.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/ServletMapper.java b/server/src/main/java/org/apache/oozie/server/ServletMapper.java
new file mode 100644
index 0000000..ae27ac3
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/ServletMapper.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server;
+
+import com.google.common.base.Preconditions;
+import com.google.inject.Inject;
+import org.apache.oozie.servlet.CallbackServlet;
+import org.apache.oozie.servlet.SLAServlet;
+import org.apache.oozie.servlet.V0AdminServlet;
+import org.apache.oozie.servlet.V0JobServlet;
+import org.apache.oozie.servlet.V0JobsServlet;
+import org.apache.oozie.servlet.V1AdminServlet;
+import org.apache.oozie.servlet.V1JobServlet;
+import org.apache.oozie.servlet.V2AdminServlet;
+import org.apache.oozie.servlet.V2JobServlet;
+import org.apache.oozie.servlet.V2SLAServlet;
+import org.apache.oozie.servlet.V2ValidateServlet;
+import org.apache.oozie.servlet.VersionServlet;
+import org.eclipse.jetty.servlet.ServletHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.servlet.ServletMapping;
+import org.eclipse.jetty.webapp.WebAppContext;
+
+import javax.servlet.Servlet;
+
+
+public class ServletMapper {
+ private final WebAppContext servletContextHandler;
+
+ @Inject
+ public ServletMapper(final WebAppContext servletContextHandler) {
+ this.servletContextHandler = Preconditions.checkNotNull(servletContextHandler, "ServletContextHandler is null");
+ }
+ /**
+ * Maps Oozie servlets to path specs. Make sure it is in sync with FilterMapper when making changes.
+ * */
+ void mapOozieServlets() {
+ mapServlet(VersionServlet.class, "/versions");
+ mapServlet(V0AdminServlet.class, "/v0/admin/*");
+ mapServlet(V1AdminServlet.class, "/v1/admin/*");
+ mapServlet(V2AdminServlet.class, "/v2/admin/*");
+
+ mapServlet(CallbackServlet.class, "/callback/*");
+
+ ServletHandler servletHandler = servletContextHandler.getServletHandler();
+ String voJobservletName = V0JobsServlet.class.getSimpleName();
+ servletHandler.addServlet(new ServletHolder(voJobservletName, new V0JobsServlet()));
+ ServletMapping jobServletMappingV0 = new ServletMapping();
+ jobServletMappingV0.setPathSpec("/v0/jobs");
+ jobServletMappingV0.setServletName(voJobservletName);
+
+ ServletMapping jobServletMappingV1 = new ServletMapping();
+ jobServletMappingV1.setPathSpec("/v1/jobs");
+ jobServletMappingV1.setServletName(voJobservletName);
+
+ ServletMapping jobServletMappingV2 = new ServletMapping();
+ jobServletMappingV2.setPathSpec("/v2/jobs");
+ jobServletMappingV2.setServletName(voJobservletName);
+
+ servletHandler.addServletMapping(jobServletMappingV0);
+ servletHandler.addServletMapping(jobServletMappingV1);
+ servletHandler.addServletMapping(jobServletMappingV2);
+
+ mapServlet(V0JobServlet.class, "/v0/job/*");
+ mapServlet(V1JobServlet.class, "/v1/job/*");
+ mapServlet(V2JobServlet.class, "/v2/job/*");
+ mapServlet(SLAServlet.class, "/v1/sla/*");
+ mapServlet(V2SLAServlet.class, "/v2/sla/*");
+ mapServlet(V2ValidateServlet.class, "/v2/validate/*");
+ }
+
+ private void mapServlet(final Class<? extends Servlet> servletClass, final String servletPath) {
+ try {
+ servletContextHandler.addServlet(new ServletHolder(servletClass.newInstance()), servletPath);
+ } catch (final InstantiationException | IllegalAccessException e) {
+ e.printStackTrace();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/WebRootResourceLocator.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/WebRootResourceLocator.java b/server/src/main/java/org/apache/oozie/server/WebRootResourceLocator.java
new file mode 100644
index 0000000..190d1c2
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/WebRootResourceLocator.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server;
+
+import java.io.FileNotFoundException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+public class WebRootResourceLocator {
+ private static final String WEBROOT_INDEX = "/webapp/";
+
+ public URI getWebRootResourceUri() throws FileNotFoundException, URISyntaxException
+ {
+ URL indexUri = JspHandler.class.getResource(WebRootResourceLocator.WEBROOT_INDEX);
+ if (indexUri == null)
+ {
+ throw new FileNotFoundException("Unable to find resource " + WebRootResourceLocator.WEBROOT_INDEX);
+ }
+ // Points to wherever /webroot/ (the resource) is
+ return indexUri.toURI();
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/guice/ConstraintSecurityHandlerProvider.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/guice/ConstraintSecurityHandlerProvider.java b/server/src/main/java/org/apache/oozie/server/guice/ConstraintSecurityHandlerProvider.java
new file mode 100644
index 0000000..6c313fe
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/guice/ConstraintSecurityHandlerProvider.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server.guice;
+
+import com.google.inject.Provider;
+import org.eclipse.jetty.security.ConstraintMapping;
+import org.eclipse.jetty.security.ConstraintSecurityHandler;
+import org.eclipse.jetty.util.security.Constraint;
+
+import java.util.Arrays;
+
+class ConstraintSecurityHandlerProvider implements Provider<ConstraintSecurityHandler> {
+ @Override
+ public ConstraintSecurityHandler get() {
+ ConstraintMapping callbackConstraintMapping = new ConstraintMapping();
+ callbackConstraintMapping.setPathSpec("/callback/*");
+ Constraint unsecureConstraint = new Constraint();
+ unsecureConstraint.setDataConstraint(Constraint.DC_NONE);
+ callbackConstraintMapping.setConstraint(unsecureConstraint);
+
+ ConstraintMapping mapping = new ConstraintMapping();
+ mapping.setPathSpec("/*");
+ Constraint constraint = new Constraint();
+ constraint.setDataConstraint(Constraint.DC_CONFIDENTIAL);
+ mapping.setConstraint(constraint);
+
+ ConstraintSecurityHandler security = new ConstraintSecurityHandler();
+ security.setConstraintMappings(Arrays.asList(callbackConstraintMapping, mapping));
+ return security;
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/guice/JettyServerProvider.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/guice/JettyServerProvider.java b/server/src/main/java/org/apache/oozie/server/guice/JettyServerProvider.java
new file mode 100644
index 0000000..6580a9a
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/guice/JettyServerProvider.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server.guice;
+
+import com.google.inject.Inject;
+import com.google.inject.Provider;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.oozie.service.ConfigurationService;
+import org.apache.oozie.service.Services;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+
+class JettyServerProvider implements Provider<Server> {
+ public static final String OOZIE_SERVER_THREADPOOL_MAX_THREADS = "oozie.server.threadpool.max.threads";
+ private final Configuration oozieConfiguration;
+
+ @Inject
+ public JettyServerProvider(final Services oozieServices) {
+ oozieConfiguration = oozieServices.get(ConfigurationService.class).getConf();
+ }
+
+ @Override
+ public Server get() {
+ final QueuedThreadPool threadPool = new QueuedThreadPool();
+
+ final int maxThreads = Integer.parseInt(
+ oozieConfiguration.get(OOZIE_SERVER_THREADPOOL_MAX_THREADS));
+ threadPool.setMaxThreads(maxThreads);
+
+ return new Server(threadPool);
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/guice/JspHandlerProvider.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/guice/JspHandlerProvider.java b/server/src/main/java/org/apache/oozie/server/guice/JspHandlerProvider.java
new file mode 100644
index 0000000..8a54a9a
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/guice/JspHandlerProvider.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server.guice;
+
+import com.google.inject.Inject;
+import com.google.inject.Provider;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.oozie.server.JspHandler;
+import org.apache.oozie.server.WebRootResourceLocator;
+import org.apache.oozie.service.ConfigurationService;
+import org.apache.oozie.service.Services;
+
+import java.io.File;
+
+public class JspHandlerProvider implements Provider<JspHandler> {
+ public static final String OOZIE_JSP_TMP_DIR = "oozie.jsp.tmp.dir";
+ public static final String EMBEDDED_JETTY_JSP_DIR = "embedded-jetty-jsp";
+ private final Configuration oozieConfiguration;
+
+ @Inject
+ public JspHandlerProvider(final Services oozieServices) {
+ oozieConfiguration = oozieServices.get(ConfigurationService.class).getConf();
+ }
+
+ @Override
+ public JspHandler get() {
+ final File tempDir = new File(oozieConfiguration.get(OOZIE_JSP_TMP_DIR), EMBEDDED_JETTY_JSP_DIR);
+
+ return new JspHandler(tempDir, new WebRootResourceLocator());
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/guice/OozieGuiceModule.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/guice/OozieGuiceModule.java b/server/src/main/java/org/apache/oozie/server/guice/OozieGuiceModule.java
new file mode 100644
index 0000000..bb79f0f
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/guice/OozieGuiceModule.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server.guice;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Singleton;
+import org.apache.oozie.server.JspHandler;
+import org.apache.oozie.service.Services;
+import org.eclipse.jetty.rewrite.handler.RewriteHandler;
+import org.eclipse.jetty.security.ConstraintSecurityHandler;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.webapp.WebAppContext;
+
+public class OozieGuiceModule extends AbstractModule {
+ @Override
+ protected void configure() {
+ bind(Services.class).toProvider(ServicesProvider.class).in(Singleton.class);
+
+ bind(Server.class).toProvider(JettyServerProvider.class).in(Singleton.class);
+
+ bind(WebAppContext.class).in(Singleton.class);
+
+ bind(ConstraintSecurityHandler.class).toProvider(ConstraintSecurityHandlerProvider.class).in(Singleton.class);
+
+ bind(JspHandler.class).toProvider(JspHandlerProvider.class).in(Singleton.class);
+
+ bind(RewriteHandler.class).toProvider(RewriteHandlerProvider.class).in(Singleton.class);
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/guice/RewriteHandlerProvider.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/guice/RewriteHandlerProvider.java b/server/src/main/java/org/apache/oozie/server/guice/RewriteHandlerProvider.java
new file mode 100644
index 0000000..e54d0cb
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/guice/RewriteHandlerProvider.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server.guice;
+
+import com.google.inject.Inject;
+import com.google.inject.Provider;
+import org.eclipse.jetty.rewrite.handler.RedirectPatternRule;
+import org.eclipse.jetty.rewrite.handler.RewriteHandler;
+
+class RewriteHandlerProvider implements Provider<RewriteHandler> {
+ private final RewriteHandler rewriteHandler;
+
+ @Override
+ public RewriteHandler get() {
+ return rewriteHandler;
+ }
+
+ @Inject
+ public RewriteHandlerProvider(final RedirectPatternRule redirectPatternRule) {
+ this.rewriteHandler = new RewriteHandler();
+
+ redirectPatternRule.setPattern("");
+ redirectPatternRule.setLocation("/oozie");
+ redirectPatternRule.setTerminating(true);
+
+ this.rewriteHandler.addRule(redirectPatternRule);
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/guice/ServicesProvider.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/guice/ServicesProvider.java b/server/src/main/java/org/apache/oozie/server/guice/ServicesProvider.java
new file mode 100644
index 0000000..cc4ed17
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/guice/ServicesProvider.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server.guice;
+
+import com.google.inject.Provider;
+import org.apache.oozie.service.ServiceException;
+import org.apache.oozie.service.Services;
+
+class ServicesProvider implements Provider<Services> {
+ @Override
+ public Services get() {
+ try {
+ final Services oozieServices = new Services();
+
+ oozieServices.init();
+
+ return oozieServices;
+ } catch (ServiceException e) {
+ throw new ExceptionInInitializerError(
+ String.format("Could not instantiate Oozie services. [e.message=%s]", e.getMessage()));
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/resources/checkstyle-header.txt
----------------------------------------------------------------------
diff --git a/server/src/main/resources/checkstyle-header.txt b/server/src/main/resources/checkstyle-header.txt
new file mode 100644
index 0000000..4247452
--- /dev/null
+++ b/server/src/main/resources/checkstyle-header.txt
@@ -0,0 +1,17 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/resources/checkstyle.xml
----------------------------------------------------------------------
diff --git a/server/src/main/resources/checkstyle.xml b/server/src/main/resources/checkstyle.xml
new file mode 100644
index 0000000..6e8be5d
--- /dev/null
+++ b/server/src/main/resources/checkstyle.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0"?>
+<!DOCTYPE module PUBLIC "-//Puppy Crawl//DTD Check Configuration 1.2//EN" "http://www.puppycrawl.com/dtds/configuration_1_2.dtd">
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<module name="Checker">
+
+ <module name="RegexpSingleline">
+ <property name="severity" value="warning"/>
+ <property name="format" value="\s+$"/>
+ <property name="message" value="Line has trailing spaces."/>
+ </module>
+
+ <module name="Header">
+ <property name="headerFile" value="${checkstyle.header.file}"/>
+ </module>
+
+ <module name="TreeWalker">
+ <module name="LineLength">
+ <property name="severity" value="warning"/>
+ <property name="max" value="132"/>
+ </module>
+ </module>
+
+</module>
+
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/test/java/org/apache/oozie/server/TestEmbeddedOozieServer.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/oozie/server/TestEmbeddedOozieServer.java b/server/src/test/java/org/apache/oozie/server/TestEmbeddedOozieServer.java
new file mode 100644
index 0000000..0f36e8c
--- /dev/null
+++ b/server/src/test/java/org/apache/oozie/server/TestEmbeddedOozieServer.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.oozie.service.ConfigurationService;
+import org.apache.oozie.service.ServiceException;
+import org.apache.oozie.service.Services;
+import org.eclipse.jetty.rewrite.handler.RewriteHandler;
+import org.eclipse.jetty.security.ConstraintSecurityHandler;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.webapp.WebAppContext;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyInt;
+import static org.mockito.Matchers.isA;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+
/**
 * Unit tests for EmbeddedOozieServer setup, using Mockito mocks for all Jetty and
 * Oozie collaborators. Exercises plain-HTTP setup, HTTPS setup, and port validation.
 */
@RunWith(MockitoJUnitRunner.class)
public class TestEmbeddedOozieServer {
    @Mock private JspHandler mockJspHandler;
    @Mock private Services mockServices;
    @Mock private SslContextFactory mockSSLContextFactory;
    @Mock private SSLServerConnectorFactory mockSSLServerConnectorFactory;
    @Mock private Server mockServer;
    @Mock private ServerConnector mockServerConnector;
    @Mock private ConfigurationService mockConfigService;
    @Mock private Configuration mockConfiguration;
    @Mock private RewriteHandler mockOozieRewriteHandler;
    // NOTE(review): this field is reassigned to a real instance in setUp(), so the @Mock
    // annotation appears redundant — confirm and consider removing it.
    @Mock private EmbeddedOozieServer embeddedOozieServer;
    @Mock private WebAppContext servletContextHandler;
    @Mock private ServletMapper oozieServletMapper;
    @Mock private FilterMapper oozieFilterMapper;
    @Mock private ConstraintSecurityHandler constraintSecurityHandler;

    // Builds the server under test from mocks and stubs every configuration value
    // EmbeddedOozieServer.setup() is expected to read.
    @Before public void setUp() {
        embeddedOozieServer = new EmbeddedOozieServer(mockServer, mockJspHandler, mockServices, mockSSLServerConnectorFactory,
                mockOozieRewriteHandler, servletContextHandler, oozieServletMapper, oozieFilterMapper, constraintSecurityHandler);

        doReturn("11000").when(mockConfiguration).get("oozie.http.port");
        doReturn("11443").when(mockConfiguration).get("oozie.https.port");
        doReturn("65536").when(mockConfiguration).get("oozie.http.request.header.size");
        doReturn("65536").when(mockConfiguration).get("oozie.http.response.header.size");
        doReturn("42").when(mockConfiguration).get("oozie.server.threadpool.max.threads");
        doReturn(mockConfiguration).when(mockConfigService).getConf();
        doReturn(mockConfigService).when(mockServices).get(ConfigurationService.class);
    }

    // Shared verification: every test must read the configuration service exactly once,
    // and no unexpected interactions may hit the listed collaborators.
    @After public void tearDown() {
        verify(mockServices).get(ConfigurationService.class);

        verifyNoMoreInteractions(
                mockJspHandler,
                mockServices,
                mockServerConnector,
                mockSSLServerConnectorFactory);
    }

    // Plain HTTP setup: the JSP handler must be asked to configure the web app context.
    @Test
    public void testServerSetup() throws Exception {
        doReturn("false").when(mockConfiguration).get("oozie.https.enabled");
        embeddedOozieServer.setup();
        verify(mockJspHandler).setupWebAppContext(isA(WebAppContext.class));
    }

    // HTTPS setup: additionally, a secure server connector must be requested from the
    // SSL server connector factory.
    @Test
    public void testSecureServerSetup() throws Exception {
        doReturn("true").when(mockConfiguration).get("oozie.https.enabled");

        ServerConnector mockSecuredServerConnector = new ServerConnector(embeddedOozieServer.server);
        doReturn(mockSecuredServerConnector)
                .when(mockSSLServerConnectorFactory)
                .createSecureServerConnector(anyInt(), any(Configuration.class), any(Server.class));

        embeddedOozieServer.setup();

        verify(mockJspHandler).setupWebAppContext(isA(WebAppContext.class));
        verify(mockSSLServerConnectorFactory).createSecureServerConnector(
                isA(Integer.class), isA(Configuration.class), isA(Server.class));
    }

    // A non-numeric http port must surface as a NumberFormatException from setup().
    @Test(expected=NumberFormatException.class)
    public void numberFormatExceptionThrownWithInvalidHttpPort() throws ServiceException, IOException, URISyntaxException {
        doReturn("INVALID_PORT").when(mockConfiguration).get("oozie.http.port");
        embeddedOozieServer.setup();
    }
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/test/java/org/apache/oozie/server/TestJspHandler.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/oozie/server/TestJspHandler.java b/server/src/test/java/org/apache/oozie/server/TestJspHandler.java
new file mode 100644
index 0000000..741aa5d
--- /dev/null
+++ b/server/src/test/java/org/apache/oozie/server/TestJspHandler.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server;
+
+import org.eclipse.jetty.webapp.WebAppContext;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
+
/**
 * Unit tests for JspHandler's scratch-directory handling: the directory is created when
 * missing, reused when present, and a failed creation raises an IOException.
 */
@RunWith(MockitoJUnitRunner.class)
public class TestJspHandler {
    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    @Mock File mockScratchDir;
    @Mock WebAppContext mockWebAppContext;
    @Mock WebRootResourceLocator mockWebRootResourceLocator;

    private JspHandler jspHandler;

    // Builds the handler under test; the web root locator always resolves to /webroot.
    @Before
    public void setUp() throws Exception {
        jspHandler = new JspHandler(mockScratchDir, mockWebRootResourceLocator);
        when(mockWebRootResourceLocator.getWebRootResourceUri()).thenReturn(new URI("/webroot"));
    }

    // Every test path must at least check whether the scratch directory exists.
    @After
    public void tearDown() throws Exception {
        verify(mockScratchDir).exists();
    }

    @Test
    public void scratchDir_Is_Created_When_Setup_Called_And_ScratchDir_Did_Not_Exist() throws IOException, URISyntaxException {
        when(mockScratchDir.exists()).thenReturn(false);
        when(mockScratchDir.mkdirs()).thenReturn(true);

        jspHandler.setupWebAppContext(mockWebAppContext);

        verify(mockScratchDir).mkdirs();
    }

    // mkdirs() returning false means the directory could not be created; setup must fail.
    @Test
    public void scratchDir_Cannot_Be_Created_When_Setup_Called_And_ScratchDir_Did_Not_Exist()
            throws IOException, URISyntaxException {
        when(mockScratchDir.exists()).thenReturn(false);
        when(mockScratchDir.mkdirs()).thenReturn(false);

        expectedException.expect(IOException.class);
        jspHandler.setupWebAppContext(mockWebAppContext);

        verify(mockScratchDir).mkdirs();
    }

    // An existing scratch directory must be reused, never recreated.
    @Test
    public void scratchDir_Is_Reused_When_Setup_Called_And_ScratchDir_Existed() throws IOException, URISyntaxException {
        when(mockScratchDir.exists()).thenReturn(true);

        jspHandler.setupWebAppContext(mockWebAppContext);

        verify(mockScratchDir, times(0)).mkdirs();
    }
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/test/java/org/apache/oozie/server/TestSSLServerConnectorFactory.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/oozie/server/TestSSLServerConnectorFactory.java b/server/src/test/java/org/apache/oozie/server/TestSSLServerConnectorFactory.java
new file mode 100644
index 0000000..9634da8
--- /dev/null
+++ b/server/src/test/java/org/apache/oozie/server/TestSSLServerConnectorFactory.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server;
+
+import org.apache.hadoop.conf.Configuration;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+
+/**
+ * Server tests
+ */
+@RunWith(MockitoJUnitRunner.class)
+public class TestSSLServerConnectorFactory {
+ @Mock private SslContextFactory mockSSLContextFactory;
+ @Mock private SSLServerConnectorFactory mockSSLServerConnectorFactory;
+ @Mock private Server mockServer;
+ @Mock private ServerConnector mockServerConnector;
+
+ private Configuration testConfig;
+ private SSLServerConnectorFactory sslServerConnectorFactory;
+
+ @Before public void setUp() {
+ testConfig = new Configuration();
+ testConfig.set("oozie.https.truststore.file", "test_truststore_file");
+ testConfig.set("oozie.https.truststore.pass", "trustpass");
+ testConfig.set("oozie.https.keystore.file", "test_keystore_file");
+ testConfig.set("oozie.https.keystore.pass", "keypass");
+ testConfig.set("oozie.http.port", "11000");
+ testConfig.set("oozie.http.request.header.size", "65536");
+ testConfig.set("oozie.http.response.header.size", "65536");
+ testConfig.set("oozie.https.include.protocols", "TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2");
+ testConfig.set("oozie.https.exclude.cipher.suites",
+ "TLS_ECDHE_RSA_WITH_RC4_128_SHA,SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA,SSL_RSA_WITH_DES_CBC_SHA," +
+ "SSL_DHE_RSA_WITH_DES_CBC_SHA,SSL_RSA_EXPORT_WITH_RC4_40_MD5,SSL_RSA_EXPORT_WITH_DES40_CBC_SHA," +
+ "SSL_RSA_WITH_RC4_128_MD5");
+
+ sslServerConnectorFactory = new SSLServerConnectorFactory(mockSSLContextFactory);
+ }
+
+ @After
+ public void tearDown() {
+ verify(mockSSLContextFactory).setTrustStorePath(anyString());
+ verify(mockSSLContextFactory).setTrustStorePassword(anyString());
+ verify(mockSSLContextFactory).setKeyStorePath(anyString());
+ verify(mockSSLContextFactory).setKeyManagerPassword(anyString());
+ verifyNoMoreInteractions(
+ mockServerConnector,
+ mockSSLServerConnectorFactory);
+ }
+
+ private void verifyDefaultExcludeCipherSuites() {
+ verify(mockSSLContextFactory).setExcludeCipherSuites(
+ "TLS_ECDHE_RSA_WITH_RC4_128_SHA",
+ "SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA",
+ "SSL_RSA_WITH_DES_CBC_SHA",
+ "SSL_DHE_RSA_WITH_DES_CBC_SHA",
+ "SSL_RSA_EXPORT_WITH_RC4_40_MD5",
+ "SSL_RSA_EXPORT_WITH_DES40_CBC_SHA",
+ "SSL_RSA_WITH_RC4_128_MD5");
+ }
+
+ private void verifyDefaultIncludeProtocols() {
+ verify(mockSSLContextFactory).setIncludeProtocols(
+ "TLSv1",
+ "SSLv2Hello",
+ "TLSv1.1",
+ "TLSv1.2");
+ }
+
+ @Test
+ public void includeProtocolsHaveDefaultValues() throws Exception {
+ sslServerConnectorFactory.createSecureServerConnector(42, testConfig, mockServer);
+
+ verifyDefaultIncludeProtocols();
+ verifyDefaultExcludeCipherSuites();
+ }
+
+ @Test
+ public void includeProtocolsCanBeSetViaConfigFile() throws Exception {
+ SSLServerConnectorFactory sslServerConnectorFactory = new SSLServerConnectorFactory(mockSSLContextFactory);
+ testConfig.set("oozie.https.include.protocols", "TLSv1,TLSv1.2");
+ sslServerConnectorFactory.createSecureServerConnector(42, testConfig, mockServer);
+
+ verify(mockSSLContextFactory).setIncludeProtocols(
+ "TLSv1",
+ "TLSv1.2");
+ }
+
+ @Test
+ public void excludeCipherSuitesHaveDefaultValues() throws Exception {
+ sslServerConnectorFactory.createSecureServerConnector(42, testConfig, mockServer);
+
+ verifyDefaultExcludeCipherSuites();
+ verifyDefaultIncludeProtocols();
+ }
+
+ @Test
+ public void excludeCipherSuitesCanBeSetViaConfigFile() throws Exception {
+ testConfig.set("oozie.https.exclude.cipher.suites","TLS_ECDHE_RSA_WITH_RC4_128_SHA,SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA,"
+ + "SSL_RSA_EXPORT_WITH_DES40_CBC_SHA");
+
+ sslServerConnectorFactory.createSecureServerConnector(42, testConfig, mockServer);
+
+ verify(mockSSLContextFactory).setExcludeCipherSuites(
+ "TLS_ECDHE_RSA_WITH_RC4_128_SHA",
+ "SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA",
+ "SSL_RSA_EXPORT_WITH_DES40_CBC_SHA");
+ verifyDefaultIncludeProtocols();
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/src/main/assemblies/distro-jetty.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distro-jetty.xml b/src/main/assemblies/distro-jetty.xml
new file mode 100644
index 0000000..a4bee03
--- /dev/null
+++ b/src/main/assemblies/distro-jetty.xml
@@ -0,0 +1,155 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<assembly>
+ <id>distro</id>
+ <formats>
+ <format>dir</format>
+ <format>tar.gz</format>
+ </formats>
+ <includeBaseDirectory>true</includeBaseDirectory>
+ <baseDirectory>oozie-${project.version}</baseDirectory>
+ <fileSets>
+ <!-- Oozie configuration files -->
+ <fileSet>
+ <directory>${basedir}/../core/src/main/conf/</directory>
+ <outputDirectory>/conf</outputDirectory>
+ <includes>
+ <include>**</include>
+ </includes>
+ </fileSet>
+ <!-- Distro files, readme, licenses, etc -->
+ <fileSet>
+ <directory>${basedir}/../</directory>
+ <outputDirectory>/</outputDirectory>
+ <includes>
+ <include>license.txt</include>
+ <include>notice.txt</include>
+ <include>readme.txt</include>
+ <include>release-log.txt</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/src/main/bin</directory>
+ <outputDirectory>/bin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <!-- Client -->
+ <fileSet>
+ <directory>${basedir}/../client/target/oozie-client-${project.version}-client/oozie-client-${project.version}/bin</directory>
+ <outputDirectory>/bin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <!-- Tools -->
+ <fileSet>
+ <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/bin</directory>
+ <outputDirectory>/bin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/libtools</directory>
+ <outputDirectory>/libtools</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ </fileSet>
+ <!-- Oozie Login Server Example war and jar -->
+ <fileSet>
+ <directory>${basedir}/../login/target</directory>
+ <outputDirectory>/</outputDirectory>
+ <includes>
+ <include>oozie-login.war</include>
+ <include>oozie-login.jar</include>
+ </includes>
+ <fileMode>0555</fileMode>
+ </fileSet>
+ <!-- Oozie Server - embedded jetty -->
+ <fileSet>
+ <directory>${basedir}/../server/target/</directory>
+ <outputDirectory>/embedded-oozie-server</outputDirectory>
+ <includes>
+ <include>oozie-server*.jar</include>
+ <include>**/jetty*.jar</include>
+ <include>**/*jsp*.jar</include>
+ <include>**/mail*.jar</include>
+ <include>**/apache*.jar</include>
+ <include>**/commons-el*.jar</include>
+ <include>**/javax.servlet-api-3.1.0.jar</include>
+ <include>**/jasper*jar</include>
+ <include>**/taglibs-*jar</include>
+ <include>**/org.eclipse.jdt.core-*jar</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/../webapp/target/oozie-webapp-${project.version}</directory>
+ <outputDirectory>/embedded-oozie-server/webapp</outputDirectory>
+ <excludes>
+ <exclude>**/web.xml</exclude>
+ </excludes>
+ </fileSet>
+ </fileSets>
+ <files>
+ <!-- Oozie configuration files -->
+ <file>
+ <source>${basedir}/../core/src/main/resources/oozie-default.xml</source>
+ <outputDirectory>/conf</outputDirectory>
+ </file>
+ <!-- Oozie core jar -->
+ <file>
+ <source>${basedir}/../core/target/oozie-core-${project.version}.jar</source>
+ <outputDirectory>/oozie-core</outputDirectory>
+ </file>
+ <!-- Oozie core test jar -->
+ <file>
+ <source>${basedir}/../core/target/oozie-core-${project.version}-tests.jar</source>
+ <outputDirectory>/oozie-core</outputDirectory>
+ </file>
+ <!-- Oozie Documentation -->
+ <file>
+ <source>${basedir}/../docs/target/oozie-docs-${project.version}-docs.zip</source>
+ <outputDirectory>/</outputDirectory>
+ <destName>docs.zip</destName>
+ </file>
+ <!-- Oozie Client TAR.GZ -->
+ <file>
+ <source>${basedir}/../client/target/oozie-client-${project.version}-client.tar.gz</source>
+ <outputDirectory>/</outputDirectory>
+ <destName>oozie-client-${project.version}.tar.gz</destName>
+ </file>
+ <!-- Oozie examples TAR.GZ -->
+ <file>
+ <source>${basedir}/../examples/target/oozie-examples-${project.version}-examples.tar.gz</source>
+ <outputDirectory>/</outputDirectory>
+ <destName>oozie-examples.tar.gz</destName>
+ </file>
+ <!-- Oozie sharelib TAR.GZ -->
+ <file>
+ <source>${basedir}/../sharelib/target/oozie-sharelib-${project.version}.tar.gz</source>
+ <outputDirectory>/</outputDirectory>
+ <fileMode>0444</fileMode>
+ </file>
+ </files>
+</assembly>
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/src/main/assemblies/distro-tomcat.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distro-tomcat.xml b/src/main/assemblies/distro-tomcat.xml
new file mode 100644
index 0000000..d7018a3
--- /dev/null
+++ b/src/main/assemblies/distro-tomcat.xml
@@ -0,0 +1,153 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<assembly>
+ <id>distro</id>
+ <formats>
+ <format>dir</format>
+ <format>tar.gz</format>
+ </formats>
+ <includeBaseDirectory>true</includeBaseDirectory>
+ <baseDirectory>oozie-${project.version}</baseDirectory>
+ <fileSets>
+ <!-- Oozie configuration files -->
+ <fileSet>
+ <directory>${basedir}/../core/src/main/conf/</directory>
+ <outputDirectory>/conf</outputDirectory>
+ <includes>
+ <include>**</include>
+ </includes>
+ </fileSet>
+ <!-- Distro files, readme, licenses, etc -->
+ <fileSet>
+ <directory>${basedir}/../</directory>
+ <outputDirectory>/</outputDirectory>
+ <includes>
+ <include>license.txt</include>
+ <include>notice.txt</include>
+ <include>readme.txt</include>
+ <include>release-log.txt</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/src/main/bin</directory>
+ <outputDirectory>/bin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <!-- Client -->
+ <fileSet>
+ <directory>${basedir}/../client/target/oozie-client-${project.version}-client/oozie-client-${project.version}/bin</directory>
+ <outputDirectory>/bin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <!-- Tools -->
+ <fileSet>
+ <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/bin</directory>
+ <outputDirectory>/bin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/libtools</directory>
+ <outputDirectory>/libtools</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ </fileSet>
+ <!-- Embedded Tomcat -->
+ <fileSet>
+ <directory>${basedir}/target/tomcat/oozie-server</directory>
+ <outputDirectory>/oozie-server</outputDirectory>
+ <excludes>
+ <exclude>bin/*.sh</exclude>
+ </excludes>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/target/tomcat/oozie-server/bin</directory>
+ <outputDirectory>/oozie-server/bin</outputDirectory>
+ <includes>
+ <include>*.sh</include>
+ </includes>
+ <fileMode>0555</fileMode>
+ </fileSet>
+ <!-- Oozie Login Server Example war and jar -->
+ <fileSet>
+ <directory>${basedir}/../login/target</directory>
+ <outputDirectory>/</outputDirectory>
+ <includes>
+ <include>oozie-login.war</include>
+ <include>oozie-login.jar</include>
+ </includes>
+ <fileMode>0555</fileMode>
+ </fileSet>
+ </fileSets>
+ <files>
+ <!-- Oozie configuration files -->
+ <file>
+ <source>${basedir}/../core/src/main/resources/oozie-default.xml</source>
+ <outputDirectory>/conf</outputDirectory>
+ </file>
+ <!-- Oozie core jar -->
+ <file>
+ <source>${basedir}/../core/target/oozie-core-${project.version}.jar</source>
+ <outputDirectory>/oozie-core</outputDirectory>
+ </file>
+ <!-- Oozie core test jar -->
+ <file>
+ <source>${basedir}/../core/target/oozie-core-${project.version}-tests.jar</source>
+ <outputDirectory>/oozie-core</outputDirectory>
+ </file>
+ <!-- Oozie war -->
+ <file>
+ <source>${basedir}/../webapp/target/oozie-webapp-${project.version}.war</source>
+ <outputDirectory>/</outputDirectory>
+ <destName>oozie.war</destName>
+ </file>
+ <!-- Oozie Documentation -->
+ <file>
+ <source>${basedir}/../docs/target/oozie-docs-${project.version}-docs.zip</source>
+ <outputDirectory>/</outputDirectory>
+ <destName>docs.zip</destName>
+ </file>
+ <!-- Oozie Client TAR.GZ -->
+ <file>
+ <source>${basedir}/../client/target/oozie-client-${project.version}-client.tar.gz</source>
+ <outputDirectory>/</outputDirectory>
+ <destName>oozie-client-${project.version}.tar.gz</destName>
+ </file>
+ <!-- Oozie examples TAR.GZ -->
+ <file>
+ <source>${basedir}/../examples/target/oozie-examples-${project.version}-examples.tar.gz</source>
+ <outputDirectory>/</outputDirectory>
+ <destName>oozie-examples.tar.gz</destName>
+ </file>
+ <!-- Oozie sharelib TAR.GZ -->
+ <file>
+ <source>${basedir}/../sharelib/target/oozie-sharelib-${project.version}.tar.gz</source>
+ <outputDirectory>/</outputDirectory>
+ <fileMode>0444</fileMode>
+ </file>
+ </files>
+</assembly>
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/src/main/assemblies/distro.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distro.xml b/src/main/assemblies/distro.xml
deleted file mode 100644
index 1ffbfd6..0000000
--- a/src/main/assemblies/distro.xml
+++ /dev/null
@@ -1,155 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<assembly>
- <id>distro</id>
- <formats>
- <format>dir</format>
- <format>tar.gz</format>
- </formats>
- <includeBaseDirectory>true</includeBaseDirectory>
- <baseDirectory>oozie-${project.version}</baseDirectory>
- <fileSets>
- <!-- Oozie configuration files -->
- <fileSet>
- <directory>${basedir}/../core/src/main/conf/</directory>
- <outputDirectory>/conf</outputDirectory>
- <includes>
- <include>**</include>
- </includes>
- </fileSet>
- <!-- Distro files, readme, licenses, etc -->
- <fileSet>
- <directory>${basedir}/../</directory>
- <outputDirectory>/</outputDirectory>
- <includes>
- <include>license.txt</include>
- <include>notice.txt</include>
- <include>readme.txt</include>
- <include>release-log.txt</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>${basedir}/src/main/bin</directory>
- <outputDirectory>/bin</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0755</fileMode>
- </fileSet>
- <!-- Client -->
- <fileSet>
- <directory>${basedir}/../client/target/oozie-client-${project.version}-client/oozie-client-${project.version}/bin</directory>
- <outputDirectory>/bin</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0755</fileMode>
- </fileSet>
- <!-- Tools -->
- <fileSet>
- <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/bin</directory>
- <outputDirectory>/bin</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0755</fileMode>
- </fileSet>
- <fileSet>
- <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/libtools</directory>
- <outputDirectory>/libtools</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- </fileSet>
- <!-- Embedded Tomcat -->
- <fileSet>
- <directory>${basedir}/target/tomcat/oozie-server</directory>
- <outputDirectory>/oozie-server</outputDirectory>
- <excludes>
- <exclude>bin/*.sh</exclude>
- </excludes>
- </fileSet>
- <fileSet>
- <directory>${basedir}/target/tomcat/oozie-server/bin</directory>
- <outputDirectory>/oozie-server/bin</outputDirectory>
- <includes>
- <include>*.sh</include>
- </includes>
- <fileMode>0555</fileMode>
- </fileSet>
- <!-- Oozie Login Server Example war and jar -->
- <fileSet>
- <directory>${basedir}/../login/target</directory>
- <outputDirectory>/</outputDirectory>
- <includes>
- <include>oozie-login.war</include>
- <include>oozie-login.jar</include>
- </includes>
- <fileMode>0555</fileMode>
- </fileSet>
-
- </fileSets>
- <files>
- <!-- Oozie configuration files -->
- <file>
- <source>${basedir}/../core/src/main/resources/oozie-default.xml</source>
- <outputDirectory>/conf</outputDirectory>
- <destName>oozie-default.xml.reference</destName>
- </file>
- <!-- Oozie core jar -->
- <file>
- <source>${basedir}/../core/target/oozie-core-${project.version}.jar</source>
- <outputDirectory>/oozie-core</outputDirectory>
- </file>
- <!-- Oozie core test jar -->
- <file>
- <source>${basedir}/../core/target/oozie-core-${project.version}-tests.jar</source>
- <outputDirectory>/oozie-core</outputDirectory>
- </file>
- <!-- Oozie war -->
- <file>
- <source>${basedir}/../webapp/target/oozie-webapp-${project.version}.war</source>
- <outputDirectory>/</outputDirectory>
- <destName>oozie.war</destName>
- </file>
- <!-- Oozie Documentation -->
- <file>
- <source>${basedir}/../docs/target/oozie-docs-${project.version}-docs.zip</source>
- <outputDirectory>/</outputDirectory>
- <destName>docs.zip</destName>
- </file>
- <!-- Oozie Client TAR.GZ -->
- <file>
- <source>${basedir}/../client/target/oozie-client-${project.version}-client.tar.gz</source>
- <outputDirectory>/</outputDirectory>
- <destName>oozie-client-${project.version}.tar.gz</destName>
- </file>
- <!-- Oozie examples TAR.GZ -->
- <file>
- <source>${basedir}/../examples/target/oozie-examples-${project.version}-examples.tar.gz</source>
- <outputDirectory>/</outputDirectory>
- <destName>oozie-examples.tar.gz</destName>
- </file>
- <!-- Oozie sharelib TAR.GZ -->
- <file>
- <source>${basedir}/../sharelib/target/oozie-sharelib-${project.version}.tar.gz</source>
- <outputDirectory>/</outputDirectory>
- <fileMode>0444</fileMode>
- </file>
- </files>
-</assembly>
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/webapp/src/main/webapp/403.html
----------------------------------------------------------------------
diff --git a/webapp/src/main/webapp/403.html b/webapp/src/main/webapp/403.html
new file mode 100644
index 0000000..f3183d9
--- /dev/null
+++ b/webapp/src/main/webapp/403.html
@@ -0,0 +1,31 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+ <title>Error 403 Forbidden</title>
+</head>
+<body>
+ <h2>HTTP ERROR 403</h2>
+ <p>Problem accessing page. Reason:</p>
+ <pre> Forbidden</pre>
+ <p></p>
+</body>
+</html>
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/webapp/src/main/webapp/404.html
----------------------------------------------------------------------
diff --git a/webapp/src/main/webapp/404.html b/webapp/src/main/webapp/404.html
new file mode 100644
index 0000000..a953df2
--- /dev/null
+++ b/webapp/src/main/webapp/404.html
@@ -0,0 +1,31 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+ <title>Error 404 Not Found</title>
+</head>
+<body>
+ <h2>HTTP ERROR 404</h2>
+ <p>Problem accessing page. Reason:</p>
+ <pre> Not Found</pre>
+ <p></p>
+</body>
+</html>
[42/48] oozie git commit: OOZIE-2591 fix unit test
testReadingRecoveryIdFails
Posted by pb...@apache.org.
OOZIE-2591 fix unit test testReadingRecoveryIdFails
Change-Id: I1b9e83268c8d63a085b47a83550bdb68a7468474
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/2e78c23e
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/2e78c23e
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/2e78c23e
Branch: refs/heads/oya
Commit: 2e78c23ea6c6f04f1e2d898a3d5eadcf6e79a3e9
Parents: 6789d36
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Tue Nov 29 17:14:31 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Tue Nov 29 17:14:31 2016 +0100
----------------------------------------------------------------------
.../test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/2e78c23e/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java b/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
index 777cfdd..cb943bb 100644
--- a/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
+++ b/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
@@ -448,7 +448,7 @@ public class TestLauncherAM {
failureDetails.expectedExceptionMessage("IO error")
.expectedErrorCode(EXIT_CODE_0)
- .expectedErrorReason("IO error, IO error")
+ .expectedErrorReason("IO error")
.withStackTrace();
assertFailedExecution();
[10/48] oozie git commit: OOZIE-2724 coord:current resolves
monthly/yearly dependencies incorrectly (satishsaley via shwethags)
Posted by pb...@apache.org.
OOZIE-2724 coord:current resolves monthly/yearly dependencies incorrectly (satishsaley via shwethags)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/7cebc260
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/7cebc260
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/7cebc260
Branch: refs/heads/oya
Commit: 7cebc2604d6813fd1e2929a1b8334a8ba8df4cf8
Parents: ca01c28
Author: Shwetha GS <ss...@hortonworks.com>
Authored: Tue Nov 15 15:19:06 2016 +0530
Committer: Shwetha GS <ss...@hortonworks.com>
Committed: Tue Nov 15 15:19:06 2016 +0530
----------------------------------------------------------------------
.../apache/oozie/coord/CoordELFunctions.java | 7 ++--
.../oozie/coord/TestCoordELFunctions.java | 36 ++++++++++++++++++++
release-log.txt | 1 +
3 files changed, 40 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/7cebc260/core/src/main/java/org/apache/oozie/coord/CoordELFunctions.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/coord/CoordELFunctions.java b/core/src/main/java/org/apache/oozie/coord/CoordELFunctions.java
index 22eb1c3..925a7aa 100644
--- a/core/src/main/java/org/apache/oozie/coord/CoordELFunctions.java
+++ b/core/src/main/java/org/apache/oozie/coord/CoordELFunctions.java
@@ -62,8 +62,6 @@ public class CoordELFunctions {
public static final long MINUTE_MSEC = 60 * 1000L;
public static final long HOUR_MSEC = 60 * MINUTE_MSEC;
public static final long DAY_MSEC = 24 * HOUR_MSEC;
- public static final long MONTH_MSEC = 30 * DAY_MSEC;
- public static final long YEAR_MSEC = 365 * DAY_MSEC;
/**
* Used in defining the frequency in 'day' unit. <p> domain: <code> val > 0</code> and should be integer.
*
@@ -1401,10 +1399,11 @@ public class CoordELFunctions {
break;
case MONTH:
case END_OF_MONTH:
- instanceCount[0] = (int) ((effectiveTime.getTime() - datasetInitialInstance.getTime()) / MONTH_MSEC);
+ int diffYear = calEffectiveTime.get(Calendar.YEAR) - current.get(Calendar.YEAR);
+ instanceCount[0] = diffYear * 12 + calEffectiveTime.get(Calendar.MONTH) - current.get(Calendar.MONTH);
break;
case YEAR:
- instanceCount[0] = (int) ((effectiveTime.getTime() - datasetInitialInstance.getTime()) / YEAR_MSEC);
+ instanceCount[0] = calEffectiveTime.get(Calendar.YEAR) - current.get(Calendar.YEAR);
break;
default:
throw new IllegalArgumentException("Unhandled dataset time unit " + dsTimeUnit);
http://git-wip-us.apache.org/repos/asf/oozie/blob/7cebc260/core/src/test/java/org/apache/oozie/coord/TestCoordELFunctions.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/coord/TestCoordELFunctions.java b/core/src/test/java/org/apache/oozie/coord/TestCoordELFunctions.java
index fb7e030..be60133 100644
--- a/core/src/test/java/org/apache/oozie/coord/TestCoordELFunctions.java
+++ b/core/src/test/java/org/apache/oozie/coord/TestCoordELFunctions.java
@@ -702,6 +702,42 @@ public class TestCoordELFunctions extends XTestCase {
expr = "${coord:current(1)}";
assertEquals("2009-06-01T07:00Z", CoordELFunctions.evalAndWrap(eval, expr));
+
+ // Case 8
+ ds.setEndOfDuration(TimeUnit.END_OF_MONTH);
+ ds.setFrequency(1);
+ ds.setTimeZone(DateUtils.getTimeZone("UTC"));
+ ds.setInitInstance(DateUtils.parseDateOozieTZ("2010-01-01T00:00Z"));
+ appInst.setNominalTime(DateUtils.parseDateOozieTZ("2016-10-31T00:55Z"));
+ CoordELFunctions.configureEvaluator(eval, ds, appInst);
+
+ expr = "${coord:current(0)}";
+ assertEquals("2016-10-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
+
+ expr = "${coord:current(1)}";
+ assertEquals("2016-11-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
+
+ expr = "${coord:current(-1)}";
+ assertEquals("2016-09-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
+
+ // Test with YEAR
+ ds.setTimeUnit(TimeUnit.YEAR);
+ ds.setEndOfDuration(TimeUnit.YEAR);
+ ds.setFrequency(1);
+ ds.setTimeZone(DateUtils.getTimeZone("UTC"));
+ // Initial instance is far behind to accumulate effect of leap years
+ ds.setInitInstance(DateUtils.parseDateOozieTZ("1963-01-01T00:00Z"));
+ appInst.setNominalTime(DateUtils.parseDateOozieTZ("2016-10-31T00:55Z"));
+ CoordELFunctions.configureEvaluator(eval, ds, appInst);
+
+ expr = "${coord:current(0)}";
+ assertEquals("2016-01-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
+
+ expr = "${coord:current(1)}";
+ assertEquals("2017-01-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
+
+ expr = "${coord:current(-1)}";
+ assertEquals("2015-01-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
}
public void testOffset() throws Exception {
http://git-wip-us.apache.org/repos/asf/oozie/blob/7cebc260/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 3071c7b..fead396 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -10,6 +10,7 @@ OOZIE-2634 Queue dump command message is confusing when the queue is empty (andr
-- Oozie 4.3.0 release
+OOZIE-2724 coord:current resolves monthly/yearly dependencies incorrectly (satishsaley via shwethags)
OOZIE-2719 Test case failure (abhishekbafna via jaydeepvishwakarma)
OOZIE-2674 Improve oozie commands documentation (abhishekbafna via rkanter)
OOZIE-2710 Oozie HCatalog example workflow fails (abhishekbafna via shwethags)
[23/48] oozie git commit: OOZIE-2729 change JT to RM in methods
Posted by pb...@apache.org.
OOZIE-2729 change JT to RM in methods
Change-Id: I42f305b8e24bb7f37c5c8b032e9eff67dc3b4027
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/ddbd90f8
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/ddbd90f8
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/ddbd90f8
Branch: refs/heads/oya
Commit: ddbd90f88c511a714c35c39aedbce0ab2624724d
Parents: 3b6daff
Author: Peter Cseh <ge...@cloudera.com>
Authored: Tue Nov 22 14:54:00 2016 +0100
Committer: Peter Cseh <ge...@cloudera.com>
Committed: Tue Nov 22 14:54:00 2016 +0100
----------------------------------------------------------------------
.../action/hadoop/TestDistCpActionExecutor.java | 2 +-
.../action/hadoop/TestJavaActionExecutor.java | 99 ++++++++++----------
.../oozie/action/hadoop/TestLauncher.java | 2 +-
.../oozie/action/hadoop/TestOozieJobInfo.java | 4 +-
.../apache/oozie/action/hadoop/TestRerun.java | 2 +-
.../action/hadoop/TestShellActionExecutor.java | 20 ++--
.../oozie/action/hadoop/TestShellMain.java | 2 +-
.../oozie/TestSubWorkflowActionExecutor.java | 4 +-
.../coord/TestCoordActionStartXCommand.java | 2 +-
.../command/coord/TestCoordRerunXCommand.java | 6 +-
.../command/wf/TestActionCheckXCommand.java | 6 +-
.../command/wf/TestActionStartXCommand.java | 8 +-
.../oozie/command/wf/TestReRunXCommand.java | 2 +-
.../oozie/command/wf/TestSubmitXCommand.java | 2 +-
.../wf/TestWorkflowActionKillXCommand.java | 2 +-
.../apache/oozie/event/TestEventGeneration.java | 2 +-
.../TestCoordJobsGetForPurgeJPAExecutor.java | 2 +-
...estCoordJobsToBeMaterializedJPAExecutor.java | 2 +-
.../service/TestHadoopAccessorService.java | 12 +--
.../oozie/service/TestRecoveryService.java | 2 +-
.../oozie/service/TestShareLibService.java | 14 +--
.../org/apache/oozie/test/TestXTestCase.java | 2 +-
.../org/apache/oozie/test/XDataTestCase.java | 10 +-
.../java/org/apache/oozie/test/XFsTestCase.java | 4 +-
.../java/org/apache/oozie/test/XTestCase.java | 28 +++---
.../action/hadoop/TestHiveActionExecutor.java | 13 +--
.../action/hadoop/TestHive2ActionExecutor.java | 15 +--
.../action/hadoop/TestPigActionExecutor.java | 8 +-
.../apache/oozie/action/hadoop/TestPyspark.java | 8 +-
.../action/hadoop/TestSparkActionExecutor.java | 13 +--
.../action/hadoop/TestSqoopActionExecutor.java | 17 +---
.../hadoop/TestMapReduceActionExecutor.java | 44 ++++-----
32 files changed, 157 insertions(+), 202 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
index c1f0e6f..c953bb8 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
@@ -50,7 +50,7 @@ public class TestDistCpActionExecutor extends ActionExecutorTestCase{
os.close();
String actionXml = "<distcp>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<arg>" + inputPath + "</arg>"+
"<arg>" + outputPath + "</arg>" +
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
index bfc8ab4..2b31207 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
@@ -44,7 +44,6 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.action.ActionExecutor;
@@ -87,7 +86,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
setSystemProperty("oozie.service.ActionService.executor.classes", JavaActionExecutor.class.getName());
setSystemProperty("oozie.service.HadoopAccessorService.action.configurations",
- "*=hadoop-conf," + getJobTrackerUri() + "=action-conf");
+ "*=hadoop-conf," + getResourceManagerUri() + "=action-conf");
setSystemProperty(WorkflowAppService.SYSTEM_LIB_PATH, getFsTestCaseDir().toUri().getPath() + "/systemlib");
new File(getTestCaseConfDir(), "action-conf").mkdir();
InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream("test-action-config.xml");
@@ -135,7 +134,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
fail();
}
- Element actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ Element actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<job-xml>job.xml</job-xml>" + "<job-xml>job2.xml</job-xml>" + "<configuration>" +
"<property><name>oozie.launcher.a</name><value>LA</value></property>" +
@@ -196,7 +195,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
conf = ae.createBaseHadoopConf(context, actionXml);
assertEquals(protoConf.get(WorkflowAppService.HADOOP_USER), conf.get(WorkflowAppService.HADOOP_USER));
- assertEquals(getJobTrackerUri(), conf.get("yarn.resourcemanager.address"));
+ assertEquals(getResourceManagerUri(), conf.get("yarn.resourcemanager.address"));
assertEquals(getNameNodeUri(), conf.get("fs.default.name"));
conf = ae.createBaseHadoopConf(context, actionXml);
@@ -268,7 +267,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
// FIXME - this file exists - must use the correct path
// assertTrue(getFileSystem().exists(new Path(context.getActionDir(), LauncherMapper.ACTION_CONF_XML)));
- actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>mapred.job.queue.name</name><value>AQ</value></property>" +
"<property><name>oozie.action.sharelib.for.java</name><value>sharelib-java</value></property>" +
@@ -281,7 +280,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertEquals("AQ", actionConf.get("mapred.job.queue.name"));
assertEquals("sharelib-java", actionConf.get("oozie.action.sharelib.for.java"));
- actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>oozie.launcher.mapred.job.queue.name</name><value>LQ</value></property>" +
"</configuration>" + "<main-class>MAIN-CLASS</main-class>" +
@@ -291,7 +290,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
conf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf);
assertEquals("LQ", conf.get("mapred.job.queue.name"));
- actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>oozie.launcher.mapred.job.queue.name</name><value>LQ</value></property>" +
"<property><name>mapred.job.queue.name</name><value>AQ</value></property>" +
@@ -354,7 +353,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testSimpestSleSubmitOK() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"</java>";
@@ -372,7 +371,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testOutputSubmitOK() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>out</arg>" +
@@ -397,7 +396,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testIdSwapSubmitOK() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>id</arg>" +
@@ -426,7 +425,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
IOUtils.copyStream(is, os);
String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester2.class.getName() + "</main-class>" +
"<file>" + appJarPath.toString() + "</file>" +
@@ -447,7 +446,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testExit0SubmitOK() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>exit0</arg>" +
@@ -468,7 +467,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testExit1SubmitError() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>exit1</arg>" +
@@ -491,7 +490,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testExceptionSubmitException() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>exception</arg>" +
@@ -513,7 +512,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testExceptionSubmitThrowable() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>throwable</arg>" +
@@ -535,7 +534,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testKill() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"</java>";
@@ -552,7 +551,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testRecovery() throws Exception {
final String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"</java>";
@@ -612,7 +611,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
getFileSystem().create(rootArchive).close();
String actionXml = "<java>" +
- " <job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ " <job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
" <name-node>" + getNameNodeUri() + "</name-node>" +
" <main-class>CLASS</main-class>" +
" <file>" + jar.toString() + "</file>\n" +
@@ -720,7 +719,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
getFileSystem().create(rootArchive).close();
String actionXml = "<java>" +
- " <job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ " <job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
" <name-node>" + getNameNodeUri() + "</name-node>" +
" <main-class>CLASS</main-class>" +
" <file>" + jar.toString() +
@@ -802,7 +801,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
fs.mkdirs(delete);
String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<prepare>" +
"<mkdir path='" + mkdir + "'/>" +
@@ -1190,7 +1189,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
}
public void testJavaOpts() throws Exception {
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<configuration>" + "<property><name>oozie.launcher.a</name><value>LA</value></property>"
+ "<property><name>a</name><value>AA</value></property>"
@@ -1216,7 +1215,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertEquals("-Xmx200m JAVA-OPT1 JAVA-OPT2", conf.get("mapred.child.java.opts"));
assertEquals("-Xmx200m JAVA-OPT1 JAVA-OPT2", conf.get("mapreduce.map.java.opts"));
- actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<configuration>" + "<property><name>oozie.launcher.a</name><value>LA</value></property>"
+ "<property><name>a</name><value>AA</value></property>"
@@ -1241,7 +1240,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertEquals("-Xmx200m JAVA-OPT1 JAVA-OPT2", conf.get("mapred.child.java.opts"));
assertEquals("-Xmx200m JAVA-OPT1 JAVA-OPT2", conf.get("mapreduce.map.java.opts"));
- actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<configuration>" + "<property><name>oozie.launcher.a</name><value>LA</value></property>"
+ "<property><name>a</name><value>AA</value></property>"
@@ -1268,7 +1267,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertEquals("JAVA-OPT3 JAVA-OPT1 JAVA-OPT2", conf.get("mapred.child.java.opts"));
assertEquals("JAVA-OPT3 JAVA-OPT1 JAVA-OPT2", conf.get("mapreduce.map.java.opts"));
- actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<configuration>" + "<property><name>oozie.launcher.a</name><value>LA</value></property>"
+ "<property><name>a</name><value>AA</value></property>"
@@ -1295,7 +1294,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertEquals("-Xmx200m JAVA-OPT3 JAVA-OPT1 JAVA-OPT2", conf.get("mapred.child.java.opts"));
assertEquals("-Xmx200m JAVA-OPT3 JAVA-OPT1 JAVA-OPT2", conf.get("mapreduce.map.java.opts"));
- actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<configuration>" + "<property><name>oozie.launcher.a</name><value>LA</value></property>"
+ "<property><name>a</name><value>AA</value></property>"
@@ -1333,7 +1332,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Path jar2Path = new Path(actionLibPath, "jar2.jar");
getFileSystem().create(jar2Path).close();
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" + "<configuration>" +
"<property><name>oozie.launcher.oozie.libpath</name><value>" + actionLibPath + "</value></property>" +
"</configuration>" + "<main-class>MAIN-CLASS</main-class>" +
@@ -1356,7 +1355,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Path jar3Path = new Path(getFsTestCaseDir(), "jar3.jar");
getFileSystem().create(jar3Path).close();
- actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" + "<configuration>" +
"<property><name>oozie.launcher.oozie.libpath</name><value>" + jar3Path + "</value></property>" +
"</configuration>" + "<main-class>MAIN-CLASS</main-class>" +
@@ -1375,7 +1374,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertTrue(cacheFilesStr.contains(jar3Path.toString()));
// Test adding a directory and a file (comma separated)
- actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" + "<configuration>" +
"<property><name>oozie.launcher.oozie.libpath</name><value>" + actionLibPath + "," + jar3Path +
"</value></property>" +
@@ -1424,7 +1423,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Path jar5Path = new Path(otherShareLibPath, "jar5.jar");
getFileSystem().create(jar5Path).close();
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>MAIN-CLASS</main-class>" +
"</java>";
@@ -1504,7 +1503,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
return "java-action-executor";
}
};
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNode2Uri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName()
+ "</main-class>" + "</java>";
Element eActionXml = XmlUtils.parseXml(actionXml);
@@ -1539,7 +1538,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testFilesystemScheme() throws Exception {
try {
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName()
+ "</main-class>" + "</java>";
Element eActionXml = XmlUtils.parseXml(actionXml);
@@ -1562,7 +1561,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testACLDefaults_launcherACLsSetToDefault() throws Exception {
// CASE: launcher specific ACLs not configured - set defaults
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>mapreduce.job.acl-view-job</name><value>VIEWER</value></property>" +
"<property><name>mapreduce.job.acl-modify-job</name><value>MODIFIER</value></property>" +
@@ -1587,7 +1586,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testACLDefaults_noFalseChange() throws Exception {
// CASE: launcher specific ACLs configured, but MR job ACLs not configured i.e. null. Check for no false changes to null
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>oozie.launcher.mapreduce.job.acl-view-job</name><value>V</value></property>" +
"<property><name>oozie.launcher.mapreduce.job.acl-modify-job</name><value>M</value></property>" +
@@ -1612,7 +1611,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testACLDefaults_explicitLauncherAndActionSettings() throws Exception {
// CASE: launcher specific ACLs configured, as well as MR job ACLs configured. Check that NO overriding with defaults
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>oozie.launcher.mapreduce.job.acl-view-job</name><value>V</value></property>" +
"<property><name>oozie.launcher.mapreduce.job.acl-modify-job</name><value>M</value></property>" +
@@ -1640,7 +1639,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testACLModifyJob() throws Exception {
// CASE 1: If user has provided modify-acl value
// then it should NOT be overridden by group name
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>mapreduce.job.acl-modify-job</name><value>MODIFIER</value></property>" +
"</configuration>" + "<main-class>MAIN-CLASS</main-class>" +
@@ -1657,7 +1656,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
// CASE 2: If user has not provided modify-acl value
// then it equals group name
- actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"</configuration>" + "<main-class>MAIN-CLASS</main-class>" +
"</java>";
@@ -1789,7 +1788,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
//Test UpdateCOnfForJavaTmpDir for launcherConf
String actionXml1 = "<java>"
+ "<job-tracker>"
- + getJobTrackerUri()
+ + getResourceManagerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1825,7 +1824,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
//Test UpdateConfForJavaTmpDIr for actionConf
String actionXml = "<java>"
+ "<job-tracker>"
- + getJobTrackerUri()
+ + getResourceManagerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1853,7 +1852,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Element actionXml = XmlUtils
.parseXml("<java>"
+ "<job-tracker>"
- + getJobTrackerUri()
+ + getResourceManagerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1887,7 +1886,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Element actionXmlWithTez = XmlUtils
.parseXml("<java>"
+ "<job-tracker>"
- + getJobTrackerUri()
+ + getResourceManagerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1903,7 +1902,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Element actionXmlATSDisabled = XmlUtils
.parseXml("<java>"
+ "<job-tracker>"
- + getJobTrackerUri()
+ + getResourceManagerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -2080,7 +2079,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
// (first should be used)
// 4. Fully qualified path located in the second filesystem
String str = "<java>"
- + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ "<name-node>" + getNameNode2Uri() + "</name-node>"
+ "<job-xml>" + jobXmlAbsolutePath.toString() + "</job-xml>"
+ "<job-xml>job2.xml</job-xml>"
@@ -2170,7 +2169,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
is = new FileInputStream(jarFile);
IOUtils.copyStream(is, os3);
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNode2Uri() + "</name-node>" +
"<job-xml>job.xml</job-xml>" +
"<main-class>"+ LauncherMainTester.class.getName() + "</main-class>" +
@@ -2213,7 +2212,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
w.write("A = load '$INPUT' using PigStorage(':');\n");
w.write("store B into '$OUTPUT' USING PigStorage();\n");
w.close();
- String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<prepare>" + "<delete path='outputdir' />" + "</prepare>"
+ "<configuration>" + "<property>" + "<name>mapred.compress.map.output</name>" + "<value>true</value>"
+ "</property>" + "<property>" + "<name>mapred.job.queue.name</name>" + "<value>default</value>"
@@ -2237,7 +2236,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testDefaultConfigurationInLauncher() throws Exception {
JavaActionExecutor ae = new JavaActionExecutor();
Element actionXmlWithConfiguration = XmlUtils.parseXml(
- "<java>" + "<job-tracker>" + getJobTrackerUri() +"</job-tracker>" +
+ "<java>" + "<job-tracker>" + getResourceManagerUri() +"</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<configuration>" +
"<property><name>oozie.launcher.a</name><value>AA</value></property>" +
@@ -2246,16 +2245,16 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
"<main-class>MAIN-CLASS</main-class>" +
"</java>");
Element actionXmlWithoutConfiguration = XmlUtils.parseXml(
- "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>MAIN-CLASS</main-class>" +
"</java>");
Configuration conf = new Configuration(false);
Assert.assertEquals(0, conf.size());
- conf.set("yarn.resourcemanager.address", getJobTrackerUri());
+ conf.set("yarn.resourcemanager.address", getResourceManagerUri());
ae.setupLauncherConf(conf, actionXmlWithConfiguration, null, null);
- assertEquals(getJobTrackerUri(), conf.get("yarn.resourcemanager.address"));
+ assertEquals(getResourceManagerUri(), conf.get("yarn.resourcemanager.address"));
assertEquals("AA", conf.get("oozie.launcher.a"));
assertEquals("AA", conf.get("a"));
assertEquals("action.barbar", conf.get("oozie.launcher.action.foofoo"));
@@ -2264,9 +2263,9 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
conf = new Configuration(false);
Assert.assertEquals(0, conf.size());
- conf.set("yarn.resourcemanager.address", getJobTrackerUri());
+ conf.set("yarn.resourcemanager.address", getResourceManagerUri());
ae.setupLauncherConf(conf, actionXmlWithoutConfiguration, null, null);
- assertEquals(getJobTrackerUri(), conf.get("yarn.resourcemanager.address"));
+ assertEquals(getResourceManagerUri(), conf.get("yarn.resourcemanager.address"));
assertEquals("action.barbar", conf.get("oozie.launcher.action.foofoo"));
assertEquals("action.barbar", conf.get("action.foofoo"));
assertEquals(3, conf.size());
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/action/hadoop/TestLauncher.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestLauncher.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestLauncher.java
index e7b9534..1005274 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestLauncher.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestLauncher.java
@@ -79,7 +79,7 @@ public class TestLauncher extends XFsTestCase {
jobConf.setInt("mapred.reduce.max.attempts", 1);
jobConf.set("mapreduce.framework.name", "yarn");
- jobConf.set("mapred.job.tracker", getJobTrackerUri());
+ jobConf.set("mapred.job.tracker", getResourceManagerUri());
jobConf.set("fs.default.name", getNameNodeUri());
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/action/hadoop/TestOozieJobInfo.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestOozieJobInfo.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestOozieJobInfo.java
index 9efacdd..d3dd377 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestOozieJobInfo.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestOozieJobInfo.java
@@ -202,7 +202,7 @@ public class TestOozieJobInfo extends XDataTestCase {
writer2.write(APP1);
writer2.close();
jobConf.set(OozieClient.USER_NAME, getTestUser());
- jobConf.set("myJobTracker", getJobTrackerUri());
+ jobConf.set("myJobTracker", getResourceManagerUri());
jobConf.set("myNameNode", getNameNodeUri());
jobConf.set("wfAppPath", new Path(wfAppPath, "workflow.xml").toString());
jobConf.set("mrclass", MapperReducerForTest.class.getName());
@@ -239,7 +239,7 @@ public class TestOozieJobInfo extends XDataTestCase {
Configuration conf = new XConfiguration();
conf.set(OozieClient.BUNDLE_APP_PATH, bundleAppPath.toString());
conf.set(OozieClient.USER_NAME, getTestUser());
- conf.set("jobTracker", getJobTrackerUri());
+ conf.set("jobTracker", getResourceManagerUri());
conf.set("nameNode", getNameNodeUri());
conf.set("appName", "bundle-app-name");
conf.set("start", "2009-02-01T00:00Z");
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/action/hadoop/TestRerun.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestRerun.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestRerun.java
index cf9ce1b..fbb0512 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestRerun.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestRerun.java
@@ -90,7 +90,7 @@ public class TestRerun extends XFsTestCase {
final OozieClient wfClient = LocalOozie.getClient();
Properties conf = wfClient.createConfiguration();
conf.setProperty(OozieClient.APP_PATH, new Path(appPath, "workflow.xml").toString());
- conf.setProperty("jobTracker", getJobTrackerUri());
+ conf.setProperty("jobTracker", getResourceManagerUri());
conf.setProperty("nameNode", getNameNodeUri());
conf.setProperty("mrclass", MapperReducerForTest.class.getName());
conf.setProperty("input", input.toString());
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/action/hadoop/TestShellActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellActionExecutor.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellActionExecutor.java
index 931307e..fa13548 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellActionExecutor.java
@@ -26,16 +26,10 @@ import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.util.Shell;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
-import org.apache.oozie.service.ActionService;
-import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.util.PropertiesUtils;
@@ -81,7 +75,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
public void testSetupMethods() throws Exception {
ShellActionExecutor ae = new ShellActionExecutor();
assertNull(ae.getLauncherClasses());
- Element actionXml = XmlUtils.parseXml("<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ Element actionXml = XmlUtils.parseXml("<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + "<exec>SCRIPT</exec>"
+ "<argument>a=A</argument>" + "<argument>b=B</argument>" + "</shell>");
@@ -123,7 +117,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
w.close();
// Create sample Shell action xml
- String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<exec>" + SHELL_EXEC + "</exec>" + "<argument>" + SHELL_PARAM + "</argument>"
+ "<argument>" + SHELL_SCRIPTNAME + "</argument>" + "<argument>A</argument>" + "<argument>B</argument>"
+ "<env-var>var1=val1</env-var>" + "<env-var>var2=val2</env-var>" + "<file>" + script.toString()
@@ -149,7 +143,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
w.close();
// Create sample Shell action xml
- String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>oozie.action.shell.setup.hadoop.conf.dir</name><value>true</value></property>"
+ "</configuration>" + "<exec>" + SHELL_EXEC + "</exec>" + "<argument>" + SHELL_PARAM + "</argument>"
@@ -193,7 +187,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
w.close();
// Create sample Shell action xml
- String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>oozie.action.shell.setup.hadoop.conf.dir</name><value>true</value></property>"
+ "<property><name>oozie.action.shell.setup.hadoop.conf.dir.write.log4j.properties"
@@ -223,7 +217,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
w.close();
// Create sample shell action xml
- String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<exec>" + SHELL_EXEC + "</exec>" + "<argument>" + SHELL_PARAM + "</argument>"
+ "<argument>" + SHELL_SCRIPTNAME + "</argument>" + "<argument>A</argument>" + "<argument>B</argument>" + "<file>"
+ script.toString() + "#" + script.getName() + "</file>" + "</shell>";
@@ -249,7 +243,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
w.write(PERL_SCRIPT_CONTENT);
w.close();
// Create a Sample Shell action using the perl script
- String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<exec>perl</exec>" + "<argument>script.pl</argument>"
+ "<argument>A</argument>" + "<argument>B</argument>" + "<env-var>my_var1=my_val1</env-var>" + "<file>"
+ script.toString() + "#" + script.getName() + "</file>" + "<capture-output/>" + "</shell>";
@@ -276,7 +270,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
String envValueHavingEqualSign = "a=b;c=d";
// Create sample shell action xml
- String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<exec>" + SHELL_EXEC + "</exec>" + "<argument>" + SHELL_PARAM + "</argument>"
+ "<argument>" + SHELL_SCRIPTNAME + "</argument>" + "<argument>A</argument>" + "<argument>B</argument>"
+ "<env-var>var1=val1</env-var>" + "<env-var>var2=" + envValueHavingEqualSign + "</env-var>" + "<file>" + script.toString()
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
index d184a5a..eb442f0 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
@@ -47,7 +47,7 @@ public class TestShellMain extends ShellTestCase {
jobConf.setInt("mapred.map.tasks", 1);
jobConf.setInt("mapred.map.max.attempts", 1);
jobConf.setInt("mapred.reduce.max.attempts", 1);
- jobConf.set("mapred.job.tracker", getJobTrackerUri());
+ jobConf.set("mapred.job.tracker", getResourceManagerUri());
jobConf.set("fs.default.name", getNameNodeUri());
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/action/oozie/TestSubWorkflowActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/oozie/TestSubWorkflowActionExecutor.java b/core/src/test/java/org/apache/oozie/action/oozie/TestSubWorkflowActionExecutor.java
index e074d48..f5c7c2e 100644
--- a/core/src/test/java/org/apache/oozie/action/oozie/TestSubWorkflowActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/oozie/TestSubWorkflowActionExecutor.java
@@ -559,7 +559,7 @@ public class TestSubWorkflowActionExecutor extends ActionExecutorTestCase {
"<start to='java' />" +
" <action name='java'>" +
"<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + JavaSleepAction.class.getName() + "</main-class>" +
"<arg>exit0</arg>" +
@@ -850,7 +850,7 @@ public class TestSubWorkflowActionExecutor extends ActionExecutorTestCase {
"<start to='java' />" +
"<action name='java'>" +
"<java>" +
- " <job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ " <job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
" <name-node>" + getNameNodeUri() + "</name-node>" +
" <configuration>" +
" <property>" +
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/command/coord/TestCoordActionStartXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/coord/TestCoordActionStartXCommand.java b/core/src/test/java/org/apache/oozie/command/coord/TestCoordActionStartXCommand.java
index db66c72..172479b 100644
--- a/core/src/test/java/org/apache/oozie/command/coord/TestCoordActionStartXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/coord/TestCoordActionStartXCommand.java
@@ -198,7 +198,7 @@ public class TestCoordActionStartXCommand extends XDataTestCase {
jobConf.setProperty(OozieClient.COORDINATOR_APP_PATH, coordAppPath.toString());
jobConf.setProperty(OozieClient.USER_NAME, getTestUser());
jobConf.setProperty(OozieClient.GROUP_NAME, getTestGroup());
- jobConf.setProperty("myJobTracker", getJobTrackerUri());
+ jobConf.setProperty("myJobTracker", getResourceManagerUri());
jobConf.setProperty("myNameNode", getNameNodeUri());
jobConf.setProperty("wfAppPath", new Path(wfAppPath, "workflow.xml").toString());
jobConf.setProperty("mrclass", MapperReducerForTest.class.getName());
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java b/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
index 6ae0ae2..d956b6c 100644
--- a/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
@@ -1083,7 +1083,7 @@ public class TestCoordRerunXCommand extends XDataTestCase {
final OozieClient coordClient = LocalOozie.getCoordClient();
Properties conf = coordClient.createConfiguration();
conf.setProperty(OozieClient.COORDINATOR_APP_PATH, appPath.toString());
- conf.setProperty("jobTracker", getJobTrackerUri());
+ conf.setProperty("jobTracker", getResourceManagerUri());
conf.setProperty("nameNode", getNameNodeUri());
conf.setProperty("wfAppPath", wfAppPath.toString());
conf.remove("user.name");
@@ -1293,7 +1293,7 @@ public class TestCoordRerunXCommand extends XDataTestCase {
final OozieClient coordClient = LocalOozie.getCoordClient();
Properties conf = coordClient.createConfiguration();
conf.setProperty(OozieClient.COORDINATOR_APP_PATH, appPath.toString());
- conf.setProperty("jobTracker", getJobTrackerUri());
+ conf.setProperty("jobTracker", getResourceManagerUri());
conf.setProperty("nameNode", getNameNodeUri());
conf.setProperty("wfAppPath", wfAppPath.toString());
conf.remove("user.name");
@@ -1306,7 +1306,7 @@ public class TestCoordRerunXCommand extends XDataTestCase {
"<start to='java' />" +
" <action name='java'>" +
"<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + JavaSleepAction.class.getName() + "</main-class>" +
"<arg>exit0</arg>" +
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/command/wf/TestActionCheckXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestActionCheckXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestActionCheckXCommand.java
index 135c34a..4151182 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestActionCheckXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestActionCheckXCommand.java
@@ -368,7 +368,7 @@ public class TestActionCheckXCommand extends XDataTestCase {
// At this point, the launcher job has started (but not finished)
// Now, shutdown the job tracker to pretend it has gone down during the launcher job
- executeWhileJobTrackerIsShutdown(new ShutdownJobTrackerExecutable() {
+ executeWhileResourceManagerIsShutdown(new ShutdownResourceManagerExecutable() {
@Override
public void execute() throws Exception {
assertEquals(0, action1.getRetries());
@@ -505,7 +505,7 @@ public class TestActionCheckXCommand extends XDataTestCase {
// At this point, the launcher job has finished and the map-reduce action has started (but not finished)
// Now, shutdown the job tracker to pretend it has gone down during the map-reduce job
- executeWhileJobTrackerIsShutdown(new ShutdownJobTrackerExecutable() {
+ executeWhileResourceManagerIsShutdown(new ShutdownResourceManagerExecutable() {
@Override
public void execute() throws Exception {
assertEquals(0, action1.getRetries());
@@ -647,7 +647,7 @@ public class TestActionCheckXCommand extends XDataTestCase {
w.close();
String actionXml = "<map-reduce>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<prepare><delete path=\"" + outputDir.toString() + "\"/></prepare>" +
"<configuration>" +
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/command/wf/TestActionStartXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestActionStartXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestActionStartXCommand.java
index 59cfdd2..7c7ea5e 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestActionStartXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestActionStartXCommand.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
@@ -48,7 +47,6 @@ import org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQ
import org.apache.oozie.executor.jpa.WorkflowJobInsertJPAExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;
-import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.InstrumentationService;
import org.apache.oozie.service.JPAService;
import org.apache.oozie.service.LiteWorkflowStoreService;
@@ -331,7 +329,7 @@ public class TestActionStartXCommand extends XDataTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName()
+ "</value></property>" +
@@ -417,7 +415,7 @@ public class TestActionStartXCommand extends XDataTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName()
+ "</value></property>" + "<property><name>mapred.reducer.class</name><value>"
@@ -524,7 +522,7 @@ public class TestActionStartXCommand extends XDataTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName()
+ "</value></property>" + "<property><name>mapred.reducer.class</name><value>"
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/command/wf/TestReRunXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestReRunXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestReRunXCommand.java
index 45cbbc4..298dd1e 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestReRunXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestReRunXCommand.java
@@ -144,7 +144,7 @@ public class TestReRunXCommand extends XDataTestCase {
final OozieClient wfClient = LocalOozie.getClient();
Properties conf = wfClient.createConfiguration();
conf.setProperty("nameNode", getNameNodeUri());
- conf.setProperty("jobTracker", getJobTrackerUri());
+ conf.setProperty("jobTracker", getResourceManagerUri());
conf.setProperty(OozieClient.APP_PATH, getTestCaseFileUri("workflow.xml"));
conf.setProperty(OozieClient.USER_NAME, getTestUser());
conf.setProperty("cmd4", "echo1"); //expected to fail
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/command/wf/TestSubmitXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitXCommand.java
index 47ff8ca..c1e2a5b 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitXCommand.java
@@ -345,7 +345,7 @@ public class TestSubmitXCommand extends XDataTestCase {
writeToFile(wfXml, workflowUri);
Configuration conf = new XConfiguration();
conf.set("nameNode", getNameNodeUri());
- conf.set("jobTracker", getJobTrackerUri());
+ conf.set("jobTracker", getResourceManagerUri());
conf.set("foobarRef", "foobarRef");
conf.set("key", "job_prop_value");
conf.set(OozieClient.APP_PATH, workflowUri);
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
index ef75f14..4509586 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
@@ -132,7 +132,7 @@ public class TestWorkflowActionKillXCommand extends XDataTestCase {
action.setExternalChildIDs(childID);
String actionXml = "<map-reduce>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<configuration>" +
"<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName() +
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/event/TestEventGeneration.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/event/TestEventGeneration.java b/core/src/test/java/org/apache/oozie/event/TestEventGeneration.java
index 59d0420..6fd033f 100644
--- a/core/src/test/java/org/apache/oozie/event/TestEventGeneration.java
+++ b/core/src/test/java/org/apache/oozie/event/TestEventGeneration.java
@@ -707,7 +707,7 @@ public class TestEventGeneration extends XDataTestCase {
action.setLastCheckTime(new Date());
action.setPendingOnly();
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<main-class>" + "${dummy}" + "</java>";
action.setConf(actionXml);
jpaService.execute(new WorkflowActionInsertJPAExecutor(action));
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsGetForPurgeJPAExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsGetForPurgeJPAExecutor.java b/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsGetForPurgeJPAExecutor.java
index 39bbee4..6d6acb4 100644
--- a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsGetForPurgeJPAExecutor.java
+++ b/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsGetForPurgeJPAExecutor.java
@@ -177,7 +177,7 @@ public class TestCoordJobsGetForPurgeJPAExecutor extends XFsTestCase {
final OozieClient coordClient = LocalOozie.getCoordClient();
Properties conf = coordClient.createConfiguration();
conf.setProperty(OozieClient.COORDINATOR_APP_PATH, appPath.toString());
- conf.setProperty("jobTracker", getJobTrackerUri());
+ conf.setProperty("jobTracker", getResourceManagerUri());
conf.setProperty("nameNode", getNameNodeUri());
conf.setProperty("wfAppPath", wfAppPath.toString());
conf.remove("user.name");
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsToBeMaterializedJPAExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsToBeMaterializedJPAExecutor.java b/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsToBeMaterializedJPAExecutor.java
index 3b72ecd..cd6c35f 100644
--- a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsToBeMaterializedJPAExecutor.java
+++ b/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsToBeMaterializedJPAExecutor.java
@@ -165,7 +165,7 @@ public class TestCoordJobsToBeMaterializedJPAExecutor extends XFsTestCase {
final OozieClient coordClient = LocalOozie.getCoordClient();
Properties conf = coordClient.createConfiguration();
conf.setProperty(OozieClient.COORDINATOR_APP_PATH, appPath.toString());
- conf.setProperty("jobTracker", getJobTrackerUri());
+ conf.setProperty("jobTracker", getResourceManagerUri());
conf.setProperty("nameNode", getNameNodeUri());
conf.setProperty("wfAppPath", wfAppPath.toString());
conf.remove("user.name");
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java b/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
index e671b7a..6ded256 100644
--- a/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
@@ -20,13 +20,11 @@ package org.apache.oozie.service;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.security.authorize.*;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.client.api.YarnClient;
-import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.oozie.test.XFsTestCase;
import org.apache.hadoop.mapred.JobConf;
@@ -154,7 +152,7 @@ public class TestHadoopAccessorService extends XFsTestCase {
public void testCreateJobClient() throws Exception {
HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
- JobConf conf = has.createJobConf(getJobTrackerUri());
+ JobConf conf = has.createJobConf(getResourceManagerUri());
JobClient jc = has.createJobClient(getTestUser(), conf);
assertNotNull(jc);
@@ -169,7 +167,7 @@ public class TestHadoopAccessorService extends XFsTestCase {
}
JobConf conf2 = new JobConf(false);
- conf2.set("mapred.job.tracker", getJobTrackerUri());
+ conf2.set("mapred.job.tracker", getResourceManagerUri());
try {
has.createJobClient(getTestUser(), conf2);
fail("Should have thrown exception because Configuration not created by HadoopAccessorService");
@@ -181,7 +179,7 @@ public class TestHadoopAccessorService extends XFsTestCase {
public void testCreateYarnClient() throws Exception {
HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
- JobConf conf = has.createJobConf(getJobTrackerUri());
+ JobConf conf = has.createJobConf(getResourceManagerUri());
YarnClient yc = has.createYarnClient(getTestUser(), conf);
assertNotNull(yc);
@@ -197,7 +195,7 @@ public class TestHadoopAccessorService extends XFsTestCase {
}
JobConf conf2 = new JobConf(false);
- conf2.set("yarn.resourcemanager.address", getJobTrackerUri());
+ conf2.set("yarn.resourcemanager.address", getResourceManagerUri());
try {
has.createYarnClient(getTestUser(), conf2);
fail("Should have thrown exception because Configuration not created by HadoopAccessorService");
@@ -209,7 +207,7 @@ public class TestHadoopAccessorService extends XFsTestCase {
public void testCreateFileSystem() throws Exception {
HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
- JobConf conf = has.createJobConf(getJobTrackerUri());
+ JobConf conf = has.createJobConf(getResourceManagerUri());
FileSystem fs = has.createFileSystem(getTestUser(), new URI(getNameNodeUri()), conf);
assertNotNull(fs);
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java b/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
index a3270e9..c949efe 100644
--- a/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
@@ -905,7 +905,7 @@ public class TestRecoveryService extends XDataTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName()
+ "</value></property>" + "<property><name>mapred.reducer.class</name><value>"
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestShareLibService.java b/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
index 390545d..dc6f6ef 100644
--- a/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
@@ -175,7 +175,7 @@ public class TestShareLibService extends XFsTestCase {
try {
services.init();
- String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "</java>";
Element eActionXml = XmlUtils.parseXml(actionXml);
XConfiguration protoConf = new XConfiguration();
@@ -203,7 +203,7 @@ public class TestShareLibService extends XFsTestCase {
conf.set(ShareLibService.SHIP_LAUNCHER_JAR, "true");
try {
services.init();
- String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "</pig>";
Element eActionXml = XmlUtils.parseXml(actionXml);
XConfiguration protoConf = new XConfiguration();
@@ -249,7 +249,7 @@ public class TestShareLibService extends XFsTestCase {
try {
services.init();
- String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<property><name>oozie.action.sharelib.for.pig</name><value>pig_10</value></property>" + "</pig>";
Element eActionXml = XmlUtils.parseXml(actionXml);
@@ -537,7 +537,7 @@ public class TestShareLibService extends XFsTestCase {
createFile(libpath.toString() + Path.SEPARATOR + "pig_10" + Path.SEPARATOR + "pig-10.jar");
createFile(libpath.toString() + Path.SEPARATOR + "oozie" + Path.SEPARATOR + "oozie_luncher.jar");
- String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<property><name>oozie.action.sharelib.for.pig</name><value>pig_10</value></property>" + "</pig>";
Element eActionXml = XmlUtils.parseXml(actionXml);
@@ -718,7 +718,7 @@ public class TestShareLibService extends XFsTestCase {
private URI[] setUpPigJob(boolean useSystemSharelib) throws Exception {
services.init();
- String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node></pig>";
Element eActionXml = XmlUtils.parseXml(actionXml);
XConfiguration protoConf = new XConfiguration();
@@ -863,7 +863,7 @@ public class TestShareLibService extends XFsTestCase {
// Test hive-site.xml not in distributed cache
setupSharelibConf("hive-site.xml", "oozie.hive_conf");
- String actionXml = "<hive>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<hive>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<script>test</script>" + "</hive>";
Element eActionXml = XmlUtils.parseXml(actionXml);
@@ -943,7 +943,7 @@ public class TestShareLibService extends XFsTestCase {
prop.put("oozie.hive_conf", "/user/test/" + sharelibPath + "/hive-site.xml#hive-site.xml");
setupSharelibConf("hive-site.xml", "oozie.hive_conf", prop);
- String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<script>test</script>" + "</pig>";
Element eActionXml = XmlUtils.parseXml(actionXml);
XConfiguration protoConf = new XConfiguration();
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestXTestCase.java b/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
index 735f80a..9c0faa3 100644
--- a/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
@@ -193,7 +193,7 @@ public class TestXTestCase extends TestCase {
setSystemProperty(TestConstants.OOZIE_TEST_NAME_NODE, "hdfs://xyz:9000");
setSystemProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, "xyz:9001");
assertEquals("hdfs://xyz:9000", getNameNodeUri());
- assertEquals("xyz:9001", getJobTrackerUri());
+ assertEquals("xyz:9001", getResourceManagerUri());
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XDataTestCase.java b/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
index 2105e2f..93d291b 100644
--- a/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
@@ -1094,7 +1094,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
Configuration conf = new XConfiguration();
conf.set(OozieClient.BUNDLE_APP_PATH, bundleAppPath.toString());
conf.set(OozieClient.USER_NAME, getTestUser());
- conf.set("jobTracker", getJobTrackerUri());
+ conf.set("jobTracker", getResourceManagerUri());
conf.set("nameNode", getNameNodeUri());
conf.set("appName", "bundle-app-name");
conf.set("coordName1", "coord1");
@@ -1320,7 +1320,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
Configuration jobConf = new XConfiguration();
jobConf.set(OozieClient.COORDINATOR_APP_PATH, appPath.toString());
jobConf.set(OozieClient.USER_NAME, getTestUser());
- jobConf.set("jobTracker", getJobTrackerUri());
+ jobConf.set("jobTracker", getResourceManagerUri());
jobConf.set("nameNode", getNameNodeUri());
jobConf.set("wfAppPath", wfAppPath.toString());
@@ -1434,7 +1434,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName()
+ "</value></property>" + "<property><name>mapred.reducer.class</name><value>"
@@ -1492,7 +1492,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
Configuration conf = new XConfiguration();
conf.set(OozieClient.BUNDLE_APP_PATH, bundleAppPath.toString());
conf.set(OozieClient.USER_NAME, getTestUser());
- conf.set("jobTracker", getJobTrackerUri());
+ conf.set("jobTracker", getResourceManagerUri());
conf.set("nameNode", getNameNodeUri());
conf.set("appName", "bundle-app-name");
conf.set("coordName1", "coord1");
@@ -1562,7 +1562,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
Configuration conf = new XConfiguration();
conf.set(OozieClient.BUNDLE_APP_PATH, bundleAppPath.toString());
conf.set(OozieClient.USER_NAME, getTestUser());
- conf.set("jobTracker", getJobTrackerUri());
+ conf.set("jobTracker", getResourceManagerUri());
conf.set("nameNode", getNameNodeUri());
conf.set("coordName1", "coord1");
conf.set("coordName2", "coord2");
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XFsTestCase.java b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
index 7a419d1..a1bda01 100644
--- a/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
@@ -171,8 +171,8 @@ public abstract class XFsTestCase extends XTestCase {
* @throws HadoopAccessorException thrown if the JobClient could not be obtained.
*/
protected JobClient createJobClient() throws HadoopAccessorException {
- JobConf conf = has.createJobConf(getJobTrackerUri());
- conf.set("mapred.job.tracker", getJobTrackerUri());
+ JobConf conf = has.createJobConf(getResourceManagerUri());
+ conf.set("mapred.job.tracker", getResourceManagerUri());
conf.set("fs.default.name", getNameNodeUri());
return has.createJobClient(getTestUser(), conf);
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index 4442513..9820167 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -455,7 +455,7 @@ public abstract class XTestCase extends TestCase {
*
* @return the job tracker URI.
*/
- protected String getJobTrackerUri() {
+ protected String getResourceManagerUri() {
return System.getProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, "localhost:9001");
}
@@ -576,7 +576,6 @@ public abstract class XTestCase extends TestCase {
private void createHdfsPathsAndSetupPermissions() throws IOException {
final FileSystem fileSystem = dfsCluster.getFileSystem();
-
fileSystem.mkdirs(new Path("target/test-data"));
fileSystem.mkdirs(new Path("target/test-data" + "/minicluster/mapred"));
fileSystem.mkdirs(new Path("/user"));
@@ -729,14 +728,14 @@ public abstract class XTestCase extends TestCase {
if (yarnCluster != null) {
return testConfigurations.createJobConfFromYarnCluster(yarnCluster.getConfig());
} else {
- return testConfigurations.createPristineJobConf(getJobTrackerUri(), getNameNodeUri());
+ return testConfigurations.createPristineJobConf(getResourceManagerUri(), getNameNodeUri());
}
}
/**
- * A 'closure' used by {@link XTestCase#executeWhileJobTrackerIsShutdown} method.
+ * A 'closure' used by {@link XTestCase#executeWhileResourceManagerIsShutdown} method.
*/
- public interface ShutdownJobTrackerExecutable {
+ public interface ShutdownResourceManagerExecutable {
/**
* Execute some code
@@ -747,16 +746,23 @@ public abstract class XTestCase extends TestCase {
}
/**
- * Execute some code, expressed via a {@link ShutdownJobTrackerExecutable}, while the JobTracker is shutdown. Once the code has
- * finished, the JobTracker is restarted (even if an exception occurs).
+ * Execute some code, expressed via a {@link ShutdownResourceManagerExecutable}, while the ResourceManager is shutdown. Once the code has
+ * finished, the ResourceManager is restarted (even if an exception occurs).
*
- * @param executable The ShutdownJobTrackerExecutable to execute while the JobTracker is shutdown
+ * @param executable The ShutdownResourceManagerExecutable to execute while the ResourceManager is shutdown
*/
- protected void executeWhileJobTrackerIsShutdown(final ShutdownJobTrackerExecutable executable) {
+ protected void executeWhileResourceManagerIsShutdown(final ShutdownResourceManagerExecutable executable) {
+ for (int i=0; i<yarnCluster.getNumOfResourceManager();i++){
+ yarnCluster.getResourceManager(i).stop();
+ }
try {
executable.execute();
} catch (final Exception e) {
throw new RuntimeException(e);
+ } finally {
+ for (int i=0; i<yarnCluster.getNumOfResourceManager();i++){
+ yarnCluster.getResourceManager(i).start();
+ }
}
}
@@ -786,7 +792,7 @@ public abstract class XTestCase extends TestCase {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
final MutableObject<YarnApplicationState> finalState = new MutableObject<YarnApplicationState>();
- final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
+ final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getResourceManagerUri());
final YarnClient yarnClient = Services.get().get(HadoopAccessorService.class).createYarnClient(getTestUser(), jobConf);
try {
@@ -822,7 +828,7 @@ public abstract class XTestCase extends TestCase {
protected YarnApplicationState getYarnApplicationState(final String externalId) throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
YarnApplicationState state = null;
- final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
+ final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getResourceManagerUri());
// This is needed here because we need a mutable final YarnClient
final MutableObject<YarnClient> yarnClientMO = new MutableObject<YarnClient>(null);
try {
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java b/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
index ed9bba3..7d36bbf 100644
--- a/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
+++ b/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
@@ -22,7 +22,6 @@ import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
-import java.io.StringReader;
import java.io.Writer;
import java.text.MessageFormat;
import java.util.Arrays;
@@ -32,22 +31,14 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.RunningJob;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.service.ConfigurationService;
-import org.apache.oozie.service.HadoopAccessorService;
-import org.apache.oozie.service.Services;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.util.ClassUtils;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XConfiguration;
-import org.apache.oozie.util.XmlUtils;
-import org.jdom.Element;
import org.jdom.Namespace;
public class TestHiveActionExecutor extends ActionExecutorTestCase {
@@ -113,7 +104,7 @@ public class TestHiveActionExecutor extends ActionExecutorTestCase {
"</configuration>" +
"<script>" + HIVE_SCRIPT_FILENAME + "</script>" +
"</hive>";
- return MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri());
+ return MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri());
}
private String getActionQueryXml(String query) {
@@ -142,7 +133,7 @@ public class TestHiveActionExecutor extends ActionExecutorTestCase {
"<value>DEBUG</value>" +
"</property>" +
"</configuration>";
- return MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri())
+ return MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri())
+ "<query>" + query + "</query>" +
"</hive>";
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java b/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
index 58e703f..030942a 100644
--- a/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
+++ b/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
@@ -19,7 +19,6 @@
package org.apache.oozie.action.hadoop;
import java.io.OutputStreamWriter;
-import java.io.StringReader;
import java.io.Writer;
import java.text.MessageFormat;
import java.util.ArrayList;
@@ -29,15 +28,9 @@ import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.RunningJob;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
-import org.apache.oozie.service.HadoopAccessorService;
-import org.apache.oozie.service.Services;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XmlUtils;
@@ -77,7 +70,7 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
assertEquals(classes, ae.getLauncherClasses());
Element actionXml = XmlUtils.parseXml("<hive2>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<jdbc-url>jdbc:hive2://foo:1234/bar</jdbc-url>" +
"<password>pass</password>" +
@@ -119,7 +112,7 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
String sampleQuery = "SELECT count(*) from foobar";
Element actionXml = XmlUtils.parseXml("<hive2 xmlns=\"uri:oozie:hive2-action:0.2\">" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<jdbc-url>jdbc:hive2://foo:1234/bar</jdbc-url>" +
"<password>pass</password>" +
@@ -176,7 +169,7 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
"<password>dummy</password>" +
"<script>" + HIVE_SCRIPT_FILENAME + "</script>" +
"</hive2>";
- return MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri(), getHiveServer2JdbcURL(""));
+ return MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri(), getHiveServer2JdbcURL(""));
}
private String getQueryActionXml(String query) {
@@ -186,7 +179,7 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
"<configuration></configuration>" +
"<jdbc-url>{2}</jdbc-url>" +
"<password>dummy</password>";
- String expanded = MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri(), getHiveServer2JdbcURL(""));
+ String expanded = MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri(), getHiveServer2JdbcURL(""));
// MessageFormat strips single quotes, which causes issues with the hive query parser
return expanded +
"<query>" + query + "</query>" + "</hive2>";
[04/48] oozie git commit: Fix test failures after merge
Posted by pb...@apache.org.
Fix test failures after merge
Change-Id: Ife9db9c06edad000d3813b40c86f9cfc356c8251
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/9e2acd07
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/9e2acd07
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/9e2acd07
Branch: refs/heads/oya
Commit: 9e2acd07236c80c9a429106c7e57086c8c78e2ed
Parents: b99933b
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Wed Nov 9 12:55:25 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Wed Nov 9 12:55:25 2016 +0100
----------------------------------------------------------------------
sharelib/pig/pom.xml | 6 ++++++
sharelib/streaming/pom.xml | 6 ++++++
2 files changed, 12 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/9e2acd07/sharelib/pig/pom.xml
----------------------------------------------------------------------
diff --git a/sharelib/pig/pom.xml b/sharelib/pig/pom.xml
index 1698bd3..2f8c758 100644
--- a/sharelib/pig/pom.xml
+++ b/sharelib/pig/pom.xml
@@ -82,6 +82,12 @@
</dependency>
<dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
</dependency>
http://git-wip-us.apache.org/repos/asf/oozie/blob/9e2acd07/sharelib/streaming/pom.xml
----------------------------------------------------------------------
diff --git a/sharelib/streaming/pom.xml b/sharelib/streaming/pom.xml
index 4f73272..d65c396 100644
--- a/sharelib/streaming/pom.xml
+++ b/sharelib/streaming/pom.xml
@@ -62,6 +62,12 @@
</dependency>
<dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<scope>provided</scope>
[06/48] oozie git commit: OOZIE-2731 Set
yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage
to a higher value in tests (asasvari via rkanter)
Posted by pb...@apache.org.
OOZIE-2731 Set yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage to a higher value in tests (asasvari via rkanter)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/ca4eac43
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/ca4eac43
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/ca4eac43
Branch: refs/heads/oya
Commit: ca4eac431b4c800cfcc3c0f72cb6f5ea8ea74937
Parents: 8250fbd
Author: Robert Kanter <rk...@cloudera.com>
Authored: Fri Nov 11 09:45:47 2016 +0100
Committer: Robert Kanter <rk...@cloudera.com>
Committed: Fri Nov 11 09:45:47 2016 +0100
----------------------------------------------------------------------
core/src/test/java/org/apache/oozie/test/XTestCase.java | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca4eac43/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index 70f77be..e1d9068 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -1007,6 +1007,8 @@ public abstract class XTestCase extends TestCase {
conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
// Required to prevent deadlocks with YARN CapacityScheduler
conf.set("yarn.scheduler.capacity.maximum-am-resource-percent", "0.5");
+ // Default value is 90 - if you have low disk space, tests will fail.
+ conf.set("yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage", "99");
return conf;
}
[02/48] oozie git commit: Revert "Test case failure (abhishekbafna
via jaydeepvishwakarma)"
Posted by pb...@apache.org.
Revert "Test case failure (abhishekbafna via jaydeepvishwakarma)"
This reverts commit 225a88cb98012a98adf460363a8dc0ecd8fbfbaf.
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/7bd1cfb1
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/7bd1cfb1
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/7bd1cfb1
Branch: refs/heads/oya
Commit: 7bd1cfb18f9a2ce08975744b16d17145247d0206
Parents: 1b7da69
Author: Robert Kanter <rk...@cloudera.com>
Authored: Wed Nov 9 09:59:06 2016 +0100
Committer: Robert Kanter <rk...@cloudera.com>
Committed: Wed Nov 9 09:59:06 2016 +0100
----------------------------------------------------------------------
.../org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java | 2 +-
release-log.txt | 1 -
2 files changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/7bd1cfb1/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java b/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
index a3c7b14..9e439b4 100644
--- a/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
+++ b/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
@@ -1379,7 +1379,7 @@ public class TestLiteWorkflowAppParser extends XTestCase {
} catch (WorkflowException we) {
assertEquals(ErrorCode.E0757, we.getErrorCode());
assertTrue(we.getMessage().contains("Fork node [f]"));
- assertTrue(we.getMessage().contains("[j2,j1]") || we.getMessage().contains("[j1,j2]"));
+ assertTrue(we.getMessage().contains("[j2,j1]"));
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/7bd1cfb1/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 70ffaa6..565e844 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -9,7 +9,6 @@ OOZIE-2634 Queue dump command message is confusing when the queue is empty (andr
-- Oozie 4.3.0 release
-OOZIE-2719 Test case failure (abhishekbafna via jaydeepvishwakarma)
OOZIE-2674 Improve oozie commads documentation (abhishekbafna via rkanter)
OOZIE-2710 Oozie HCatalog example workflow fails (abhishekbafna via shwethags)
OOZIE-2705 Oozie Spark action ignores spark.executor.extraJavaOptions and spark.driver.extraJavaOptions (gezapeti via rkanter)
[39/48] oozie git commit: Revert "OOZIE-2729 OYA: Use MiniYARNCluster
in tests. TODO: refactor XTestCase."
Posted by pb...@apache.org.
Revert "OOZIE-2729 OYA: Use MiniYARNCluster in tests. TODO: refactor XTestCase."
This reverts commit d5dcc5cec2e080413e2540f43d3877b4d56f99ad.
Change-Id: Iefe037a8477591a7554b31fe81a399d7e1f86f00
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/67dca9c3
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/67dca9c3
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/67dca9c3
Branch: refs/heads/oya
Commit: 67dca9c31016a3bf7ad00037f1750fce988f1e76
Parents: e5070b1
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Mon Nov 28 14:04:13 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Mon Nov 28 14:04:13 2016 +0100
----------------------------------------------------------------------
.../java/org/apache/oozie/test/XTestCase.java | 440 +++++++++----------
1 file changed, 217 insertions(+), 223 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/67dca9c3/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index 784c578..711d41d 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -27,9 +27,14 @@ import java.io.OutputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URL;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.EnumSet;
import java.net.UnknownHostException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import java.util.Map.Entry;
+import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
@@ -50,12 +55,12 @@ import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.AppenderSkeleton;
@@ -142,24 +147,24 @@ public abstract class XTestCase extends TestCase {
OOZIE_SRC_DIR = OOZIE_SRC_DIR.getParentFile();
}
- final String testPropsFile = System.getProperty(OOZIE_TEST_PROPERTIES, "test.properties");
- final File file = new File(testPropsFile).isAbsolute()
- ? new File(testPropsFile) : new File(OOZIE_SRC_DIR, testPropsFile);
+ String testPropsFile = System.getProperty(OOZIE_TEST_PROPERTIES, "test.properties");
+ File file = new File(testPropsFile).isAbsolute()
+ ? new File(testPropsFile) : new File(OOZIE_SRC_DIR, testPropsFile);
if (file.exists()) {
System.out.println();
System.out.println("*********************************************************************************");
System.out.println("Loading test system properties from: " + file.getAbsolutePath());
System.out.println();
- final Properties props = new Properties();
+ Properties props = new Properties();
props.load(new FileReader(file));
- for (final Map.Entry entry : props.entrySet()) {
+ for (Map.Entry entry : props.entrySet()) {
if (!System.getProperties().containsKey(entry.getKey())) {
System.setProperty((String) entry.getKey(), (String) entry.getValue());
System.out.println(entry.getKey() + " = " + entry.getValue());
}
else {
System.out.println(entry.getKey() + " IGNORED, using command line value = " +
- System.getProperty((String) entry.getKey()));
+ System.getProperty((String) entry.getKey()));
}
}
System.out.println("*********************************************************************************");
@@ -168,13 +173,14 @@ public abstract class XTestCase extends TestCase {
else {
if (System.getProperty(OOZIE_TEST_PROPERTIES) != null) {
System.err.println();
- System.err.println("ERROR: Specified test file does not exist: " +
- System.getProperty(OOZIE_TEST_PROPERTIES));
+ System.err.println("ERROR: Specified test file does not exist: " +
+ System.getProperty(OOZIE_TEST_PROPERTIES));
System.err.println();
System.exit(-1);
}
}
- } catch (final IOException ex) {
+ }
+ catch (IOException ex) {
throw new RuntimeException(ex);
}
@@ -255,12 +261,12 @@ public abstract class XTestCase extends TestCase {
/**
* Name of the shell command
*/
- protected static final String SHELL_COMMAND_NAME = (Shell.WINDOWS) ? "cmd" : "bash";
+ protected static final String SHELL_COMMAND_NAME = (Shell.WINDOWS)? "cmd": "bash";
/**
* Extension for shell script files
*/
- protected static final String SHELL_COMMAND_SCRIPTFILE_EXTENSION = (Shell.WINDOWS) ? "cmd" : "sh";
+ protected static final String SHELL_COMMAND_SCRIPTFILE_EXTENSION = (Shell.WINDOWS)? "cmd": "sh";
/**
* Option for shell command to pass script files
@@ -291,12 +297,12 @@ public abstract class XTestCase extends TestCase {
* @param cleanUpDBTables true if should cleanup the database tables, false if not
* @throws Exception if the test workflow working directory could not be created or there was a problem cleaning the database
*/
- protected void setUp(final boolean cleanUpDBTables) throws Exception {
+ protected void setUp(boolean cleanUpDBTables) throws Exception {
RUNNING_TESTCASES.incrementAndGet();
super.setUp();
- final String baseDir = System.getProperty(OOZIE_TEST_DIR, new File("target/test-data").getAbsolutePath());
+ String baseDir = System.getProperty(OOZIE_TEST_DIR, new File("target/test-data").getAbsolutePath());
String msg = null;
- final File f = new File(baseDir);
+ File f = new File(baseDir);
if (!f.isAbsolute()) {
msg = XLog.format("System property [{0}]=[{1}] must be set to an absolute path", OOZIE_TEST_DIR, baseDir);
}
@@ -313,7 +319,7 @@ public abstract class XTestCase extends TestCase {
f.mkdirs();
if (!f.exists() || !f.isDirectory()) {
System.err.println();
- System.err.println(XLog.format("Could not create test dir [{0}]", baseDir));
+ System.err.println(XLog.format("Could not create test dir [{0}]", baseDir));
System.exit(-1);
}
hadoopVersion = System.getProperty(HADOOP_VERSION, "0.20.0");
@@ -325,12 +331,12 @@ public abstract class XTestCase extends TestCase {
testCaseConfDir = createTestCaseSubDir("conf");
// load test Oozie site
- final String oozieTestDB = System.getProperty("oozie.test.db", "hsqldb");
- final String defaultOozieSize =
- new File(OOZIE_SRC_DIR, "core/src/test/resources/" + oozieTestDB + "-oozie-site.xml").getAbsolutePath();
- final String customOozieSite = System.getProperty("oozie.test.config.file", defaultOozieSize);
+ String oozieTestDB = System.getProperty("oozie.test.db", "hsqldb");
+ String defaultOozieSize =
+ new File(OOZIE_SRC_DIR, "core/src/test/resources/" + oozieTestDB + "-oozie-site.xml").getAbsolutePath();
+ String customOozieSite = System.getProperty("oozie.test.config.file", defaultOozieSize);
File source = new File(customOozieSite);
- if (!source.isAbsolute()) {
+ if(!source.isAbsolute()) {
source = new File(OOZIE_SRC_DIR, customOozieSite);
}
source = source.getAbsoluteFile();
@@ -340,7 +346,7 @@ public abstract class XTestCase extends TestCase {
}
else {
// If we can't find it, try using the class loader (useful if we're using XTestCase from outside core)
- final URL sourceURL = getClass().getClassLoader().getResource(oozieTestDB + "-oozie-site.xml");
+ URL sourceURL = getClass().getClassLoader().getResource(oozieTestDB + "-oozie-site.xml");
if (sourceURL != null) {
oozieSiteSourceStream = sourceURL.openStream();
}
@@ -348,35 +354,35 @@ public abstract class XTestCase extends TestCase {
// If we still can't find it, then exit
System.err.println();
System.err.println(XLog.format("Custom configuration file for testing does not exist [{0}]",
- source.getAbsolutePath()));
+ source.getAbsolutePath()));
System.err.println();
System.exit(-1);
}
}
// Copy the specified oozie-site file from oozieSiteSourceStream to the test case dir as oozie-site.xml
- final Configuration oozieSiteConf = new Configuration(false);
+ Configuration oozieSiteConf = new Configuration(false);
oozieSiteConf.addResource(oozieSiteSourceStream);
- final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
- final InputStream inputStream = classLoader.getResourceAsStream(ConfigurationService.DEFAULT_CONFIG_FILE);
- final XConfiguration configuration = new XConfiguration(inputStream);
- final String classes = configuration.get(Services.CONF_SERVICE_CLASSES);
+ ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+ InputStream inputStream = classLoader.getResourceAsStream(ConfigurationService.DEFAULT_CONFIG_FILE);
+ XConfiguration configuration = new XConfiguration(inputStream);
+ String classes = configuration.get(Services.CONF_SERVICE_CLASSES);
// Disable sharelib service as it cannot find the sharelib jars
// as maven has target/classes in classpath and not the jar because test phase is before package phase
- oozieSiteConf.set(Services.CONF_SERVICE_CLASSES, classes.replaceAll("org.apache.oozie.service.ShareLibService,", ""));
+ oozieSiteConf.set(Services.CONF_SERVICE_CLASSES, classes.replaceAll("org.apache.oozie.service.ShareLibService,",""));
// Make sure to create the Oozie DB during unit tests
oozieSiteConf.set(JPAService.CONF_CREATE_DB_SCHEMA, "true");
File target = new File(testCaseConfDir, "oozie-site.xml");
oozieSiteConf.writeXml(new FileOutputStream(target));
- final File hadoopConfDir = new File(testCaseConfDir, "hadoop-conf");
+ File hadoopConfDir = new File(testCaseConfDir, "hadoop-conf");
hadoopConfDir.mkdir();
- final File actionConfDir = new File(testCaseConfDir, "action-conf");
+ File actionConfDir = new File(testCaseConfDir, "action-conf");
actionConfDir.mkdir();
source = new File(OOZIE_SRC_DIR, "core/src/test/resources/hadoop-config.xml");
InputStream hadoopConfigResourceStream = null;
if (!source.exists()) {
// If we can't find it, try using the class loader (useful if we're using XTestCase from outside core)
- final URL sourceURL = getClass().getClassLoader().getResource("hadoop-config.xml");
+ URL sourceURL = getClass().getClassLoader().getResource("hadoop-config.xml");
if (sourceURL != null) {
hadoopConfigResourceStream = sourceURL.openStream();
}
@@ -409,17 +415,17 @@ public abstract class XTestCase extends TestCase {
}
if (System.getProperty("oozie.test.db.host") == null) {
- System.setProperty("oozie.test.db.host", "localhost");
+ System.setProperty("oozie.test.db.host", "localhost");
}
setSystemProperty(ConfigurationService.OOZIE_DATA_DIR, testCaseDir);
- setSystemProperty(HadoopAccessorService.SUPPORTED_FILESYSTEMS, "*");
+ setSystemProperty(HadoopAccessorService.SUPPORTED_FILESYSTEMS,"*");
- if (yarnCluster != null) {
- try (final OutputStream os = new FileOutputStream(new File(hadoopConfDir, "core-site.xml"))) {
- final Configuration conf = createJobConfFromYarnCluster();
- conf.writeXml(os);
- }
+ if (mrCluster != null) {
+ OutputStream os = new FileOutputStream(new File(hadoopConfDir, "core-site.xml"));
+ Configuration conf = createJobConfFromMRCluster();
+ conf.writeXml(os);
+ os.close();
}
if (System.getProperty("oozie.test.metastore.server", "false").equals("true")) {
@@ -468,12 +474,12 @@ public abstract class XTestCase extends TestCase {
* reason for the manual parsing instead of an actual File.toURI is because Oozie tests use tokens ${}
* frequently. Something like URI("c:/temp/${HOUR}").toString() will generate escaped values that will break tests
*/
- protected String getTestCaseFileUri(final String relativeUri) {
+ protected String getTestCaseFileUri(String relativeUri) {
String uri = new File(testCaseDir).toURI().toString();
// truncates '/' if the testCaseDir was provided with a fullpath ended with separator
- if (uri.endsWith("/")) {
- uri = uri.substring(0, uri.length() - 1);
+ if (uri.endsWith("/")){
+ uri = uri.substring(0, uri.length() -1);
}
return uri + "/" + relativeUri;
@@ -512,7 +518,7 @@ public abstract class XTestCase extends TestCase {
/**
* Return an alternate test user Id that belongs
- to the test group.
+ to the test group.
*
* @return the user Id.
*/
@@ -556,7 +562,7 @@ public abstract class XTestCase extends TestCase {
* @param testCase testcase instance to obtain the working directory.
* @return the test working directory.
*/
- private String getTestCaseDirInternal(final TestCase testCase) {
+ private String getTestCaseDirInternal(TestCase testCase) {
ParamChecker.notNull(testCase, "testCase");
File dir = new File(System.getProperty(OOZIE_TEST_DIR, "target/test-data"));
dir = new File(dir, "oozietests").getAbsoluteFile();
@@ -565,16 +571,16 @@ public abstract class XTestCase extends TestCase {
return dir.getAbsolutePath();
}
- protected void delete(final File file) throws IOException {
+ protected void delete(File file) throws IOException {
ParamChecker.notNull(file, "file");
if (file.getAbsolutePath().length() < 5) {
throw new RuntimeException(XLog.format("path [{0}] is too short, not deleting", file.getAbsolutePath()));
}
if (file.exists()) {
if (file.isDirectory()) {
- final File[] children = file.listFiles();
+ File[] children = file.listFiles();
if (children != null) {
- for (final File child : children) {
+ for (File child : children) {
delete(child);
}
}
@@ -598,14 +604,14 @@ public abstract class XTestCase extends TestCase {
* @return return the path of the test working directory, it is always an absolute path.
* @throws Exception if the test working directory could not be created or cleaned up.
*/
- private String createTestCaseDir(final TestCase testCase, final boolean cleanup) throws Exception {
- final String testCaseDir = getTestCaseDirInternal(testCase);
+ private String createTestCaseDir(TestCase testCase, boolean cleanup) throws Exception {
+ String testCaseDir = getTestCaseDirInternal(testCase);
System.out.println();
System.out.println(XLog.format("Setting testcase work dir[{0}]", testCaseDir));
if (cleanup) {
delete(new File(testCaseDir));
}
- final File dir = new File(testCaseDir);
+ File dir = new File(testCaseDir);
if (!dir.mkdirs()) {
throw new RuntimeException(XLog.format("Could not create testcase dir[{0}]", testCaseDir));
}
@@ -618,7 +624,7 @@ public abstract class XTestCase extends TestCase {
* @param subDirNames a list of progressively deeper directory names
* @return the absolute path to the created directory.
*/
- protected String createTestCaseSubDir(final String... subDirNames) {
+ protected String createTestCaseSubDir(String... subDirNames) {
ParamChecker.notNull(subDirNames, "subDirName");
if (subDirNames.length == 0) {
throw new RuntimeException(XLog.format("Could not create testcase subdir ''; it already exists"));
@@ -644,12 +650,12 @@ public abstract class XTestCase extends TestCase {
* @param name system property name.
* @param value value to set.
*/
- protected void setSystemProperty(final String name, final String value) {
+ protected void setSystemProperty(String name, String value) {
if (sysProps == null) {
sysProps = new HashMap<String, String>();
}
if (!sysProps.containsKey(name)) {
- final String currentValue = System.getProperty(name);
+ String currentValue = System.getProperty(name);
sysProps.put(name, currentValue);
}
if (value != null) {
@@ -665,7 +671,7 @@ public abstract class XTestCase extends TestCase {
*/
private void resetSystemProperties() {
if (sysProps != null) {
- for (final Map.Entry<String, String> entry : sysProps.entrySet()) {
+ for (Map.Entry<String, String> entry : sysProps.entrySet()) {
if (entry.getValue() != null) {
System.setProperty(entry.getKey(), entry.getValue());
}
@@ -698,11 +704,11 @@ public abstract class XTestCase extends TestCase {
* @param predicate predicate waiting on.
* @return the waited time.
*/
- protected long waitFor(final int timeout, final Predicate predicate) {
+ protected long waitFor(int timeout, Predicate predicate) {
ParamChecker.notNull(predicate, "predicate");
- final XLog log = new XLog(LogFactory.getLog(getClass()));
- final long started = System.currentTimeMillis();
- final long mustEnd = System.currentTimeMillis() + (long) (WAITFOR_RATIO * timeout);
+ XLog log = new XLog(LogFactory.getLog(getClass()));
+ long started = System.currentTimeMillis();
+ long mustEnd = System.currentTimeMillis() + (long)(WAITFOR_RATIO * timeout);
long lastEcho = 0;
try {
long waiting = mustEnd - System.currentTimeMillis();
@@ -720,7 +726,8 @@ public abstract class XTestCase extends TestCase {
log.info("Waiting timed out after [{0}] msec", timeout);
}
return System.currentTimeMillis() - started;
- } catch (final Exception ex) {
+ }
+ catch (Exception ex) {
throw new RuntimeException(ex);
}
}
@@ -730,7 +737,7 @@ public abstract class XTestCase extends TestCase {
*
* @param sleepTime time in milliseconds to wait
*/
- protected void sleep(final int sleepTime) {
+ protected void sleep(int sleepTime) {
waitFor(sleepTime, new Predicate() {
@Override
public boolean evaluate() throws Exception {
@@ -770,7 +777,7 @@ public abstract class XTestCase extends TestCase {
}
public String getKeytabFile() {
- final String defaultFile = new File(System.getProperty("user.home"), "oozie.keytab").getAbsolutePath();
+ String defaultFile = new File(System.getProperty("user.home"), "oozie.keytab").getAbsolutePath();
return System.getProperty("oozie.test.kerberos.keytab.file", defaultFile);
}
@@ -780,7 +787,7 @@ public abstract class XTestCase extends TestCase {
public String getOoziePrincipal() {
return System.getProperty("oozie.test.kerberos.oozie.principal",
- getOozieUser() + "/localhost") + "@" + getRealm();
+ getOozieUser() + "/localhost") + "@" + getRealm();
}
protected MiniHCatServer getHCatalogServer() {
@@ -804,11 +811,12 @@ public abstract class XTestCase extends TestCase {
// needed to cleanup the database and shut them down when done; the test will likely start its own Services later and
// we don't want to interfere
try {
- final Services services = new Services();
+ Services services = new Services();
services.getConf().set(Services.CONF_SERVICE_CLASSES, MINIMAL_SERVICES_FOR_DB_CLEANUP);
services.init();
cleanUpDBTablesInternal();
- } finally {
+ }
+ finally {
if (Services.get() != null) {
Services.get().destroy();
}
@@ -817,70 +825,70 @@ public abstract class XTestCase extends TestCase {
}
private void cleanUpDBTablesInternal() throws StoreException {
- final EntityManager entityManager = Services.get().get(JPAService.class).getEntityManager();
+ EntityManager entityManager = Services.get().get(JPAService.class).getEntityManager();
entityManager.setFlushMode(FlushModeType.COMMIT);
entityManager.getTransaction().begin();
Query q = entityManager.createNamedQuery("GET_WORKFLOWS");
- final List<WorkflowJobBean> wfjBeans = q.getResultList();
- final int wfjSize = wfjBeans.size();
- for (final WorkflowJobBean w : wfjBeans) {
+ List<WorkflowJobBean> wfjBeans = q.getResultList();
+ int wfjSize = wfjBeans.size();
+ for (WorkflowJobBean w : wfjBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_ACTIONS");
- final List<WorkflowActionBean> wfaBeans = q.getResultList();
- final int wfaSize = wfaBeans.size();
- for (final WorkflowActionBean w : wfaBeans) {
+ List<WorkflowActionBean> wfaBeans = q.getResultList();
+ int wfaSize = wfaBeans.size();
+ for (WorkflowActionBean w : wfaBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_COORD_JOBS");
- final List<CoordinatorJobBean> cojBeans = q.getResultList();
- final int cojSize = cojBeans.size();
- for (final CoordinatorJobBean w : cojBeans) {
+ List<CoordinatorJobBean> cojBeans = q.getResultList();
+ int cojSize = cojBeans.size();
+ for (CoordinatorJobBean w : cojBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_COORD_ACTIONS");
- final List<CoordinatorActionBean> coaBeans = q.getResultList();
- final int coaSize = coaBeans.size();
- for (final CoordinatorActionBean w : coaBeans) {
+ List<CoordinatorActionBean> coaBeans = q.getResultList();
+ int coaSize = coaBeans.size();
+ for (CoordinatorActionBean w : coaBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_BUNDLE_JOBS");
- final List<BundleJobBean> bjBeans = q.getResultList();
- final int bjSize = bjBeans.size();
- for (final BundleJobBean w : bjBeans) {
+ List<BundleJobBean> bjBeans = q.getResultList();
+ int bjSize = bjBeans.size();
+ for (BundleJobBean w : bjBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_BUNDLE_ACTIONS");
- final List<BundleActionBean> baBeans = q.getResultList();
- final int baSize = baBeans.size();
- for (final BundleActionBean w : baBeans) {
+ List<BundleActionBean> baBeans = q.getResultList();
+ int baSize = baBeans.size();
+ for (BundleActionBean w : baBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_SLA_EVENTS");
- final List<SLAEventBean> slaBeans = q.getResultList();
- final int slaSize = slaBeans.size();
- for (final SLAEventBean w : slaBeans) {
+ List<SLAEventBean> slaBeans = q.getResultList();
+ int slaSize = slaBeans.size();
+ for (SLAEventBean w : slaBeans) {
entityManager.remove(w);
}
q = entityManager.createQuery("select OBJECT(w) from SLARegistrationBean w");
- final List<SLARegistrationBean> slaRegBeans = q.getResultList();
- final int slaRegSize = slaRegBeans.size();
- for (final SLARegistrationBean w : slaRegBeans) {
+ List<SLARegistrationBean> slaRegBeans = q.getResultList();
+ int slaRegSize = slaRegBeans.size();
+ for (SLARegistrationBean w : slaRegBeans) {
entityManager.remove(w);
}
q = entityManager.createQuery("select OBJECT(w) from SLASummaryBean w");
- final List<SLASummaryBean> sdBeans = q.getResultList();
- final int ssSize = sdBeans.size();
- for (final SLASummaryBean w : sdBeans) {
+ List<SLASummaryBean> sdBeans = q.getResultList();
+ int ssSize = sdBeans.size();
+ for (SLASummaryBean w : sdBeans) {
entityManager.remove(w);
}
@@ -900,49 +908,58 @@ public abstract class XTestCase extends TestCase {
private static MiniDFSCluster dfsCluster = null;
private static MiniDFSCluster dfsCluster2 = null;
- private static MiniYARNCluster yarnCluster = null;
+ // TODO: OYA: replace with MiniYarnCluster or MiniMRYarnCluster
+ private static MiniMRCluster mrCluster = null;
private static MiniHCatServer hcatServer = null;
private static MiniHS2 hiveserver2 = null;
private static HiveConf hs2Config = null;
- private void setUpEmbeddedHadoop(final String testCaseDir) throws Exception {
- if (dfsCluster == null && yarnCluster == null) {
- if (System.getProperty("hadoop.log.dir") == null) {
- System.setProperty("hadoop.log.dir", testCaseDir);
- }
+ private void setUpEmbeddedHadoop(String testCaseDir) throws Exception {
+ if (dfsCluster == null && mrCluster == null) {
+ if (System.getProperty("hadoop.log.dir") == null) {
+ System.setProperty("hadoop.log.dir", testCaseDir);
+ }
// Tell the ClasspathUtils that we're using a mini cluster
ClasspathUtils.setUsingMiniYarnCluster(true);
- final int dataNodes = 2;
- final String oozieUser = getOozieUser();
- final JobConf dfsConfig = createDFSConfig();
- final String[] userGroups = new String[]{getTestGroup(), getTestGroup2()};
+ int taskTrackers = 2;
+ int dataNodes = 2;
+ String oozieUser = getOozieUser();
+ JobConf conf = createDFSConfig();
+ String[] userGroups = new String[] { getTestGroup(), getTestGroup2() };
UserGroupInformation.createUserForTesting(oozieUser, userGroups);
UserGroupInformation.createUserForTesting(getTestUser(), userGroups);
UserGroupInformation.createUserForTesting(getTestUser2(), userGroups);
- UserGroupInformation.createUserForTesting(getTestUser3(), new String[]{"users"});
+ UserGroupInformation.createUserForTesting(getTestUser3(), new String[] { "users" } );
try {
- dfsCluster = new MiniDFSCluster.Builder(dfsConfig)
- .numDataNodes(dataNodes)
- .format(true)
- .racks(null)
- .build();
-
- createHdfsPathsAndSetupPermissions();
-
- final Configuration yarnConfig = createYarnConfig(dfsConfig);
- yarnCluster = new MiniYARNCluster(this.getClass().getName(), 1, 1, 1, 1);
- yarnCluster.init(yarnConfig);
- yarnCluster.start();
- final JobConf jobConf = new JobConf(yarnCluster.getConfig());
+ dfsCluster = new MiniDFSCluster(conf, dataNodes, true, null);
+ FileSystem fileSystem = dfsCluster.getFileSystem();
+ fileSystem.mkdirs(new Path("target/test-data"));
+ fileSystem.mkdirs(new Path("target/test-data"+"/minicluster/mapred"));
+ fileSystem.mkdirs(new Path("/user"));
+ fileSystem.mkdirs(new Path("/tmp"));
+ fileSystem.mkdirs(new Path("/hadoop/mapred/system"));
+ fileSystem.setPermission(new Path("target/test-data"), FsPermission.valueOf("-rwxrwxrwx"));
+ fileSystem.setPermission(new Path("target/test-data"+"/minicluster"), FsPermission.valueOf("-rwxrwxrwx"));
+ fileSystem.setPermission(new Path("target/test-data"+"/minicluster/mapred"), FsPermission.valueOf("-rwxrwxrwx"));
+ fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
+ fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
+ fileSystem.setPermission(new Path("/hadoop/mapred/system"), FsPermission.valueOf("-rwx------"));
+ String nnURI = fileSystem.getUri().toString();
+ int numDirs = 1;
+ String[] racks = null;
+ String[] hosts = null;
+ mrCluster = new MiniMRCluster(0, 0, taskTrackers, nnURI, numDirs, racks, hosts, null, conf);
+ JobConf jobConf = mrCluster.createJobConf();
System.setProperty(OOZIE_TEST_JOB_TRACKER, jobConf.get("mapred.job.tracker"));
- final String rmAddress = jobConf.get("yarn.resourcemanager.address");
+ String rmAddress = jobConf.get("yarn.resourcemanager.address");
if (rmAddress != null) {
System.setProperty(OOZIE_TEST_JOB_TRACKER, rmAddress);
}
- System.setProperty(OOZIE_TEST_NAME_NODE, dfsCluster.getFileSystem().getUri().toString());
- ProxyUsers.refreshSuperUserGroupsConfiguration(dfsConfig);
- } catch (final Exception ex) {
+ System.setProperty(OOZIE_TEST_NAME_NODE, jobConf.get("fs.default.name"));
+ ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+ }
+ catch (Exception ex) {
shutdownMiniCluster();
throw ex;
}
@@ -950,32 +967,15 @@ public abstract class XTestCase extends TestCase {
}
}
- private void createHdfsPathsAndSetupPermissions() throws IOException {
- final FileSystem fileSystem = dfsCluster.getFileSystem();
-
- fileSystem.mkdirs(new Path("target/test-data"));
- fileSystem.mkdirs(new Path("target/test-data" + "/minicluster/mapred"));
- fileSystem.mkdirs(new Path("/user"));
- fileSystem.mkdirs(new Path("/tmp"));
- fileSystem.mkdirs(new Path("/hadoop/mapred/system"));
-
- fileSystem.setPermission(new Path("target/test-data"), FsPermission.valueOf("-rwxrwxrwx"));
- fileSystem.setPermission(new Path("target/test-data" + "/minicluster"), FsPermission.valueOf("-rwxrwxrwx"));
- fileSystem.setPermission(new Path("target/test-data" + "/minicluster/mapred"), FsPermission.valueOf("-rwxrwxrwx"));
- fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
- fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
- fileSystem.setPermission(new Path("/hadoop/mapred/system"), FsPermission.valueOf("-rwx------"));
- }
-
private void setUpEmbeddedHadoop2() throws Exception {
if (dfsCluster != null && dfsCluster2 == null) {
// Trick dfs location for MiniDFSCluster since it doesn't accept location as input)
- final String testBuildDataSaved = System.getProperty("test.build.data", "build/test/data");
+ String testBuildDataSaved = System.getProperty("test.build.data", "build/test/data");
try {
System.setProperty("test.build.data", FilenameUtils.concat(testBuildDataSaved, "2"));
// Only DFS cluster is created based upon current need
dfsCluster2 = new MiniDFSCluster(createDFSConfig(), 2, true, null);
- final FileSystem fileSystem = dfsCluster2.getFileSystem();
+ FileSystem fileSystem = dfsCluster2.getFileSystem();
fileSystem.mkdirs(new Path("target/test-data"));
fileSystem.mkdirs(new Path("/user"));
fileSystem.mkdirs(new Path("/tmp"));
@@ -983,10 +983,12 @@ public abstract class XTestCase extends TestCase {
fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
System.setProperty(OOZIE_TEST_NAME_NODE2, fileSystem.getConf().get("fs.default.name"));
- } catch (final Exception ex) {
+ }
+ catch (Exception ex) {
shutdownMiniCluster2();
throw ex;
- } finally {
+ }
+ finally {
// Restore previous value
System.setProperty("test.build.data", testBuildDataSaved);
}
@@ -994,41 +996,31 @@ public abstract class XTestCase extends TestCase {
}
private JobConf createDFSConfig() throws UnknownHostException {
- final JobConf conf = new JobConf();
- conf.set("dfs.block.access.token.enable", "false");
- conf.set("dfs.permissions", "true");
- conf.set("hadoop.security.authentication", "simple");
-
- //Doing this because Hadoop 1.x does not support '*' if the value is '*,127.0.0.1'
- final StringBuilder sb = new StringBuilder();
- sb.append("127.0.0.1,localhost");
- for (final InetAddress i : InetAddress.getAllByName(InetAddress.getLocalHost().getHostName())) {
- sb.append(",").append(i.getCanonicalHostName());
- }
- conf.set("hadoop.proxyuser." + getOozieUser() + ".hosts", sb.toString());
-
- conf.set("hadoop.proxyuser." + getOozieUser() + ".groups", getTestGroup());
- conf.set("mapred.tasktracker.map.tasks.maximum", "4");
- conf.set("mapred.tasktracker.reduce.tasks.maximum", "4");
-
- conf.set("hadoop.tmp.dir", "target/test-data" + "/minicluster");
-
- // Scheduler properties required for YARN CapacityScheduler to work
- conf.set("yarn.scheduler.capacity.root.queues", "default");
- conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
- // Required to prevent deadlocks with YARN CapacityScheduler
- conf.set("yarn.scheduler.capacity.maximum-am-resource-percent", "0.5");
-
- return conf;
- }
-
- private Configuration createYarnConfig(final Configuration parentConfig) {
- final Configuration yarnConfig = new YarnConfiguration(parentConfig);
-
- yarnConfig.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
- yarnConfig.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:0");
-
- return yarnConfig;
+ JobConf conf = new JobConf();
+ conf.set("dfs.block.access.token.enable", "false");
+ conf.set("dfs.permissions", "true");
+ conf.set("hadoop.security.authentication", "simple");
+
+ //Doing this because Hadoop 1.x does not support '*' if the value is '*,127.0.0.1'
+ StringBuilder sb = new StringBuilder();
+ sb.append("127.0.0.1,localhost");
+ for (InetAddress i : InetAddress.getAllByName(InetAddress.getLocalHost().getHostName())) {
+ sb.append(",").append(i.getCanonicalHostName());
+ }
+ conf.set("hadoop.proxyuser." + getOozieUser() + ".hosts", sb.toString());
+
+ conf.set("hadoop.proxyuser." + getOozieUser() + ".groups", getTestGroup());
+ conf.set("mapred.tasktracker.map.tasks.maximum", "4");
+ conf.set("mapred.tasktracker.reduce.tasks.maximum", "4");
+
+ conf.set("hadoop.tmp.dir", "target/test-data"+"/minicluster");
+
+ // Scheduler properties required for YARN CapacityScheduler to work
+ conf.set("yarn.scheduler.capacity.root.queues", "default");
+ conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+ // Required to prevent deadlocks with YARN CapacityScheduler
+ conf.set("yarn.scheduler.capacity.maximum-am-resource-percent", "0.5");
+ return conf;
}
protected void setupHCatalogServer() throws Exception {
@@ -1055,8 +1047,8 @@ public abstract class XTestCase extends TestCase {
if (hs2Config == null) {
// Make HS2 use our Mini cluster by copying all configs to HiveConf; also had to hack MiniHS2
hs2Config = new HiveConf();
- final Configuration jobConf = createJobConf();
- for (final Map.Entry<String, String> pair : jobConf) {
+ Configuration jobConf = createJobConf();
+ for (Map.Entry<String, String> pair : jobConf) {
hs2Config.set(pair.getKey(), pair.getValue());
}
}
@@ -1078,32 +1070,27 @@ public abstract class XTestCase extends TestCase {
return hiveserver2.getJdbcURL();
}
- protected String getHiveServer2JdbcURL(final String dbName) {
+ protected String getHiveServer2JdbcURL(String dbName) {
return hiveserver2.getJdbcURL(dbName);
}
private static void shutdownMiniCluster() {
try {
- if (yarnCluster != null) {
- final YarnJobActions yarnJobActions =
- new YarnJobActions.Builder(yarnCluster.getConfig(), ApplicationsRequestScope.ALL)
- .build();
- final Set<ApplicationId> allYarnJobs = yarnJobActions.getYarnJobs();
-
- yarnJobActions.killSelectedYarnJobs(allYarnJobs);
-
- yarnCluster.stop();
+ if (mrCluster != null) {
+ mrCluster.shutdown();
}
- } catch (final Exception ex) {
- System.out.println(ex.getMessage());
+ }
+ catch (Exception ex) {
+ System.out.println(ex);
}
try {
if (dfsCluster != null) {
dfsCluster.shutdown();
}
- } catch (final Exception ex) {
- System.out.println(ex.getMessage());
+ }
+ catch (Exception ex) {
+ System.out.println(ex);
}
// This is tied to the MiniCluster because it inherits configs from there
hs2Config = null;
@@ -1114,11 +1101,11 @@ public abstract class XTestCase extends TestCase {
if (dfsCluster2 != null) {
dfsCluster2.shutdown();
}
- } catch (final Exception ex) {
+ }
+ catch (Exception ex) {
System.out.println(ex);
}
}
-
private static final AtomicLong LAST_TESTCASE_FINISHED = new AtomicLong();
private static final AtomicInteger RUNNING_TESTCASES = new AtomicInteger();
@@ -1129,7 +1116,7 @@ public abstract class XTestCase extends TestCase {
}
public void run() {
- final long shutdownWait = Long.parseLong(System.getProperty(TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT, "10")) * 1000;
+ long shutdownWait = Long.parseLong(System.getProperty(TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT, "10")) * 1000;
LAST_TESTCASE_FINISHED.set(System.currentTimeMillis());
while (true) {
if (RUNNING_TESTCASES.get() == 0) {
@@ -1139,7 +1126,8 @@ public abstract class XTestCase extends TestCase {
}
try {
Thread.sleep(1000);
- } catch (final InterruptedException ex) {
+ }
+ catch (InterruptedException ex) {
break;
}
}
@@ -1149,10 +1137,10 @@ public abstract class XTestCase extends TestCase {
}
@SuppressWarnings("deprecation")
- private JobConf createJobConfFromYarnCluster() {
- final JobConf jobConf = new JobConf();
- final JobConf jobConfYarn = new JobConf(yarnCluster.getConfig());
- for (final Entry<String, String> entry : jobConfYarn) {
+ private JobConf createJobConfFromMRCluster() {
+ JobConf jobConf = new JobConf();
+ JobConf jobConfMR = mrCluster.createJobConf();
+ for ( Entry<String, String> entry : jobConfMR) {
// MiniMRClientClusterFactory sets the job jar in Hadoop 2.0 causing tests to fail
// TODO call conf.unset after moving completely to Hadoop 2.x
if (!(entry.getKey().equals("mapreduce.job.jar") || entry.getKey().equals("mapred.jar"))) {
@@ -1167,16 +1155,15 @@ public abstract class XTestCase extends TestCase {
* @return a jobconf preconfigured to talk with the test cluster/minicluster.
*/
protected JobConf createJobConf() throws IOException {
- final JobConf jobConf;
-
- if (yarnCluster != null) {
- jobConf = createJobConfFromYarnCluster();
- } else {
+ JobConf jobConf;
+ if (mrCluster != null) {
+ jobConf = createJobConfFromMRCluster();
+ }
+ else {
jobConf = new JobConf();
jobConf.set("mapred.job.tracker", getJobTrackerUri());
jobConf.set("fs.default.name", getNameNodeUri());
}
-
return jobConf;
}
@@ -1199,22 +1186,29 @@ public abstract class XTestCase extends TestCase {
*
* @param executable The ShutdownJobTrackerExecutable to execute while the JobTracker is shutdown
*/
- protected void executeWhileJobTrackerIsShutdown(final ShutdownJobTrackerExecutable executable) {
+ protected void executeWhileJobTrackerIsShutdown(ShutdownJobTrackerExecutable executable) {
+ mrCluster.stopJobTracker();
+ Exception ex = null;
try {
executable.execute();
- } catch (final Exception e) {
- throw new RuntimeException(e);
+ } catch (Exception e) {
+ ex = e;
+ } finally {
+ mrCluster.startJobTracker();
+ }
+ if (ex != null) {
+ throw new RuntimeException(ex);
}
}
protected Services setupServicesForHCatalog() throws ServiceException {
- final Services services = new Services();
+ Services services = new Services();
setupServicesForHCataLogImpl(services);
return services;
}
- private void setupServicesForHCataLogImpl(final Services services) {
- final Configuration conf = services.getConf();
+ private void setupServicesForHCataLogImpl(Services services) {
+ Configuration conf = services.getConf();
conf.set(Services.CONF_SERVICE_EXT_CLASSES,
JMSAccessorService.class.getName() + "," +
PartitionDependencyManagerService.class.getName() + "," +
@@ -1222,31 +1216,31 @@ public abstract class XTestCase extends TestCase {
conf.set(HCatAccessorService.JMS_CONNECTIONS_PROPERTIES,
"default=java.naming.factory.initial#" + ActiveMQConnFactory + ";" +
"java.naming.provider.url#" + localActiveMQBroker +
- "connectionFactoryNames#" + "ConnectionFactory");
+ "connectionFactoryNames#"+ "ConnectionFactory");
conf.set(URIHandlerService.URI_HANDLERS,
FSURIHandler.class.getName() + "," + HCatURIHandler.class.getName());
setSystemProperty("java.naming.factory.initial", "org.apache.activemq.jndi.ActiveMQInitialContextFactory");
setSystemProperty("java.naming.provider.url", "vm://localhost?broker.persistent=false");
}
- protected Services setupServicesForHCatalog(final Services services) throws ServiceException {
+ protected Services setupServicesForHCatalog(Services services) throws ServiceException {
setupServicesForHCataLogImpl(services);
return services;
}
- protected YarnApplicationState waitUntilYarnAppState(final String externalId, final EnumSet<YarnApplicationState> acceptedStates)
+ protected YarnApplicationState waitUntilYarnAppState(String externalId, final EnumSet<YarnApplicationState> acceptedStates)
throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
final MutableObject<YarnApplicationState> finalState = new MutableObject<YarnApplicationState>();
- final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
+ JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
final YarnClient yarnClient = Services.get().get(HadoopAccessorService.class).createYarnClient(getTestUser(), jobConf);
try {
waitFor(60 * 1000, new Predicate() {
@Override
public boolean evaluate() throws Exception {
- final YarnApplicationState state = yarnClient.getApplicationReport(appId).getYarnApplicationState();
+ YarnApplicationState state = yarnClient.getApplicationReport(appId).getYarnApplicationState();
finalState.setValue(state);
return acceptedStates.contains(state);
@@ -1262,20 +1256,20 @@ public abstract class XTestCase extends TestCase {
return finalState.getValue();
}
- protected void waitUntilYarnAppDoneAndAssertSuccess(final String externalId) throws HadoopAccessorException, IOException, YarnException {
- final YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
+ protected void waitUntilYarnAppDoneAndAssertSuccess(String externalId) throws HadoopAccessorException, IOException, YarnException {
+ YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
assertEquals("YARN App state", YarnApplicationState.FINISHED, state);
}
- protected void waitUntilYarnAppKilledAndAssertSuccess(final String externalId) throws HadoopAccessorException, IOException, YarnException {
- final YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
+ protected void waitUntilYarnAppKilledAndAssertSuccess(String externalId) throws HadoopAccessorException, IOException, YarnException {
+ YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
assertEquals("YARN App state", YarnApplicationState.KILLED, state);
}
protected YarnApplicationState getYarnApplicationState(String externalId) throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
YarnApplicationState state = null;
- final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
+ JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
// This is needed here because we need a mutable final YarnClient
final MutableObject<YarnClient> yarnClientMO = new MutableObject<YarnClient>(null);
try {
[09/48] oozie git commit: OOZIE-2666 Support embedding Jetty into
Oozie (asasvari via rkanter)
Posted by pb...@apache.org.
OOZIE-2666 Support embedding Jetty into Oozie (asasvari via rkanter)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/ca01c283
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/ca01c283
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/ca01c283
Branch: refs/heads/oya
Commit: ca01c28362ede63bac17e32af42809b1c90c5ce9
Parents: ca4eac4
Author: Robert Kanter <rk...@cloudera.com>
Authored: Sat Nov 12 21:41:17 2016 +0100
Committer: Robert Kanter <rk...@cloudera.com>
Committed: Sat Nov 12 21:41:17 2016 +0100
----------------------------------------------------------------------
bin/mkdistro.sh | 2 +-
core/src/main/conf/oozie-env.sh | 6 +-
.../java/org/apache/oozie/util/ConfigUtils.java | 12 +-
.../org/apache/oozie/util/Instrumentation.java | 8 +-
core/src/main/resources/oozie-default.xml | 93 +++++-
distro/pom.xml | 99 ++++---
distro/src/main/bin/oozie-jetty-server.sh | 226 +++++++++++++++
distro/src/main/bin/oozie-setup.sh | 285 +++++++++++--------
distro/src/main/bin/oozie-sys.sh | 36 ++-
distro/src/main/bin/oozie-tomcat-server.sh | 89 ++++++
distro/src/main/bin/oozied.sh | 98 ++-----
pom.xml | 72 ++++-
release-log.txt | 1 +
server/pom.xml | 257 +++++++++++++++++
server/src/main/assemblies/empty.xml | 21 ++
.../oozie/server/EmbeddedOozieServer.java | 206 ++++++++++++++
.../org/apache/oozie/server/FilterMapper.java | 61 ++++
.../oozie/server/HttpConfigurationWrapper.java | 63 ++++
.../org/apache/oozie/server/JspHandler.java | 161 +++++++++++
.../oozie/server/SSLServerConnectorFactory.java | 136 +++++++++
.../org/apache/oozie/server/ServletMapper.java | 95 +++++++
.../oozie/server/WebRootResourceLocator.java | 39 +++
.../ConstraintSecurityHandlerProvider.java | 47 +++
.../oozie/server/guice/JettyServerProvider.java | 48 ++++
.../oozie/server/guice/JspHandlerProvider.java | 47 +++
.../oozie/server/guice/OozieGuiceModule.java | 45 +++
.../server/guice/RewriteHandlerProvider.java | 44 +++
.../oozie/server/guice/ServicesProvider.java | 39 +++
server/src/main/resources/checkstyle-header.txt | 17 ++
server/src/main/resources/checkstyle.xml | 41 +++
.../oozie/server/TestEmbeddedOozieServer.java | 119 ++++++++
.../org/apache/oozie/server/TestJspHandler.java | 94 ++++++
.../server/TestSSLServerConnectorFactory.java | 137 +++++++++
src/main/assemblies/distro-jetty.xml | 155 ++++++++++
src/main/assemblies/distro-tomcat.xml | 153 ++++++++++
src/main/assemblies/distro.xml | 155 ----------
webapp/src/main/webapp/403.html | 31 ++
webapp/src/main/webapp/404.html | 31 ++
38 files changed, 2862 insertions(+), 407 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/bin/mkdistro.sh
----------------------------------------------------------------------
diff --git a/bin/mkdistro.sh b/bin/mkdistro.sh
index 1ccd495..e0cff36 100755
--- a/bin/mkdistro.sh
+++ b/bin/mkdistro.sh
@@ -70,7 +70,7 @@ MVN_OPTS="-Dbuild.time=${DATETIME} -Dvc.revision=${VC_REV} -Dvc.url=${VC_URL} -D
export DATETIME2=`date -u "+%Y%m%d-%H%M%SGMT"`
mvn clean package assembly:single ${MVN_OPTS} "$@"
-if [ "$?" != "0" ]; then
+if [ "$?" -ne "0" ]; then
echo
echo "ERROR, Oozie distro creation failed"
echo
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/core/src/main/conf/oozie-env.sh
----------------------------------------------------------------------
diff --git a/core/src/main/conf/oozie-env.sh b/core/src/main/conf/oozie-env.sh
index 390c955..bc8c601 100644
--- a/core/src/main/conf/oozie-env.sh
+++ b/core/src/main/conf/oozie-env.sh
@@ -22,7 +22,9 @@
# Settings for the Embedded Tomcat that runs Oozie
# Java System properties for Oozie should be specified in this variable
#
-export CATALINA_OPTS="$CATALINA_OPTS -Xmx1024m"
+if [ "${OOZIE_USE_TOMCAT}" = "1" ]; then
+ export CATALINA_OPTS="$CATALINA_OPTS -Xmx1024m"
+fi
# Oozie configuration file to load from Oozie configuration directory
#
@@ -66,4 +68,4 @@ export CATALINA_OPTS="$CATALINA_OPTS -Xmx1024m"
# The Oozie Instance ID
#
-# export OOZIE_INSTANCE_ID="${OOZIE_HTTP_HOSTNAME}"
\ No newline at end of file
+# export OOZIE_INSTANCE_ID="${OOZIE_HTTP_HOSTNAME}"
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/core/src/main/java/org/apache/oozie/util/ConfigUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/util/ConfigUtils.java b/core/src/main/java/org/apache/oozie/util/ConfigUtils.java
index a56c5a2..792723f 100644
--- a/core/src/main/java/org/apache/oozie/util/ConfigUtils.java
+++ b/core/src/main/java/org/apache/oozie/util/ConfigUtils.java
@@ -28,6 +28,10 @@ import org.apache.oozie.servlet.ServicesLoader;
*/
public class ConfigUtils {
private final static XLog LOG = XLog.getLog(ConfigUtils.class);
+ public static final String OOZIE_HTTPS_ENABLED = "oozie.https.enabled";
+ public static final String OOZIE_HTTP_HOSTNAME = "oozie.http.hostname";
+ public static final String OOZIE_HTTPS_PORT = "oozie.https.port";
+ public static final String OOZIE_HTTP_PORT = "oozie.http.port";
public static boolean BOOLEAN_DEFAULT = false;
public static String STRING_DEFAULT = "";
@@ -92,13 +96,13 @@ public class ConfigUtils {
else {
sb.append("http://");
}
- sb.append(ConfigurationService.get("oozie.http.hostname"));
+ sb.append(ConfigurationService.get(OOZIE_HTTP_HOSTNAME));
sb.append(":");
if (secure) {
- sb.append(ConfigurationService.get("oozie.https.port"));
+ sb.append(ConfigurationService.get(OOZIE_HTTPS_PORT));
}
else {
- sb.append(ConfigurationService.get("oozie.http.port"));
+ sb.append(ConfigurationService.get(OOZIE_HTTP_PORT));
}
sb.append("/oozie");
return sb.toString();
@@ -110,7 +114,7 @@ public class ConfigUtils {
* @return the HTTP or HTTPS URL for this Oozie server
*/
public static String getOozieEffectiveUrl() {
- return getOozieURL(ServicesLoader.isSSLEnabled());
+ return getOozieURL(ServicesLoader.isSSLEnabled() || ConfigurationService.getBoolean(OOZIE_HTTPS_ENABLED));
}
public static boolean isBackwardSupportForCoordStatus() {
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/core/src/main/java/org/apache/oozie/util/Instrumentation.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/util/Instrumentation.java b/core/src/main/java/org/apache/oozie/util/Instrumentation.java
index 99d64ac..55e00d4 100644
--- a/core/src/main/java/org/apache/oozie/util/Instrumentation.java
+++ b/core/src/main/java/org/apache/oozie/util/Instrumentation.java
@@ -784,9 +784,11 @@ public class Instrumentation {
if (map.containsKey(name)) {
throw new RuntimeException(XLog.format("Sampler group=[{0}] name=[{1}] already defined", group, name));
}
- Sampler sampler = new Sampler(period, interval, variable);
- map.put(name, sampler);
- scheduler.scheduleAtFixedRate(sampler, 0, sampler.getSamplingInterval(), TimeUnit.SECONDS);
+ else {
+ Sampler sampler = new Sampler(period, interval, variable);
+ map.put(name, sampler);
+ scheduler.scheduleAtFixedRate(sampler, 0, sampler.getSamplingInterval(), TimeUnit.SECONDS);
+ }
}
finally {
samplerLock.unlock();
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/core/src/main/resources/oozie-default.xml
----------------------------------------------------------------------
diff --git a/core/src/main/resources/oozie-default.xml b/core/src/main/resources/oozie-default.xml
index e71ebe3..8565643 100644
--- a/core/src/main/resources/oozie-default.xml
+++ b/core/src/main/resources/oozie-default.xml
@@ -2488,9 +2488,10 @@ will be the requeue interval for the actions which are waiting for a long time w
<property>
<name>oozie.http.hostname</name>
- <value>localhost</value>
+ <value>0.0.0.0</value>
<description>
- Oozie server host name.
+ Oozie server host name. The network interface Oozie server binds to as an IP address or a hostname.
+ Most users won't need to change this setting from the default value.
</description>
</property>
@@ -2503,6 +2504,22 @@ will be the requeue interval for the actions which are waiting for a long time w
</property>
<property>
+ <name>oozie.http.request.header.size</name>
+ <value>65536</value>
+ <description>
+ Oozie HTTP request header size.
+ </description>
+ </property>
+
+ <property>
+ <name>oozie.http.response.header.size</name>
+ <value>65536</value>
+ <description>
+ Oozie HTTP response header size.
+ </description>
+ </property>
+
+ <property>
<name>oozie.https.port</name>
<value>11443</value>
<description>
@@ -2511,6 +2528,70 @@ will be the requeue interval for the actions which are waiting for a long time w
</property>
<property>
+ <name>oozie.https.enabled</name>
+ <value>false</value>
+ <description>
+ Controls whether SSL encryption is enabled.
+ </description>
+ </property>
+
+ <property>
+ <name>oozie.https.truststore.file</name>
+ <value>custom.truststore</value>
+ <description>
+ Path to a TrustStore file.
+ </description>
+ </property>
+
+ <property>
+ <name>oozie.https.truststore.pass</name>
+ <value>cloudera</value>
+ <description>
+ Password to the TrustStore.
+ </description>
+ </property>
+
+ <property>
+ <name>oozie.https.keystore.file</name>
+ <value></value>
+ <description>
+ Path to a KeyStore file.
+ </description>
+ </property>
+
+ <property>
+ <name>oozie.https.keystore.pass</name>
+ <value></value>
+ <description>
+ Password to the KeyStore.
+ </description>
+ </property>
+
+ <property>
+ <name>oozie.https.include.protocols</name>
+ <value>TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2</value>
+ <description>
+ Enabled TLS protocols.
+ </description>
+ </property>
+
+ <property>
+ <name>oozie.https.exclude.cipher.suites</name>
+ <value>TLS_ECDHE_RSA_WITH_RC4_128_SHA,SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA,SSL_RSA_WITH_DES_CBC_SHA,SSL_DHE_RSA_WITH_DES_CBC_SHA,SSL_RSA_EXPORT_WITH_RC4_40_MD5,SSL_RSA_EXPORT_WITH_DES40_CBC_SHA,SSL_RSA_WITH_RC4_128_MD5</value>
+ <description>
+ List of weak Cipher suites to exclude.
+ </description>
+ </property>
+
+ <property>
+ <name>oozie.jsp.tmp.dir</name>
+ <value>/tmp</value>
+ <description>
+ Temporary directory for compiling JSP pages.
+ </description>
+ </property>
+
+ <property>
<name>oozie.instance.id</name>
<value>${oozie.http.hostname}</value>
<description>
@@ -2519,6 +2600,14 @@ will be the requeue interval for the actions which are waiting for a long time w
</description>
</property>
+ <property>
+ <name>oozie.server.threadpool.max.threads</name>
+ <value>150</value>
+ <description>
+ Controls the threadpool size for the Oozie Server (both Jetty and Tomcat)
+ </description>
+ </property>
+
<!-- Sharelib Configuration -->
<property>
<name>oozie.service.ShareLibService.mapping.file</name>
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/distro/pom.xml
----------------------------------------------------------------------
diff --git a/distro/pom.xml b/distro/pom.xml
index def0069..ce5319f 100644
--- a/distro/pom.xml
+++ b/distro/pom.xml
@@ -31,6 +31,10 @@
<name>Apache Oozie Distro</name>
<packaging>jar</packaging>
+ <properties>
+ <distro.descriptor>../src/main/assemblies/distro-jetty.xml</distro.descriptor>
+ </properties>
+
<dependencies>
<dependency>
<groupId>org.apache.oozie</groupId>
@@ -54,7 +58,6 @@
<scope>compile</scope>
</dependency>
</dependencies>
-
<build>
<plugins>
<plugin>
@@ -74,49 +77,11 @@
<configuration>
<finalName>oozie-${project.version}</finalName>
<descriptors>
- <descriptor>../src/main/assemblies/distro.xml</descriptor>
+ <descriptor>${distro.descriptor}</descriptor>
</descriptors>
</configuration>
</plugin>
- <!-- Downloading Tomcat TAR.GZ, using downloads/ dir to avoid downloading over an over -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
- <executions>
- <execution>
- <configuration>
- <target>
- <mkdir dir="downloads"/>
- <get src="http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz"
- dest="downloads/tomcat-${tomcat.version}.tar.gz" verbose="true" skipexisting="true"/>
- <delete dir="target/tomcat"/>
- <mkdir dir="target/tomcat"/>
- <gunzip src="downloads/tomcat-${tomcat.version}.tar.gz"
- dest="target/tomcat/tomcat-${tomcat.version}.tar"/>
- <untar src="target/tomcat/tomcat-${tomcat.version}.tar" dest="target/tomcat"/>
- <move file="target/tomcat/apache-tomcat-${tomcat.version}" tofile="target/tomcat/oozie-server"/>
- <delete dir="target/tomcat/oozie-server/webapps"/>
- <mkdir dir="target/tomcat/oozie-server/webapps"/>
- <delete file="target/tomcat/oozie-server/conf/server.xml"/>
- <copy file="src/main/tomcat/server.xml" toDir="target/tomcat/oozie-server/conf"/>
- <copy file="src/main/tomcat/logging.properties"
- toDir="target/tomcat/oozie-server/conf"/>
- <mkdir dir="target/tomcat/oozie-server/conf/ssl"/>
- <copy file="src/main/tomcat/server.xml" toDir="target/tomcat/oozie-server/conf/ssl"/>
- <copy file="src/main/tomcat/ssl-server.xml" toDir="target/tomcat/oozie-server/conf/ssl"/>
- <copy file="src/main/tomcat/ssl-web.xml" toDir="target/tomcat/oozie-server/conf/ssl"/>
- <copy todir="target/tomcat/oozie-server/webapps/ROOT">
- <fileset dir="src/main/tomcat/ROOT"/>
- </copy>
- </target>
- </configuration>
- <goals>
- <goal>run</goal>
- </goals>
- <phase>package</phase>
- </execution>
- </executions>
- </plugin>
+
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
@@ -143,6 +108,56 @@
</dependency>
</dependencies>
</profile>
+
+ <profile>
+ <id>tomcat</id>
+ <properties>
+ <distro.descriptor>../src/main/assemblies/distro-tomcat.xml</distro.descriptor>
+ </properties>
+ <build>
+ <plugins>
+ <!-- Downloading Tomcat TAR.GZ, using downloads/ dir to avoid downloading over an over -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <version>1.6</version>
+ <executions>
+ <execution>
+ <configuration>
+ <target>
+ <mkdir dir="downloads"/>
+ <get src="http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz"
+ dest="downloads/tomcat-${tomcat.version}.tar.gz" verbose="true" skipexisting="true"/>
+ <delete dir="target/tomcat"/>
+ <mkdir dir="target/tomcat"/>
+ <gunzip src="downloads/tomcat-${tomcat.version}.tar.gz"
+ dest="target/tomcat/tomcat-${tomcat.version}.tar"/>
+ <untar src="target/tomcat/tomcat-${tomcat.version}.tar" dest="target/tomcat"/>
+ <move file="target/tomcat/apache-tomcat-${tomcat.version}" tofile="target/tomcat/oozie-server"/>
+ <delete dir="target/tomcat/oozie-server/webapps"/>
+ <mkdir dir="target/tomcat/oozie-server/webapps"/>
+ <delete file="target/tomcat/oozie-server/conf/server.xml"/>
+ <copy file="src/main/tomcat/server.xml" toDir="target/tomcat/oozie-server/conf"/>
+ <copy file="src/main/tomcat/logging.properties"
+ toDir="target/tomcat/oozie-server/conf"/>
+ <mkdir dir="target/tomcat/oozie-server/conf/ssl"/>
+ <copy file="src/main/tomcat/server.xml" toDir="target/tomcat/oozie-server/conf/ssl"/>
+ <copy file="src/main/tomcat/ssl-server.xml" toDir="target/tomcat/oozie-server/conf/ssl"/>
+ <copy file="src/main/tomcat/ssl-web.xml" toDir="target/tomcat/oozie-server/conf/ssl"/>
+ <copy todir="target/tomcat/oozie-server/webapps/ROOT">
+ <fileset dir="src/main/tomcat/ROOT"/>
+ </copy>
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <phase>package</phase>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
</profiles>
</project>
-
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/distro/src/main/bin/oozie-jetty-server.sh
----------------------------------------------------------------------
diff --git a/distro/src/main/bin/oozie-jetty-server.sh b/distro/src/main/bin/oozie-jetty-server.sh
new file mode 100644
index 0000000..8acfc2e
--- /dev/null
+++ b/distro/src/main/bin/oozie-jetty-server.sh
@@ -0,0 +1,226 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Set Jetty related environment variables
+setup_jetty_log_and_pid() {
+ if [ "${JETTY_OUT}" = "" ]; then
+ export JETTY_OUT=${OOZIE_LOG}/jetty.out
+ print "Setting JETTY_OUT: ${JETTY_OUT}"
+ else
+ print "Using JETTY_OUT: ${JETTY_OUT}"
+ fi
+
+ if [ "${JETTY_PID_FILE}" = "" ]; then
+ export JETTY_PID_FILE=${JETTY_DIR}/oozie.pid
+ print "Setting JETTY_PID_FILE: ${JETTY_PID_FILE}"
+ else
+ print "Using JETTY_PID_FILE: ${JETTY_PID_FILE}"
+ fi
+}
+
+setup_java_opts() {
+ if [ -z "${JAVA_HOME}" -a -z "${JRE_HOME}" ]; then
+ if ${darwin}; then
+ if [ -x '/usr/libexec/java_home' ] ; then
+ export JAVA_HOME=`/usr/libexec/java_home`
+ elif [ -d "/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home" ]; then
+ export JAVA_HOME="/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home"
+ fi
+ else
+ JAVA_PATH=`which java 2>/dev/null`
+ if [ "x${JAVA_PATH}" != "x" ]; then
+ JAVA_PATH=`dirname ${JAVA_PATH} 2>/dev/null`
+ fi
+ if [ "x${JRE_HOME}" = "x" ]; then
+ if [ -x /usr/bin/java ]; then
+ JRE_HOME=/usr
+ fi
+ fi
+ fi
+ if [ -z "${JAVA_HOME}" -a -z "${JRE_HOME}" ]; then
+ echo "Neither the JAVA_HOME nor the JRE_HOME environment variable is defined"
+ echo "At least one of these environment variable is needed to run this program"
+ exit 1
+ fi
+ fi
+ if [ -z "${JRE_HOME}" ]; then
+ JRE_HOME="${JAVA_HOME}"
+ fi
+
+ JAVA_BIN="${JRE_HOME}"/bin/java
+ echo "Using Java executable from ${JRE_HOME}"
+}
+
+setup_jetty_opts() {
+ echo "Using JETTY_OPTS: ${JETTY_OPTS}"
+ jetty_opts="-Doozie.home.dir=${OOZIE_HOME}";
+ jetty_opts="${jetty_opts} -Doozie.config.dir=${OOZIE_CONFIG}";
+ jetty_opts="${jetty_opts} -Doozie.log.dir=${OOZIE_LOG}";
+ jetty_opts="${jetty_opts} -Doozie.data.dir=${OOZIE_DATA}";
+ jetty_opts="${jetty_opts} -Doozie.config.file=${OOZIE_CONFIG_FILE}";
+ jetty_opts="${jetty_opts} -Doozie.log4j.file=${OOZIE_LOG4J_FILE}";
+ jetty_opts="${jetty_opts} -Doozie.log4j.reload=${OOZIE_LOG4J_RELOAD}";
+ # add required native libraries such as compression codecs
+ jetty_opts="${jetty_opts} -Djava.library.path=${JAVA_LIBRARY_PATH}";
+
+ jetty_opts="${jetty_opts} -cp ${JETTY_DIR}/*:${JETTY_DIR}/dependency/*:${BASEDIR}/lib/*:${BASEDIR}/libtools/*:${JETTY_DIR}"
+ echo "Adding to JETTY_OPTS: ${jetty_opts}"
+
+ export JETTY_OPTS="${JETTY_OPTS} ${jetty_opts}"
+}
+
+start_jetty() {
+ if [ ! -z "${JETTY_PID_FILE}" ]; then
+ if [ -f "${JETTY_PID_FILE}" ]; then
+ if [ -s "${JETTY_PID_FILE}" ]; then
+ echo "Existing PID file found during start."
+ if [ -r "${JETTY_PID_FILE}" ]; then
+ PID=$(cat "${JETTY_PID_FILE}")
+ ps -p "$PID" >/dev/null 2>&1
+ if [ $? -eq 0 ] ; then
+ echo "Oozie server appears to still be running with PID $PID. Start aborted."
+ echo "If the following process is not a Jetty process, remove the PID file and try again:"
+ ps -f -p "$PID"
+ exit 1
+ else
+ echo "Removing/clearing stale PID file."
+ rm -f "${JETTY_PID_FILE}" >/dev/null 2>&1
+ if [ $? != 0 ]; then
+ if [ -w "${JETTY_PID_FILE}" ]; then
+ cat /dev/null > "${JETTY_PID_FILE}"
+ else
+ echo "Unable to remove or clear stale PID file. Start aborted."
+ exit 1
+ fi
+ fi
+ fi
+ else
+ echo "Unable to read PID file. Start aborted."
+ exit 1
+ fi
+ else
+ rm -f "$JETTY_PID_FILE" >/dev/null 2>&1
+ if [ $? != 0 ]; then
+ if [ ! -w "$JETTY_PID_FILE" ]; then
+ echo "Unable to remove or write to empty PID file. Start aborted."
+ exit 1
+ fi
+ fi
+ fi
+ fi
+ fi
+
+ ${JAVA_BIN} ${JETTY_OPTS} org.apache.oozie.server.EmbeddedOozieServer >> "${JETTY_OUT}" 2>&1 &
+ PID=$!
+ if [ ${PID} ]; then
+ echo -n "Oozie server started"
+ fi
+
+ if [ ! -z "${JETTY_PID_FILE}" ]; then
+ echo -n $! > "${JETTY_PID_FILE}"
+ echo -n " - PID: ${PID}."
+ fi
+ echo
+}
+
+run_jetty() {
+ ${JAVA_BIN} ${JETTY_OPTS} org.apache.oozie.server.EmbeddedOozieServer
+}
+
+#TODO allow users to force kill jetty. Add --force
+stop_jetty() {
+ if [ ! -z "${JETTY_PID_FILE}" ]; then
+ if [ -f "${JETTY_PID_FILE}" ]; then
+ if [ -s "${JETTY_PID_FILE}" ]; then
+ kill -0 "$(cat "${JETTY_PID_FILE}")" >/dev/null 2>&1
+ if [ $? -gt 0 ]; then
+ echo "PID file found but no matching process was found. Stop aborted."
+ exit 1
+ fi
+ else
+ echo "PID file is empty and has been ignored."
+ fi
+ else
+ echo "\$JETTY_PID_FILE was set but the specified file does not exist. Is Oozie server running? Stop aborted."
+ exit 1
+ fi
+ fi
+
+ kill "$(cat "${JETTY_PID_FILE}")"
+
+ RETRY_COUNT=5
+
+ if [ ! -z "${JETTY_PID_FILE}" ]; then
+ if [ -f "${JETTY_PID_FILE}" ]; then
+ while [ $RETRY_COUNT -ge 0 ]; do
+ kill -0 "$(cat ${JETTY_PID_FILE})" >/dev/null 2>&1
+ if [ $? -gt 0 ]; then
+ rm -f "${JETTY_PID_FILE}" >/dev/null 2>&1
+ if [ $? != 0 ]; then
+ if [ -w "${JETTY_PID_FILE}" ]; then
+ cat /dev/null > "${JETTY_PID_FILE}"
+ else
+ echo "Oozie server stopped but the PID file could not be removed or cleared."
+ fi
+ fi
+ break
+ fi
+ if [ ${RETRY_COUNT} -gt 0 ]; then
+ sleep 1
+ fi
+ if [ ${RETRY_COUNT} -eq 0 ]; then
+ echo "Oozie server did not stop in time. PID file was not removed."
+ fi
+ RETRY_COUNT=$((RETRY_COUNT - 1))
+ done
+ fi
+ fi
+}
+
+symlink_lib() {
+ test -e ${BASEDIR}/lib || ln -s ${JETTY_DIR}/webapp/WEB-INF/lib ${BASEDIR}/lib
+}
+
+jetty_main() {
+ source ${BASEDIR}/bin/oozie-sys.sh
+ JETTY_DIR=${BASEDIR}/embedded-oozie-server
+
+ setup_jetty_log_and_pid
+ setup_java_opts
+ setup_jetty_opts
+
+ actionCmd=$1
+ case $actionCmd in
+ (run)
+ ${BASEDIR}/bin/oozie-setup.sh
+ symlink_lib
+ setup_ooziedb
+ run_jetty
+ ;;
+ (start)
+ ${BASEDIR}/bin/oozie-setup.sh
+ symlink_lib
+ setup_ooziedb
+ start_jetty
+ ;;
+ (stop)
+ stop_jetty
+ ;;
+ esac
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/distro/src/main/bin/oozie-setup.sh
----------------------------------------------------------------------
diff --git a/distro/src/main/bin/oozie-setup.sh b/distro/src/main/bin/oozie-setup.sh
index 79b049b..9d6a2d0 100644
--- a/distro/src/main/bin/oozie-setup.sh
+++ b/distro/src/main/bin/oozie-setup.sh
@@ -74,9 +74,7 @@ function checkExec() {
then
echo
echo "Failed: $1"
- echo
- cleanUp
- exit -1;
+ cleanup_and_exit
fi
}
@@ -85,9 +83,7 @@ function checkFileExists() {
if [ ! -e ${1} ]; then
echo
echo "File/Dir does no exist: ${1}"
- echo
- cleanUp
- exit -1
+ cleanup_and_exit
fi
}
@@ -96,9 +92,7 @@ function checkFileDoesNotExist() {
if [ -e ${1} ]; then
echo
echo "File/Dir already exists: ${1}"
- echo
- cleanUp
- exit -1
+ cleanup_and_exit
fi
}
@@ -119,6 +113,10 @@ done
BASEDIR=`dirname ${PRG}`
BASEDIR=`cd ${BASEDIR}/..;pwd`
+JETTY_DIR=${BASEDIR}/embedded-oozie-server
+JETTY_WEBAPP_DIR=${JETTY_DIR}/webapp
+JETTY_LIB_DIR=${JETTY_WEBAPP_DIR}/WEB-INF/lib/
+
source ${BASEDIR}/bin/oozie-sys.sh -silent
addExtjs=""
@@ -145,10 +143,16 @@ do
#Create lib directory from war if lib doesn't exist
if [ ! -d "${BASEDIR}/lib" ]; then
mkdir ${BASEDIR}/lib
- unzip ${BASEDIR}/oozie.war WEB-INF/lib/*.jar -d ${BASEDIR}/lib > /dev/null
- mv ${BASEDIR}/lib/WEB-INF/lib/*.jar ${BASEDIR}/lib/
- rmdir ${BASEDIR}/lib/WEB-INF/lib
- rmdir ${BASEDIR}/lib/WEB-INF
+
+ if [ "${OOZIE_USE_TOMCAT}" = "1" ]; then
+ unzip ${BASEDIR}/oozie.war WEB-INF/lib/*.jar -d ${BASEDIR}/lib > /dev/null
+ mv ${BASEDIR}/lib/WEB-INF/lib/*.jar ${BASEDIR}/lib/
+ rmdir ${BASEDIR}/lib/WEB-INF/lib
+ rmdir ${BASEDIR}/lib/WEB-INF
+ else
+ cp ${JETTY_LIB_DIR}/* ${BASEDIR}/lib
+ fi
+
fi
OOZIECPPATH=""
@@ -187,7 +191,7 @@ do
shift
done
-if [ -e "${CATALINA_PID}" ]; then
+if [ -e "${CATALINA_PID}" -a "${OOZIE_USE_TOMCAT}" = "1" ]; then
echo
echo "ERROR: Stop Oozie first"
echo
@@ -196,19 +200,25 @@ fi
echo
-if [ "${prepareWar}" == "" ]; then
- echo "no arguments given"
- printUsage
- exit -1
-else
- if [ -e "${outputWar}" ]; then
- chmod -f u+w ${outputWar}
- rm -rf ${outputWar}
- fi
- rm -rf ${outputWarExpanded}
- # Adding extension JARs
+log_ready_to_start() {
+ echo
+
+ echo "INFO: Oozie is ready to be started"
+
+ echo
+}
+
+check_extjs() {
+ if [ "${addExtjs}" = "true" ]; then
+ checkFileExists ${extjsHome}
+ else
+ echo "INFO: Oozie webconsole disabled, ExtJS library not specified"
+ fi
+}
+# Check if it is necessary to add extension JARs and ExtJS
+check_adding_extensions() {
libext=${OOZIE_HOME}/libext
if [ "${additionalDir}" != "" ]; then
libext=${additionalDir}
@@ -227,114 +237,167 @@ else
addExtjs=true
fi
fi
+}
- prepare
-
- checkFileExists ${inputWar}
- checkFileDoesNotExist ${outputWar}
+cleanup_and_exit() {
+ echo
+ cleanUp
+ exit -1
+}
- if [ "${addExtjs}" = "true" ]; then
- checkFileExists ${extjsHome}
- else
- echo "INFO: Oozie webconsole disabled, ExtJS library not specified"
+prepare_jetty() {
+ check_adding_extensions
+ check_extjs
+
+ if [ "${addExtjs}" = "true" -a ! -e ${JETTY_WEBAPP_DIR}/ext-2.2 ]; then
+ unzip ${extjsHome} -d ${JETTY_WEBAPP_DIR}
+ checkExec "Extracting ExtJS to ${JETTY_WEBAPP_DIR}/"
+ elif [ "${addExtjs}" = "true" -a -e ${JETTY_WEBAPP_DIR}/ext-2.2 ]; then
+ # TODO
+ echo "${JETTY_WEBAPP_DIR}/ext-2.2 already exists"
+ cleanup_and_exit
fi
if [ "${addJars}" = "true" ]; then
- for jarPath in ${jarsPath//:/$'\n'}
- do
- checkFileExists ${jarPath}
- done
+ for jarPath in ${jarsPath//:/$'\n'}
+ do
+ found=`ls ${JETTY_LIB_DIR}/${jarPath} 2> /dev/null | wc -l`
+ checkExec "looking for JAR ${jarPath} in ${JETTY_LIB_DIR}"
+ if [ ! $found = 0 ]; then
+ echo
+ echo "${JETTY_LIB_DIR} already contains JAR ${jarPath}"
+ cleanup_and_exit
+ fi
+ cp ${jarPath} ${JETTY_LIB_DIR}
+ checkExec "copying jar ${jarPath} to '${JETTY_LIB_DIR}'"
+ done
fi
+}
- #Unpacking original war
- unzip ${inputWar} -d ${tmpWarDir} > /dev/null
- checkExec "unzipping Oozie input WAR"
+prepare_tomcat() {
+ if [ "${prepareWar}" == "" ]; then
+ echo "no arguments given"
+ printUsage
+ exit -1
+ else
+ if [ -e "${outputWar}" ]; then
+ chmod -f u+w ${outputWar}
+ rm -rf ${outputWar}
+ fi
+ rm -rf ${outputWarExpanded}
- components=""
+ check_adding_extensions
- if [ "${secure}" != "" ]; then
- #Use the SSL version of server.xml in oozie-server
- checkFileExists ${secureConfigsDir}/ssl-server.xml
- cp ${secureConfigsDir}/ssl-server.xml ${CATALINA_BASE}/conf/server.xml
- #Inject the SSL version of web.xml in oozie war
- checkFileExists ${secureConfigsDir}/ssl-web.xml
- cp ${secureConfigsDir}/ssl-web.xml ${tmpWarDir}/WEB-INF/web.xml
- echo "INFO: Using secure server.xml and secure web.xml"
- else
- #Use the regular version of server.xml in oozie-server
- checkFileExists ${secureConfigsDir}/server.xml
- cp ${secureConfigsDir}/server.xml ${CATALINA_BASE}/conf/server.xml
- #No need to restore web.xml because its already in the original WAR file
- fi
+ prepare
- if [ "${addExtjs}" = "true" ]; then
- if [ ! "${components}" = "" ];then
- components="${components}, "
- fi
- components="${components}ExtJS library"
- if [ -e ${tmpWarDir}/ext-2.2 ]; then
- echo
- echo "Specified Oozie WAR '${inputWar}' already contains ExtJS library files"
- echo
- cleanUp
- exit -1
- fi
- #If the extjs path given is a ZIP, expand it and use it from there
- if [ -f ${extjsHome} ]; then
- unzip ${extjsHome} -d ${tmpDir} > /dev/null
- extjsHome=${tmpDir}/ext-2.2
+ checkFileExists ${inputWar}
+ checkFileDoesNotExist ${outputWar}
+
+ check_extjs
+
+ if [ "${addJars}" = "true" ]; then
+ for jarPath in ${jarsPath//:/$'\n'}
+ do
+ checkFileExists ${jarPath}
+ done
fi
- #Inject the library in oozie war
- cp -r ${extjsHome} ${tmpWarDir}/ext-2.2
- checkExec "copying ExtJS files into staging"
- fi
- if [ "${addJars}" = "true" ]; then
- if [ ! "${components}" = "" ];then
- components="${components}, "
+ #Unpacking original war
+ unzip ${inputWar} -d ${tmpWarDir} > /dev/null
+ checkExec "unzipping Oozie input WAR"
+
+ components=""
+
+ if [ "${OOZIE_USE_TOMCAT}" == "1" ]; then
+ if [ "${secure}" != "" ]; then
+ #Use the SSL version of server.xml in oozie-server
+ checkFileExists ${secureConfigsDir}/ssl-server.xml
+ cp ${secureConfigsDir}/ssl-server.xml ${CATALINA_BASE}/conf/server.xml
+ #Inject the SSL version of web.xml in oozie war
+ checkFileExists ${secureConfigsDir}/ssl-web.xml
+ cp ${secureConfigsDir}/ssl-web.xml ${tmpWarDir}/WEB-INF/web.xml
+ echo "INFO: Using secure server.xml and secure web.xml"
+ else
+ #Use the regular version of server.xml in oozie-server
+ checkFileExists ${secureConfigsDir}/server.xml
+ cp ${secureConfigsDir}/server.xml ${CATALINA_BASE}/conf/server.xml
+ #No need to restore web.xml because its already in the original WAR file
+ fi
fi
- components="${components}JARs"
- for jarPath in ${jarsPath//:/$'\n'}
- do
- found=`ls ${tmpWarDir}/WEB-INF/lib/${jarPath} 2> /dev/null | wc -l`
- checkExec "looking for JAR ${jarPath} in input WAR"
- if [ ! $found = 0 ]; then
- echo
- echo "Specified Oozie WAR '${inputWar}' already contains JAR ${jarPath}"
+ if [ "${addExtjs}" = "true" ]; then
+ if [ ! "${components}" = "" ];then
+ components="${components}, "
+ fi
+ components="${components}ExtJS library"
+ if [ -e ${tmpWarDir}/ext-2.2 ]; then
echo
- cleanUp
- exit -1
+ echo "Specified Oozie WAR '${inputWar}' already contains ExtJS library files"
+ cleanup_and_exit
fi
- cp ${jarPath} ${tmpWarDir}/WEB-INF/lib/
- checkExec "copying jar ${jarPath} to staging"
- done
- fi
+ #If the extjs path given is a ZIP, expand it and use it from there
+ if [ -f ${extjsHome} ]; then
+ unzip ${extjsHome} -d ${tmpDir} > /dev/null
+ extjsHome=${tmpDir}/ext-2.2
+ fi
+ #Inject the library in oozie war
+ cp -r ${extjsHome} ${tmpWarDir}/ext-2.2
+ checkExec "copying ExtJS files into staging"
+ fi
- #Creating new Oozie WAR
- currentDir=`pwd`
- cd ${tmpWarDir}
- zip -r oozie.war * > /dev/null
- checkExec "creating new Oozie WAR"
- cd ${currentDir}
+ if [ "${addJars}" = "true" ]; then
+ if [ ! "${components}" = "" ];then
+ components="${components}, "
+ fi
+ components="${components}JARs"
- #copying new Oozie WAR to asked location
- cp ${tmpWarDir}/oozie.war ${outputWar}
- checkExec "copying new Oozie WAR"
+ for jarPath in ${jarsPath//:/$'\n'}
+ do
+ found=`ls ${tmpWarDir}/WEB-INF/lib/${jarPath} 2> /dev/null | wc -l`
+ checkExec "looking for JAR ${jarPath} in input WAR"
+ if [ ! $found = 0 ]; then
+ echo
+ echo "Specified Oozie WAR '${inputWar}' already contains JAR ${jarPath}"
+ cleanup_and_exit
+ fi
+ cp ${jarPath} ${tmpWarDir}/WEB-INF/lib/
+ checkExec "copying jar ${jarPath} to staging"
+ done
+ fi
- echo
- echo "New Oozie WAR file with added '${components}' at ${outputWar}"
- echo
- cleanUp
+ #Creating new Oozie WAR
+ currentDir=`pwd`
+ cd ${tmpWarDir}
+ zip -r oozie.war * > /dev/null
+ checkExec "creating new Oozie WAR"
+ cd ${currentDir}
- if [ "$?" != "0" ]; then
- exit -1
- fi
+ #copying new Oozie WAR to asked location
+ if [ "${OOZIE_USE_TOMCAT}" == "1" ]; then
+ cp ${tmpWarDir}/oozie.war ${outputWar}
+ checkExec "copying new Oozie WAR"
- echo
+ echo
+ echo "New Oozie WAR file with added '${components}' at ${outputWar}"
+ echo
+ fi
- echo "INFO: Oozie is ready to be started"
+ cleanUp
- echo
+ if [ "$?" -ne "0" ]; then
+ exit -1
+ fi
+ log_ready_to_start
+
+ fi
+}
+
+if [ "${OOZIE_USE_TOMCAT}" = "1" ]; then
+ prepare_tomcat
+else
+ prepare_jetty
fi
+
+log_ready_to_start
+exit 0
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/distro/src/main/bin/oozie-sys.sh
----------------------------------------------------------------------
diff --git a/distro/src/main/bin/oozie-sys.sh b/distro/src/main/bin/oozie-sys.sh
index 97d55a2..688aeb2 100755
--- a/distro/src/main/bin/oozie-sys.sh
+++ b/distro/src/main/bin/oozie-sys.sh
@@ -195,7 +195,7 @@ else
print "Using OOZIE_BASE_URL: ${OOZIE_BASE_URL}"
fi
-if [ "${CATALINA_BASE}" = "" ]; then
+if [ "${OOZIE_USE_TOMCAT}" = "1" -a "${CATALINA_BASE}" = "" ]; then
export CATALINA_BASE=${OOZIE_HOME}/oozie-server
print "Setting CATALINA_BASE: ${CATALINA_BASE}"
else
@@ -223,20 +223,40 @@ else
print "Using OOZIE_INSTANCE_ID: ${OOZIE_INSTANCE_ID}"
fi
-if [ "${CATALINA_OUT}" = "" ]; then
- export CATALINA_OUT=${OOZIE_LOG}/catalina.out
- print "Setting CATALINA_OUT: ${CATALINA_OUT}"
-else
- print "Using CATALINA_OUT: ${CATALINA_OUT}"
+if [ "${OOZIE_USE_TOMCAT}" = "1" ]; then
+ if [ "${CATALINA_OUT}" = "" ]; then
+ export CATALINA_OUT=${OOZIE_LOG}/catalina.out
+ print "Setting CATALINA_OUT: ${CATALINA_OUT}"
+ else
+ print "Using CATALINA_OUT: ${CATALINA_OUT}"
+ fi
fi
-if [ "${CATALINA_PID}" = "" ]; then
+if [ "${OOZIE_USE_TOMCAT}" = "1" -a "${CATALINA_PID}" = "" ]; then
export CATALINA_PID=${OOZIE_HOME}/oozie-server/temp/oozie.pid
print "Setting CATALINA_PID: ${CATALINA_PID}"
else
print "Using CATALINA_PID: ${CATALINA_PID}"
fi
-export CATALINA_OPTS="${CATALINA_OPTS} -Dderby.stream.error.file=${OOZIE_LOG}/derby.log"
+if [ "${OOZIE_USE_TOMCAT}" = "1" ]; then
+ export CATALINA_OPTS="${CATALINA_OPTS} -Dderby.stream.error.file=${OOZIE_LOG}/derby.log"
+fi
print
+
+setup_ooziedb() {
+ echo "Setting up oozie DB"
+ ${BASEDIR}/bin/ooziedb.sh create -run
+ if [ "$?" -ne "0" ]; then
+ exit -1
+ fi
+ echo
+}
+
+if [ "${JAVA_HOME}" != "" ]; then
+ JAVA_BIN=${JAVA_HOME}/bin/java
+else
+ JAVA_BIN=java
+fi
+
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/distro/src/main/bin/oozie-tomcat-server.sh
----------------------------------------------------------------------
diff --git a/distro/src/main/bin/oozie-tomcat-server.sh b/distro/src/main/bin/oozie-tomcat-server.sh
new file mode 100644
index 0000000..18dd0f6
--- /dev/null
+++ b/distro/src/main/bin/oozie-tomcat-server.sh
@@ -0,0 +1,89 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+CATALINA=${OOZIE_CATALINA_HOME:-${BASEDIR}/oozie-server}/bin/catalina.sh
+
+setup_catalina_opts() {
+ # The Java System properties 'oozie.http.port' and 'oozie.https.port' are not
+ # used by Oozie, they are used in Tomcat's server.xml configuration file
+ #
+ echo "Using CATALINA_OPTS: ${CATALINA_OPTS}"
+
+ catalina_opts="-Doozie.home.dir=${OOZIE_HOME}";
+ catalina_opts="${catalina_opts} -Doozie.config.dir=${OOZIE_CONFIG}";
+ catalina_opts="${catalina_opts} -Doozie.log.dir=${OOZIE_LOG}";
+ catalina_opts="${catalina_opts} -Doozie.data.dir=${OOZIE_DATA}";
+ catalina_opts="${catalina_opts} -Doozie.instance.id=${OOZIE_INSTANCE_ID}"
+
+ catalina_opts="${catalina_opts} -Doozie.config.file=${OOZIE_CONFIG_FILE}";
+
+ catalina_opts="${catalina_opts} -Doozie.log4j.file=${OOZIE_LOG4J_FILE}";
+ catalina_opts="${catalina_opts} -Doozie.log4j.reload=${OOZIE_LOG4J_RELOAD}";
+
+ catalina_opts="${catalina_opts} -Doozie.http.hostname=${OOZIE_HTTP_HOSTNAME}";
+ catalina_opts="${catalina_opts} -Doozie.admin.port=${OOZIE_ADMIN_PORT}";
+ catalina_opts="${catalina_opts} -Doozie.http.port=${OOZIE_HTTP_PORT}";
+ catalina_opts="${catalina_opts} -Doozie.https.port=${OOZIE_HTTPS_PORT}";
+ catalina_opts="${catalina_opts} -Doozie.base.url=${OOZIE_BASE_URL}";
+ catalina_opts="${catalina_opts} -Doozie.https.keystore.file=${OOZIE_HTTPS_KEYSTORE_FILE}";
+ catalina_opts="${catalina_opts} -Doozie.https.keystore.pass=${OOZIE_HTTPS_KEYSTORE_PASS}";
+
+ # add required native libraries such as compression codecs
+ catalina_opts="${catalina_opts} -Djava.library.path=${JAVA_LIBRARY_PATH}";
+
+ echo "Adding to CATALINA_OPTS: ${catalina_opts}"
+
+ export CATALINA_OPTS="${CATALINA_OPTS} ${catalina_opts}"
+}
+
+setup_oozie() {
+ if [ ! -e "${CATALINA_BASE}/webapps/oozie.war" ]; then
+ echo "WARN: Oozie WAR has not been set up at '${CATALINA_BASE}/webapps', doing default set up"
+ ${BASEDIR}/bin/oozie-setup.sh prepare-war
+ if [ "$?" -ne "0" ]; then
+ exit -1
+ fi
+ fi
+ echo
+}
+
+tomcat_main() {
+ source ${BASEDIR}/bin/oozie-sys.sh
+
+ #Create webapp directory from war if lib doesn't exist
+ if [ ! -d "${BASEDIR}/embedded-oozie-server/webapp" ]; then
+ unzip "${BASEDIR}/oozie.war" -d "${BASEDIR}/embedded-oozie-server/webapp" > /dev/null
+ fi
+
+ actionCmd=$1
+ case $actionCmd in
+ (start|run)
+ setup_catalina_opts
+ setup_oozie
+ setup_ooziedb
+ #TODO setup default oozie sharelib
+ ;;
+ (stop)
+ setup_catalina_opts
+
+ # A bug in catalina.sh script does not use CATALINA_OPTS for stopping the server
+ export JAVA_OPTS=${CATALINA_OPTS}
+ ;;
+ esac
+ exec $CATALINA $actionCmd "$@"
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/distro/src/main/bin/oozied.sh
----------------------------------------------------------------------
diff --git a/distro/src/main/bin/oozied.sh b/distro/src/main/bin/oozied.sh
index a869c3d..462ba76 100644
--- a/distro/src/main/bin/oozied.sh
+++ b/distro/src/main/bin/oozied.sh
@@ -17,14 +17,6 @@
# limitations under the License.
#
-if [ $# -le 0 ]; then
- echo "Usage: oozied.sh (start|stop|run) [<catalina-args...>]"
- exit 1
-fi
-
-actionCmd=$1
-shift
-
# resolve links - $0 may be a softlink
PRG="${0}"
@@ -41,76 +33,28 @@ done
BASEDIR=`dirname ${PRG}`
BASEDIR=`cd ${BASEDIR}/..;pwd`
-source ${BASEDIR}/bin/oozie-sys.sh
-
-CATALINA=${OOZIE_CATALINA_HOME:-${BASEDIR}/oozie-server}/bin/catalina.sh
-
-setup_catalina_opts() {
- # The Java System properties 'oozie.http.port' and 'oozie.https.port' are not
- # used by Oozie, they are used in Tomcat's server.xml configuration file
- #
- echo "Using CATALINA_OPTS: ${CATALINA_OPTS}"
-
- catalina_opts="-Doozie.home.dir=${OOZIE_HOME}";
- catalina_opts="${catalina_opts} -Doozie.config.dir=${OOZIE_CONFIG}";
- catalina_opts="${catalina_opts} -Doozie.log.dir=${OOZIE_LOG}";
- catalina_opts="${catalina_opts} -Doozie.data.dir=${OOZIE_DATA}";
- catalina_opts="${catalina_opts} -Doozie.instance.id=${OOZIE_INSTANCE_ID}"
-
- catalina_opts="${catalina_opts} -Doozie.config.file=${OOZIE_CONFIG_FILE}";
-
- catalina_opts="${catalina_opts} -Doozie.log4j.file=${OOZIE_LOG4J_FILE}";
- catalina_opts="${catalina_opts} -Doozie.log4j.reload=${OOZIE_LOG4J_RELOAD}";
-
- catalina_opts="${catalina_opts} -Doozie.http.hostname=${OOZIE_HTTP_HOSTNAME}";
- catalina_opts="${catalina_opts} -Doozie.admin.port=${OOZIE_ADMIN_PORT}";
- catalina_opts="${catalina_opts} -Doozie.http.port=${OOZIE_HTTP_PORT}";
- catalina_opts="${catalina_opts} -Doozie.https.port=${OOZIE_HTTPS_PORT}";
- catalina_opts="${catalina_opts} -Doozie.base.url=${OOZIE_BASE_URL}";
- catalina_opts="${catalina_opts} -Doozie.https.keystore.file=${OOZIE_HTTPS_KEYSTORE_FILE}";
- catalina_opts="${catalina_opts} -Doozie.https.keystore.pass=${OOZIE_HTTPS_KEYSTORE_PASS}";
-
- # add required native libraries such as compression codecs
- catalina_opts="${catalina_opts} -Djava.library.path=${JAVA_LIBRARY_PATH}";
-
- echo "Adding to CATALINA_OPTS: ${catalina_opts}"
-
- export CATALINA_OPTS="${CATALINA_OPTS} ${catalina_opts}"
-}
-
-setup_oozie() {
- if [ ! -e "${CATALINA_BASE}/webapps/oozie.war" ]; then
- echo "WARN: Oozie WAR has not been set up at '${CATALINA_BASE}/webapps', doing default set up"
- ${BASEDIR}/bin/oozie-setup.sh prepare-war
- if [ "$?" != "0" ]; then
- exit -1
- fi
- fi
- echo
-}
+if [ -e "${BASEDIR}/oozie-server" ]; then
+ export OOZIE_USE_TOMCAT=1
+else
+ export OOZIE_USE_TOMCAT=0
+fi
-setup_ooziedb() {
- echo "Setting up oozie DB"
- ${BASEDIR}/bin/ooziedb.sh create -run
- if [ "$?" != "0" ]; then
- exit -1
+if [ $# -le 0 ]; then
+ if [ "${OOZIE_USE_TOMCAT}" -eq "1" ]; then
+ echo "Usage: oozied.sh (start|stop|run) [<catalina-args...>]"
+ else
+ echo "Usage: oozied.sh (start|stop|run)"
fi
- echo
-}
-
-case $actionCmd in
- (start|run)
- setup_catalina_opts
- setup_oozie
- setup_ooziedb
- #TODO setup default oozie sharelib
- ;;
- (stop)
- setup_catalina_opts
+ exit 1
+fi
- # A bug in catalina.sh script does not use CATALINA_OPTS for stopping the server
- export JAVA_OPTS=${CATALINA_OPTS}
- ;;
-esac
+actionCmd=$1
+shift
-exec $CATALINA $actionCmd "$@"
+if [ "${OOZIE_USE_TOMCAT}" == "1" ]; then
+ source ${BASEDIR}/bin/oozie-tomcat-server.sh
+ tomcat_main $actionCmd
+else
+ source ${BASEDIR}/bin/oozie-jetty-server.sh
+ jetty_main $actionCmd
+fi
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index a3db3da..c9a19de 100644
--- a/pom.xml
+++ b/pom.xml
@@ -56,7 +56,7 @@
<failIfNoTests>false</failIfNoTests>
<test.timeout>5400</test.timeout>
-
+ <clover.version>4.0.6</clover.version>
<!-- platform encoding override -->
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
@@ -105,7 +105,9 @@
<distcp.version>${hadoop.version}</distcp.version>
<hadoop.auth.version>${hadoop.version}</hadoop.auth.version>
- <!-- Tomcat version -->
+ <jetty.version>9.2.19.v20160908</jetty.version>
+
+ <!-- Tomcat version -->
<tomcat.version>6.0.44</tomcat.version>
<jline.version>0.9.94</jline.version>
<openjpa.version>2.4.1</openjpa.version>
@@ -130,6 +132,7 @@
<module>docs</module>
<module>tools</module>
<module>minitest</module>
+ <module>server</module>
<module>distro</module>
<module>zookeeper-security-tests</module>
</modules>
@@ -810,6 +813,12 @@
</dependency>
<dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+
+ <dependency>
<groupId>org.apache.pig</groupId>
<artifactId>pig</artifactId>
<version>${pig.version}</version>
@@ -1545,6 +1554,58 @@
<artifactId>gson</artifactId>
<version>2.7</version>
</dependency>
+ <dependency>
+ <groupId>com.google.inject</groupId>
+ <artifactId>guice</artifactId>
+ <version>3.0</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlet</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-http</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-rewrite</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-annotations</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-webapp</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>apache-jsp</artifactId>
+ <type>jar</type>
+ <version>${jetty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-plus</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
</dependencies>
</dependencyManagement>
@@ -1659,6 +1720,7 @@
<plugin>
<groupId>com.atlassian.maven.plugins</groupId>
<artifactId>maven-clover2-plugin</artifactId>
+ <version>${clover.version}</version>
<configuration>
<licenseLocation>${clover.license}</licenseLocation>
<generateXml>true</generateXml>
@@ -1959,5 +2021,11 @@
<spark.bagel.version>1.6.2</spark.bagel.version>
</properties>
</profile>
+ <profile>
+ <id>tomcat</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ </profile>
</profiles>
</project>
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 70ffaa6..3071c7b 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
-- Oozie 4.4.0 release (trunk - unreleased)
+OOZIE-2666 Support embedding Jetty into Oozie (asasvari via rkanter)
OOZIE-1459 Remove the version in the child poms for maven-antrun-plugin (Jan Hentschel via rkanter)
OOZIE-2225 Add wild card filter for gathering jobs (sai-krish via rkanter)
OOZIE-2536 Hadoop's cleanup of local directory in uber mode causing failures (satishsaley via rohini)
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/pom.xml
----------------------------------------------------------------------
diff --git a/server/pom.xml b/server/pom.xml
new file mode 100644
index 0000000..a336aa8
--- /dev/null
+++ b/server/pom.xml
@@ -0,0 +1,257 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns="http://maven.apache.org/POM/4.0.0"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.oozie</groupId>
+ <artifactId>oozie-main</artifactId>
+ <version>4.4.0-SNAPSHOT</version>
+ </parent>
+ <groupId>org.apache.oozie</groupId>
+ <artifactId>oozie-server</artifactId>
+ <version>4.4.0-SNAPSHOT</version>
+ <description>Apache Oozie Server</description>
+ <name>Apache Oozie Server</name>
+ <packaging>jar</packaging>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlet</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-http</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-rewrite</artifactId>
+ </dependency>
+
+ <!-- begin JSP support -->
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-annotations</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-webapp</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>apache-jsp</artifactId>
+ <type>jar</type>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-plus</artifactId>
+ </dependency>
+ <!-- end JSP support -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.oozie</groupId>
+ <artifactId>oozie-client</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-simple</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.oozie</groupId>
+ <artifactId>oozie-core</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.oozie</groupId>
+ <artifactId>hadoop-auth</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.oozie</groupId>
+ <artifactId>oozie-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.eclipse.jetty.aggregate</groupId>
+ <artifactId>jetty-all</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>com.google.inject</groupId>
+ <artifactId>guice</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.openjpa</groupId>
+ <artifactId>openjpa-maven-plugin</artifactId>
+ </plugin>
+
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <configuration>
+ <descriptors>
+ <descriptor>../src/main/assemblies/empty.xml</descriptor>
+ </descriptors>
+ </configuration>
+ </plugin>
+
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <configuration>
+ <archive>
+ <manifest>
+ <mainClass>org.apache.oozie.server.EmbeddedOozieServer</mainClass>
+ </manifest>
+ </archive>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.10</version>
+ <executions>
+ <execution>
+ <id>copy-dependencies</id>
+ <phase>package</phase>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ <configuration>
+ <artifactItems>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-http</artifactId>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-io</artifactId>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-webapp</artifactId>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlet</artifactId>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-security</artifactId>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-jsp</artifactId>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-plus</artifactId>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-annotations</artifactId>
+ </artifactItem>
+
+ <!-- JSP support -->
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-annotations</artifactId>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-webapp</artifactId>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>apache-jsp</artifactId>
+ <type>jar</type>
+ </artifactItem>
+ <artifactItem>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-plus</artifactId>
+ </artifactItem>
+
+ <artifactItem>
+ <groupId>org.apache.oozie</groupId>
+ <artifactId>webapp</artifactId>
+ </artifactItem>
+
+ </artifactItems>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/assemblies/empty.xml
----------------------------------------------------------------------
diff --git a/server/src/main/assemblies/empty.xml b/server/src/main/assemblies/empty.xml
new file mode 100644
index 0000000..17ff68a
--- /dev/null
+++ b/server/src/main/assemblies/empty.xml
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<assembly>
+ <id>empty</id>
+ <formats/>
+</assembly>
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/EmbeddedOozieServer.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/EmbeddedOozieServer.java b/server/src/main/java/org/apache/oozie/server/EmbeddedOozieServer.java
new file mode 100644
index 0000000..b7918b7
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/EmbeddedOozieServer.java
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server;
+
+import com.google.common.base.Preconditions;
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.oozie.server.guice.OozieGuiceModule;
+import org.apache.oozie.service.ConfigurationService;
+import org.apache.oozie.service.ServiceException;
+import org.apache.oozie.service.Services;
+import org.apache.oozie.util.ConfigUtils;
+import org.eclipse.jetty.rewrite.handler.RewriteHandler;
+import org.eclipse.jetty.security.ConstraintSecurityHandler;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.HandlerCollection;
+import org.eclipse.jetty.servlet.ErrorPageErrorHandler;
+import org.eclipse.jetty.webapp.WebAppContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+/**
+ * Class to start Oozie inside an embedded Jetty server.
+ */
+public class EmbeddedOozieServer {
+ private static final Logger LOG = LoggerFactory.getLogger(EmbeddedOozieServer.class);
+ protected Server server;
+ private int httpPort;
+ private int httpsPort;
+ private final WebAppContext servletContextHandler;
+ private final ServletMapper oozieServletMapper;
+ private final FilterMapper oozieFilterMapper;
+ private JspHandler jspHandler;
+ private Services serviceController;
+ private SSLServerConnectorFactory sslServerConnectorFactory;
+ private Configuration conf;
+ private final RewriteHandler oozieRewriteHandler;
+ private final ConstraintSecurityHandler constraintSecurityHandler;
+
+ /**
+ * Construct Oozie server
+ * @param server jetty server to be embedded
+ * @param jspHandler handler responsible for setting webapp context for JSP
+ * @param serviceController controller for Oozie services; must be already initialized
+ * @param sslServerConnectorFactory factory to create server connector configured for SSL
+ * @param oozieRewriteHandler URL rewriter
+ * @param servletContextHandler main web application context handler
+ * @param oozieServletMapper maps servlets to URLs
+ * @param oozieFilterMapper maps filters
+ * @param constraintSecurityHandler security handler wrapping the web app context when SSL is enabled
+ */
+ @Inject
+ public EmbeddedOozieServer(final Server server,
+ final JspHandler jspHandler,
+ final Services serviceController,
+ final SSLServerConnectorFactory sslServerConnectorFactory,
+ final RewriteHandler oozieRewriteHandler,
+ final WebAppContext servletContextHandler,
+ final ServletMapper oozieServletMapper,
+ final FilterMapper oozieFilterMapper,
+ final ConstraintSecurityHandler constraintSecurityHandler)
+ {
+ this.constraintSecurityHandler = constraintSecurityHandler;
+ this.serviceController = Preconditions.checkNotNull(serviceController, "serviceController is null");
+ this.jspHandler = Preconditions.checkNotNull(jspHandler, "jspHandler is null");
+ this.sslServerConnectorFactory = Preconditions.checkNotNull(sslServerConnectorFactory,
+ "sslServerConnectorFactory is null");
+ this.server = Preconditions.checkNotNull(server, "server is null");
+ this.oozieRewriteHandler = Preconditions.checkNotNull(oozieRewriteHandler, "rewriter is null");
+ this.servletContextHandler = Preconditions.checkNotNull(servletContextHandler, "servletContextHandler is null");
+ this.oozieServletMapper = Preconditions.checkNotNull(oozieServletMapper, "oozieServletMapper is null");
+ this.oozieFilterMapper = Preconditions.checkNotNull(oozieFilterMapper, "oozieFilterMapper is null");
+ }
+
+ /**
+ * Set up the Oozie server by configuring jetty server settings and starts Oozie services
+ *
+ * @throws URISyntaxException
+ * @throws IOException
+ * @throws ServiceException
+ */
+ public void setup() throws URISyntaxException, IOException, ServiceException {
+ conf = serviceController.get(ConfigurationService.class).getConf();
+
+ httpPort = getConfigPort(ConfigUtils.OOZIE_HTTP_PORT);
+
+ HttpConfiguration httpConfiguration = new HttpConfigurationWrapper(conf).getDefaultHttpConfiguration();
+
+ ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(httpConfiguration));
+ connector.setPort(httpPort);
+ connector.setHost(conf.get(ConfigUtils.OOZIE_HTTP_HOSTNAME));
+
+ HandlerCollection handlerCollection = new HandlerCollection();
+
+ if (isSecured()) {
+ httpsPort = getConfigPort(ConfigUtils.OOZIE_HTTPS_PORT);
+ ServerConnector sslConnector = sslServerConnectorFactory.createSecureServerConnector(httpsPort, conf, server);
+ server.setConnectors(new Connector[]{connector, sslConnector});
+ constraintSecurityHandler.setHandler(servletContextHandler);
+ handlerCollection.addHandler(constraintSecurityHandler);
+ }
+ else {
+ server.setConnectors(new Connector[]{connector});
+ }
+
+ servletContextHandler.setContextPath("/oozie/");
+ oozieServletMapper.mapOozieServlets();
+ oozieFilterMapper.addFilters();
+
+ servletContextHandler.setParentLoaderPriority(true);
+ jspHandler.setupWebAppContext(servletContextHandler);
+
+ addErrorHandler();
+
+ handlerCollection.addHandler(servletContextHandler);
+ handlerCollection.addHandler(oozieRewriteHandler);
+ server.setHandler(handlerCollection);
+ }
+
+ private void addErrorHandler() {
+ ErrorPageErrorHandler errorHandler = new ErrorPageErrorHandler();
+ errorHandler.addErrorPage(404, "/404.html");
+ errorHandler.addErrorPage(403, "/403.html");
+ servletContextHandler.setErrorHandler(errorHandler);
+ }
+
+ private int getConfigPort(String confVar) {
+ String confHttpPort = conf.get(confVar);
+ int port;
+ try {
+ port = Integer.parseInt(confHttpPort);
+ }
+ catch (final NumberFormatException nfe) {
+ throw new NumberFormatException(String.format("Port number for '%s' (\"%s\") is not an integer.",
+ confVar, confHttpPort));
+ }
+ return port;
+ }
+
+ private boolean isSecured() {
+ String isSSLEnabled = conf.get("oozie.https.enabled");
+ LOG.info("Server started with oozie.https.enabled = " + isSSLEnabled);
+ return isSSLEnabled != null && Boolean.valueOf(isSSLEnabled);
+ }
+
+
+ public void start() throws Exception {
+ server.start();
+ LOG.info("Server started.");
+ }
+
+ public void join() throws InterruptedException {
+ server.join();
+ }
+
+ public void addShutdownHook() {
+ Runtime.getRuntime().addShutdownHook(new Thread() {
+ public void run() {
+ LOG.info("Shutting down.");
+ serviceController.destroy();
+ LOG.info("Oozie services stopped.");
+ }
+ });
+ }
+
+ public static void main(String[] args) throws Exception {
+ final Injector guiceInjector = Guice.createInjector(new OozieGuiceModule());
+
+ final EmbeddedOozieServer embeddedOozieServer = guiceInjector.getInstance(EmbeddedOozieServer.class);
+
+ embeddedOozieServer.setup();
+ embeddedOozieServer.addShutdownHook();
+ try {
+ embeddedOozieServer.start();
+ } catch (Exception e) {
+ LOG.error("Could not start EmbeddedOozieServer!", e);
+ System.exit(1);
+ }
+ embeddedOozieServer.join();
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/FilterMapper.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/FilterMapper.java b/server/src/main/java/org/apache/oozie/server/FilterMapper.java
new file mode 100644
index 0000000..bd78617
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/FilterMapper.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server;
+
+import com.google.common.base.Preconditions;
+import com.google.inject.Inject;
+import org.apache.oozie.servlet.AuthFilter;
+import org.apache.oozie.servlet.HostnameFilter;
+import org.eclipse.jetty.servlet.FilterHolder;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.webapp.WebAppContext;
+
+import javax.servlet.DispatcherType;
+import java.util.EnumSet;
+
+public class FilterMapper {
+ private final WebAppContext servletContextHandler;
+
+ @Inject
+ public FilterMapper(final WebAppContext servletContextHandler) {
+ this.servletContextHandler = Preconditions.checkNotNull(servletContextHandler, "ServletContextHandler is null");
+ }
+
+ /**
+ * Map filters to endpoints. Make sure it in sync with ServletMapper when making changes
+ * */
+ void addFilters() {
+ mapFilter(new FilterHolder(new HostnameFilter()), "/*");
+
+ FilterHolder authFilter = new FilterHolder(new AuthFilter());
+ mapFilter(authFilter, "/versions/*");
+ mapFilter(authFilter, "/v0/*");
+ mapFilter(authFilter, "/v1/*");
+ mapFilter(authFilter, "/v2/*");
+ mapFilter(authFilter, "/index.jsp");
+ mapFilter(authFilter, "/admin/*");
+ mapFilter(authFilter, "/*.js");
+ mapFilter(authFilter, "/ext-2.2/*");
+ mapFilter(authFilter, "/docs/*");
+ }
+
+ private void mapFilter(FilterHolder authFilter, String pathSpec) {
+ servletContextHandler.addFilter(authFilter, pathSpec, EnumSet.of(DispatcherType.REQUEST));
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/HttpConfigurationWrapper.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/HttpConfigurationWrapper.java b/server/src/main/java/org/apache/oozie/server/HttpConfigurationWrapper.java
new file mode 100644
index 0000000..0341f9c
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/HttpConfigurationWrapper.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.server;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.eclipse.jetty.server.HttpConfiguration;
+
+/**
+ * Class that wraps HTTP configuration settings.
+ */
+public class HttpConfigurationWrapper {
+ public static final String OOZIE_HTTP_REQUEST_HEADER_SIZE = "oozie.http.request.header.size";
+ public static final String OOZIE_HTTP_RESPONSE_HEADER_SIZE = "oozie.http.response.header.size";
+ private Configuration conf;
+
+ public HttpConfigurationWrapper(Configuration conf) {
+ this.conf = Preconditions.checkNotNull(conf, "conf");
+ }
+
+ /**
+ * Set up and return default HTTP configuration for the Oozie server
+ * @return default HttpConfiguration with the configured request and response header size
+ */
+ public HttpConfiguration getDefaultHttpConfiguration() {
+ HttpConfiguration httpConfiguration = new HttpConfiguration();
+ httpConfiguration.setRequestHeaderSize(
+ getConfigHeaderSize(OOZIE_HTTP_REQUEST_HEADER_SIZE));
+ httpConfiguration.setResponseHeaderSize(
+ getConfigHeaderSize(OOZIE_HTTP_RESPONSE_HEADER_SIZE));
+ httpConfiguration.setSendServerVersion(false);
+ httpConfiguration.setSendXPoweredBy(false);
+ return httpConfiguration;
+ }
+
+ private int getConfigHeaderSize(String confVar) {
+ String confHeaderSize = conf.get(confVar);
+ int headerSize;
+ try {
+ headerSize = Integer.parseInt(confHeaderSize);
+ }
+ catch (final NumberFormatException nfe) {
+ throw new NumberFormatException(String.format("Header size for %s \"%s\" ( '%s') is not an integer.",
+ confVar, confVar, confHeaderSize));
+ }
+ return headerSize;
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ca01c283/server/src/main/java/org/apache/oozie/server/JspHandler.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/JspHandler.java b/server/src/main/java/org/apache/oozie/server/JspHandler.java
new file mode 100644
index 0000000..9658fd6
--- /dev/null
+++ b/server/src/main/java/org/apache/oozie/server/JspHandler.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.server;
+
+import com.google.common.base.Preconditions;
+import org.apache.tomcat.InstanceManager;
+import org.apache.tomcat.SimpleInstanceManager;
+import org.eclipse.jetty.annotations.ServletContainerInitializersStarter;
+import org.eclipse.jetty.apache.jsp.JettyJasperInitializer;
+import org.eclipse.jetty.jsp.JettyJspServlet;
+import org.eclipse.jetty.plus.annotation.ContainerInitializer;
+import org.eclipse.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.webapp.WebAppContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Helper class that is used to handle JSP requests in Oozie server.
+ */
+public class JspHandler {
+ private static final Logger LOG = LoggerFactory.getLogger(JspHandler.class);
+ private final File scratchDir;
+ private final WebRootResourceLocator webRootResourceLocator;
+
+ public JspHandler(final File scratchDir, final WebRootResourceLocator webRootResourceLocator) {
+ this.scratchDir = scratchDir;
+ this.webRootResourceLocator = webRootResourceLocator;
+ }
+
+ /**
+ * Establish Scratch directory for the servlet context (used by JSP compilation)
+ */
+ private File getScratchDir() throws IOException
+ {
+ if (scratchDir.exists()) {
+ LOG.info(String.format("Scratch directory exists and will be reused: %s", scratchDir.getAbsolutePath()));
+ return scratchDir;
+ }
+
+ if (!scratchDir.mkdirs()) {
+ throw new IOException("Unable to create scratch directory: " + scratchDir);
+ }
+
+ LOG.info(String.format("Scratch directory created: %s", scratchDir.getAbsolutePath()));
+ return scratchDir;
+ }
+
+ /**
+ * Setup the basic application "context" for this application at "/"
+ * This is also known as the handler tree (in jetty speak)
+ */
+ public void setupWebAppContext(WebAppContext servletContextHandler)
+ throws IOException, URISyntaxException
+ {
+ Preconditions.checkNotNull(servletContextHandler, "servletContextHandler is null");
+
+ File scratchDir = getScratchDir();
+ servletContextHandler.setAttribute("javax.servlet.context.tempdir", scratchDir);
+ servletContextHandler.setAttribute("org.eclipse.jetty.server.webapp.ContainerIncludeJarPattern",
+ ".*/[^/]*servlet-api-[^/]*\\.jar$|.*/javax.servlet.jsp.jstl-.*\\.jar$|.*/.*taglibs.*\\.jar$");
+ URI baseUri = webRootResourceLocator.getWebRootResourceUri();
+ servletContextHandler.setResourceBase(baseUri.toASCIIString());
+ servletContextHandler.setAttribute("org.eclipse.jetty.containerInitializers", jspInitializers());
+ servletContextHandler.setAttribute(InstanceManager.class.getName(), new SimpleInstanceManager());
+ servletContextHandler.addBean(new ServletContainerInitializersStarter(servletContextHandler), true);
+ servletContextHandler.setClassLoader(getUrlClassLoader());
+
+ servletContextHandler.addServlet(jspServletHolder(), "*.jsp");
+
+ servletContextHandler.addServlet(jspFileMappedServletHolder(), "/oozie/");
+ servletContextHandler.addServlet(defaultServletHolder(baseUri), "/");
+ }
+
+ /**
+ * Ensure the jsp engine is initialized correctly
+ */
+ private List<ContainerInitializer> jspInitializers()
+ {
+ JettyJasperInitializer sci = new JettyJasperInitializer();
+ ContainerInitializer initializer = new ContainerInitializer(sci, null);
+ List<ContainerInitializer> initializers = new ArrayList<>();
+ initializers.add(initializer);
+ return initializers;
+ }
+
+ /**
+ * Set Classloader of Context to be sane (needed for JSTL)
+ * JSP requires a non-System classloader, this simply wraps the
+ * embedded System classloader in a way that makes it suitable
+ * for JSP to use
+ */
+ private ClassLoader getUrlClassLoader()
+ {
+ ClassLoader jspClassLoader = new URLClassLoader(new URL[0], this.getClass().getClassLoader());
+ return jspClassLoader;
+ }
+
+ /**
+ * Create JSP Servlet (must be named "jsp")
+ */
+ private ServletHolder jspServletHolder()
+ {
+ ServletHolder holderJsp = new ServletHolder("jsp", JettyJspServlet.class);
+ holderJsp.setInitOrder(0);
+ holderJsp.setInitParameter("logVerbosityLevel", "DEBUG");
+ holderJsp.setInitParameter("fork", "false");
+ holderJsp.setInitParameter("xpoweredBy", "false");
+ holderJsp.setInitParameter("compilerTargetVM", "1.7");
+ holderJsp.setInitParameter("compilerSourceVM", "1.7");
+ holderJsp.setInitParameter("keepgenerated", "true");
+ return holderJsp;
+ }
+
+ /**
+ * Create Example of mapping jsp to path spec
+ */
+ private ServletHolder jspFileMappedServletHolder()
+ {
+ ServletHolder holderAltMapping = new ServletHolder();
+ holderAltMapping.setName("index.jsp");
+ holderAltMapping.setForcedPath("/index.jsp");
+ return holderAltMapping;
+ }
+
+ /**
+ * Create Default Servlet (must be named "default")
+ */
+ private ServletHolder defaultServletHolder(URI baseUri)
+ {
+ ServletHolder holderDefault = new ServletHolder("default", DefaultServlet.class);
+ holderDefault.setInitParameter("resourceBase", baseUri.toASCIIString());
+ holderDefault.setInitParameter("dirAllowed", "true");
+ return holderDefault;
+ }
+}
[18/48] oozie git commit: OOZIE-2737 testConfigDefaultPropsToAction
is flaky (satishsaley via rohini)
Posted by pb...@apache.org.
OOZIE-2737 testConfigDefaultPropsToAction is flaky (satishsaley via rohini)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/1d54e476
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/1d54e476
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/1d54e476
Branch: refs/heads/oya
Commit: 1d54e476f7abe0317cffa526d9b42e21e2497fed
Parents: b327fdb
Author: Rohini Palaniswamy <ro...@apache.org>
Authored: Fri Nov 18 12:44:12 2016 -0800
Committer: Rohini Palaniswamy <ro...@apache.org>
Committed: Fri Nov 18 12:44:12 2016 -0800
----------------------------------------------------------------------
release-log.txt | 1 +
.../hadoop/TestMapReduceActionExecutor.java | 23 ++++++++++++++------
2 files changed, 17 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/1d54e476/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index caceaa9..acfac17 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
-- Oozie 4.4.0 release (trunk - unreleased)
+OOZIE-2737 testConfigDefaultPropsToAction is flaky (satishsaley via rohini)
OOZIE-2666 Support embedding Jetty into Oozie (asasvari via rkanter)
OOZIE-1459 Remove the version in the child poms for maven-antrun-plugin (Jan Hentschel via rkanter)
OOZIE-2225 Add wild card filter for gathering jobs (sai-krish,pbacsko via rkanter,rohini)
http://git-wip-us.apache.org/repos/asf/oozie/blob/1d54e476/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
index 2713526..c7860be 100644
--- a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
+++ b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
@@ -33,6 +33,7 @@ import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.WorkflowAction;
+import org.apache.oozie.client.WorkflowAction.Status;
import org.apache.oozie.command.wf.StartXCommand;
import org.apache.oozie.command.wf.SubmitXCommand;
import org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor;
@@ -138,11 +139,10 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
String wfId = new SubmitXCommand(conf).call();
new StartXCommand(wfId).call();
- sleep(3000);
+ waitForWorkflowAction(wfId + "@mr-node");
WorkflowActionBean mrAction = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION,
wfId + "@mr-node");
-
// check NN and JT settings
Element eAction = XmlUtils.parseXml(mrAction.getConf());
Element eConf = eAction.getChild("name-node", eAction.getNamespace());
@@ -193,12 +193,12 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
writer = new FileWriter(getTestCaseDir() + "/workflow.xml");
IOUtils.copyCharStream(new StringReader(wfXml), writer);
- wfId = new SubmitXCommand(conf).call();
- new StartXCommand(wfId).call();
- sleep(3000);
+ wfId = new SubmitXCommand(conf).call();
+ new StartXCommand(wfId).call();
+ waitForWorkflowAction(wfId + "@mr-node");
- mrAction = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION,
- wfId + "@mr-node");
+ mrAction = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION,
+ wfId + "@mr-node");
// check param
eAction = XmlUtils.parseXml(mrAction.getConf());
@@ -1362,5 +1362,14 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
}
}
+ private void waitForWorkflowAction(final String actionId) {
+ waitFor(3 * 60 * 1000, new Predicate() {
+ public boolean evaluate() throws Exception {
+ WorkflowActionBean mrAction = WorkflowActionQueryExecutor.getInstance()
+ .get(WorkflowActionQuery.GET_ACTION, actionId);
+ return mrAction.inTerminalState() || mrAction.getStatus() == Status.RUNNING;
+ }
+ });
+ }
}
[44/48] oozie git commit: OOZIE-2745 test-patch should also list the
failed tests (gezapeti via rkanter)
Posted by pb...@apache.org.
OOZIE-2745 test-patch should also list the failed tests (gezapeti via rkanter)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/62c060c3
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/62c060c3
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/62c060c3
Branch: refs/heads/oya
Commit: 62c060c34fdb5c1298829c6175caf5d645bb8e7d
Parents: ab6ae75
Author: Robert Kanter <rk...@cloudera.com>
Authored: Mon Dec 5 15:50:58 2016 -0800
Committer: Robert Kanter <rk...@cloudera.com>
Committed: Mon Dec 5 15:50:58 2016 -0800
----------------------------------------------------------------------
bin/test-patch-20-tests | 4 ++++
release-log.txt | 1 +
2 files changed, 5 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/62c060c3/bin/test-patch-20-tests
----------------------------------------------------------------------
diff --git a/bin/test-patch-20-tests b/bin/test-patch-20-tests
index 7595c7c..d096575 100755
--- a/bin/test-patch-20-tests
+++ b/bin/test-patch-20-tests
@@ -95,6 +95,7 @@ case $OP in
;;
report)
failedTests=` find . -name '*\.txt' | grep target/surefire-reports | xargs grep "<<< FAILURE" | grep -v "Tests run:" | sed 's/.*\.txt\://' | sed 's/ .*//'`
+ testsWithError=` find . -name '*\.txt' | grep target/surefire-reports | xargs grep "<<< ERROR" | grep -v "Tests run:" | sed 's/.*\.txt\://' | sed 's/ .*//'`
testsRun=`grep "Tests run:" ${TEMPDIR}/${TASKNAME}.out | grep -v " Time elapsed:" | awk '{print $3}' | sed 's/,//' | awk 'BEGIN {count=0} {count=count+$1} END {print count}'`
testsFailed=`grep "Tests run:" ${TEMPDIR}/${TASKNAME}.out | grep -v " Time elapsed:" | awk '{print $5}' | sed 's/,//' | awk 'BEGIN {count=0} {count=count+$1} END {print count}'`
testsErrors=`grep "Tests run:" ${TEMPDIR}/${TASKNAME}.out | grep -v " Time elapsed:" | awk '{print $7}' | sed 's/,//' | awk 'BEGIN {count=0} {count=count+$1} END {print count}'`
@@ -110,6 +111,9 @@ case $OP in
echo "" >> ${SUMMARYFILE}
echo "${failedTests}" | sed 's/^/. /' >> $SUMMARYFILE
echo "" >> ${SUMMARYFILE}
+ echo ". Tests failing with errors:" >> $SUMMARYFILE
+ echo "${testsWithError}" | sed 's/^/. /' >> $SUMMARYFILE
+ echo "" >> ${SUMMARYFILE}
else
if [[ "$testsExitCode" != "0" ]] ; then
echo "{color:red}-1 ${TASKNAME}{color} - patch does not compile, cannot run testcases" >> $SUMMARYFILE
http://git-wip-us.apache.org/repos/asf/oozie/blob/62c060c3/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index c3b7ab7..2fd869f 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
-- Oozie 4.4.0 release (trunk - unreleased)
+OOZIE-2745 test-patch should also list the failed tests (gezapeti via rkanter)
OOZIE-2740 oozie help misspelled coordinator (coordiantor) and retrieved (retreived) (gsohn via rkanter)
OOZIE-2690 OOZIE NPE while executing kill() (abhishekbafna via jaydeepvishwakarma)
OOZIE-2737 testConfigDefaultPropsToAction is flaky (satishsaley via rohini)
[19/48] oozie git commit: OOZIE-2690 OOZIE NPE while executing kill()
(abhishekbafna via jaydeepvishwakarma)
Posted by pb...@apache.org.
OOZIE-2690 OOZIE NPE while executing kill() (abhishekbafna via jaydeepvishwakarma)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/05337002
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/05337002
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/05337002
Branch: refs/heads/oya
Commit: 05337002fc50d78750979dc15ecea5ad80c14931
Parents: 1d54e47
Author: jvishwakarma <jv...@walmartlabs.com>
Authored: Mon Nov 21 12:01:39 2016 +0530
Committer: jvishwakarma <jv...@walmartlabs.com>
Committed: Mon Nov 21 12:01:39 2016 +0530
----------------------------------------------------------------------
.../org/apache/oozie/action/hadoop/JavaActionExecutor.java | 6 +++++-
.../apache/oozie/action/hadoop/MapReduceActionExecutor.java | 9 ++++-----
2 files changed, 9 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/05337002/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
index 65996d9..f315af7 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
@@ -1410,7 +1410,11 @@ public class JavaActionExecutor extends ActionExecutor {
}
protected RunningJob getRunningJob(Context context, WorkflowAction action, JobClient jobClient) throws Exception{
- RunningJob runningJob = jobClient.getJob(JobID.forName(action.getExternalId()));
+ String externalId = action.getExternalId();
+ RunningJob runningJob = null;
+ if (externalId != null) {
+ runningJob = jobClient.getJob(JobID.forName(externalId));
+ }
return runningJob;
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/05337002/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
index c36a675..55c9372 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
@@ -335,12 +335,11 @@ public class MapReduceActionExecutor extends JavaActionExecutor {
@Override
protected RunningJob getRunningJob(Context context, WorkflowAction action, JobClient jobClient) throws Exception{
-
- RunningJob runningJob;
+ RunningJob runningJob = null;
String jobId = getActualExternalId(action);
-
- runningJob = jobClient.getJob(JobID.forName(jobId));
-
+ if (jobId != null) {
+ runningJob = jobClient.getJob(JobID.forName(jobId));
+ }
return runningJob;
}
[27/48] oozie git commit: OOZIE-1986 more addendum fixes
Posted by pb...@apache.org.
OOZIE-1986 more addendum fixes
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/28ac9582
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/28ac9582
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/28ac9582
Branch: refs/heads/oya
Commit: 28ac958206c51002247f9ab308bf9f67208c1add
Parents: d586b7a
Author: Robert Kanter <rk...@cloudera.com>
Authored: Tue Nov 22 16:30:20 2016 -0800
Committer: Robert Kanter <rk...@cloudera.com>
Committed: Tue Nov 22 16:30:20 2016 -0800
----------------------------------------------------------------------
bin/test-patch-11-findbugs-diff | 28 ++++++++++++++++++++++++----
1 file changed, 24 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/28ac9582/bin/test-patch-11-findbugs-diff
----------------------------------------------------------------------
diff --git a/bin/test-patch-11-findbugs-diff b/bin/test-patch-11-findbugs-diff
index e8bfb3b..f7a4b2f 100755
--- a/bin/test-patch-11-findbugs-diff
+++ b/bin/test-patch-11-findbugs-diff
@@ -36,6 +36,7 @@ FINDBUGS_XML_NAME=findbugsXml.xml
cleanup_and_exit() {
remove_file_if_present "${DIFF_DIR}/${FINDBUGS_JAR}"
remove_file_if_present "${DIFF_DIR}/${FINDBUGS_JAR}.md5"
+ remove_file_if_present "${DIFF_DIR}/${FINDBUGS_JAR}.md5sum"
exit "$1"
}
@@ -177,16 +178,27 @@ download_and_check_findbugs_diff_jar() {
if hash md5 2>/dev/null; then
md5 -q "${DIFF_DIR}/${FINDBUGS_JAR}" > "${DIFF_DIR}/${FINDBUGS_JAR}.md5"
elif hash md5sum 2>/dev/null; then
- md5sum "${DIFF_DIR}/${FINDBUGS_JAR}" > "${DIFF_DIR}/${FINDBUGS_JAR}.md5"
+ md5Content=($(md5sum "${DIFF_DIR}/${FINDBUGS_JAR}"))
+ echo "${md5Content}" > "${DIFF_DIR}/${FINDBUGS_JAR}.md5sum"
else
echo "[ERROR] Neither md5 nor md5sum are present, cannot check FindBugs diff JAR"
echo "{color:red}-1{color} Neither md5 nor md5sum are present, cannot check FindBugs diff JAR." >> "${SUMMARYFILE}"
cleanup_and_exit 1
fi
- jarMd5DiffCount=$(grep -Fxvf "${BASH_DIR}/${FINDBUGS_JAR}.md5" "${DIFF_DIR}/${FINDBUGS_JAR}.md5" | wc -l)
+ if hash md5 2>/dev/null; then
+ jarMd5DiffCount=$(grep -Fxvf "${BASH_DIR}/${FINDBUGS_JAR}.md5" "${DIFF_DIR}/${FINDBUGS_JAR}.md5" | wc -l)
+ elif hash md5sum 2>/dev/null; then
+ jarMd5DiffCount=$(grep -Fxvf "${BASH_DIR}/${FINDBUGS_JAR}.md5sum" "${DIFF_DIR}/${FINDBUGS_JAR}.md5sum" | wc -l)
+ fi
if [ ${jarMd5DiffCount} -gt "0" ]; then
+ if hash md5 2>/dev/null; then
+ echo "[TRACE] md5 of FindBugs diff jar is " < cat ${DIFF_DIR}/${FINDBUGS_JAR}.md5
+ elif hash md5sum 2>/dev/null; then
+ echo "[TRACE] md5sum of FindBugs diff jar is " < cat ${DIFF_DIR}/${FINDBUGS_JAR}.md5sum
+ fi
+
echo "[ERROR] FindBugs diff JAR has a weird MD5 sum, rejecting"
echo "{color:red}-1{color} FindBugs diff JAR has a weird MD5 sum, rejecting." >> "${SUMMARYFILE}"
cleanup_and_exit 1
@@ -256,8 +268,16 @@ check_findbugs_diffs_and_create_reports() {
htmlFileName=${componentDir%%.xml}.html
componentDir=${componentDir%%/findbugs-new.xml}
- newBugTotalCount=$(xmllint --xpath "count(/BugCollection/BugInstance)" "${fn}")
- newBugBelowThresholdCount=$(xmllint --xpath "count(/BugCollection/BugInstance[@priority <= ${FINDBUGS_PRIORITY_THRESHOLD} or @rank <= ${FINDBUGS_RANK_THRESHOLD}])" "${fn}")
+ xmlLintXPathCompatible=$(xmllint | grep -e '\-\-xpath' | wc -l)
+ if [ "${xmlLintXPathCompatible}" -eq "0" ]; then
+ echo "[TRACE] Old XMLLib present, calling 'xmllint --shell' to get bug instance counts"
+ newBugTotalCount=$(xmllint --shell "${fn}" <<< 'xpath count(/BugCollection/BugInstance)' | grep -oE '[^ ]+$')
+ newBugBelowThresholdCount=$(xmllint --shell "${fn}" <<< 'xpath count(/BugCollection/BugInstance[@priority <= "${FINDBUGS_PRIORITY_THRESHOLD}" or @rank <= "${FINDBUGS_RANK_THRESHOLD}"])' | grep -oE '[^ ]+$')
+ else
+ echo "[TRACE] New XMLLib present, calling 'xmllint --xpath' to get bug instance counts"
+ newBugTotalCount=$(xmllint --xpath "count(/BugCollection/BugInstance)" "${fn}")
+ newBugBelowThresholdCount=$(xmllint --xpath "count(/BugCollection/BugInstance[@priority <= ${FINDBUGS_PRIORITY_THRESHOLD} or @rank <= ${FINDBUGS_RANK_THRESHOLD}])" "${fn}")
+ fi
belowThresholdCount=$((belowThresholdCount + newBugBelowThresholdCount))
totalCount=$((totalCount + newBugTotalCount))
[14/48] oozie git commit: OOZIE-2729 OYA: Use MiniYARNCluster in
tests. TODO: refactor XTestCase.
Posted by pb...@apache.org.
OOZIE-2729 OYA: Use MiniYARNCluster in tests. TODO: refactor XTestCase.
Change-Id: I520655abf645625a44cd7df88e435686fe04fe00
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/d5dcc5ce
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/d5dcc5ce
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/d5dcc5ce
Branch: refs/heads/oya
Commit: d5dcc5cec2e080413e2540f43d3877b4d56f99ad
Parents: 782837f
Author: Andras Piros <an...@cloudera.com>
Authored: Thu Nov 17 12:33:24 2016 +0100
Committer: Andras Piros <an...@cloudera.com>
Committed: Thu Nov 17 12:33:24 2016 +0100
----------------------------------------------------------------------
.../java/org/apache/oozie/test/XTestCase.java | 430 +++++++++----------
1 file changed, 215 insertions(+), 215 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/d5dcc5ce/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index fd6d4ad..ca3f883 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -27,14 +27,9 @@ import java.io.OutputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URL;
-import java.util.ArrayList;
-import java.util.EnumSet;
+import java.util.*;
import java.net.UnknownHostException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
import java.util.Map.Entry;
-import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
@@ -55,13 +50,14 @@ import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.spi.LoggingEvent;
@@ -147,24 +143,24 @@ public abstract class XTestCase extends TestCase {
OOZIE_SRC_DIR = OOZIE_SRC_DIR.getParentFile();
}
- String testPropsFile = System.getProperty(OOZIE_TEST_PROPERTIES, "test.properties");
- File file = new File(testPropsFile).isAbsolute()
- ? new File(testPropsFile) : new File(OOZIE_SRC_DIR, testPropsFile);
+ final String testPropsFile = System.getProperty(OOZIE_TEST_PROPERTIES, "test.properties");
+ final File file = new File(testPropsFile).isAbsolute()
+ ? new File(testPropsFile) : new File(OOZIE_SRC_DIR, testPropsFile);
if (file.exists()) {
System.out.println();
System.out.println("*********************************************************************************");
System.out.println("Loading test system properties from: " + file.getAbsolutePath());
System.out.println();
- Properties props = new Properties();
+ final Properties props = new Properties();
props.load(new FileReader(file));
- for (Map.Entry entry : props.entrySet()) {
+ for (final Map.Entry entry : props.entrySet()) {
if (!System.getProperties().containsKey(entry.getKey())) {
System.setProperty((String) entry.getKey(), (String) entry.getValue());
System.out.println(entry.getKey() + " = " + entry.getValue());
}
else {
System.out.println(entry.getKey() + " IGNORED, using command line value = " +
- System.getProperty((String) entry.getKey()));
+ System.getProperty((String) entry.getKey()));
}
}
System.out.println("*********************************************************************************");
@@ -173,14 +169,13 @@ public abstract class XTestCase extends TestCase {
else {
if (System.getProperty(OOZIE_TEST_PROPERTIES) != null) {
System.err.println();
- System.err.println("ERROR: Specified test file does not exist: " +
- System.getProperty(OOZIE_TEST_PROPERTIES));
+ System.err.println("ERROR: Specified test file does not exist: " +
+ System.getProperty(OOZIE_TEST_PROPERTIES));
System.err.println();
System.exit(-1);
}
}
- }
- catch (IOException ex) {
+ } catch (final IOException ex) {
throw new RuntimeException(ex);
}
@@ -261,12 +256,12 @@ public abstract class XTestCase extends TestCase {
/**
* Name of the shell command
*/
- protected static final String SHELL_COMMAND_NAME = (Shell.WINDOWS)? "cmd": "bash";
+ protected static final String SHELL_COMMAND_NAME = (Shell.WINDOWS) ? "cmd" : "bash";
/**
* Extension for shell script files
*/
- protected static final String SHELL_COMMAND_SCRIPTFILE_EXTENSION = (Shell.WINDOWS)? "cmd": "sh";
+ protected static final String SHELL_COMMAND_SCRIPTFILE_EXTENSION = (Shell.WINDOWS) ? "cmd" : "sh";
/**
* Option for shell command to pass script files
@@ -297,12 +292,12 @@ public abstract class XTestCase extends TestCase {
* @param cleanUpDBTables true if should cleanup the database tables, false if not
* @throws Exception if the test workflow working directory could not be created or there was a problem cleaning the database
*/
- protected void setUp(boolean cleanUpDBTables) throws Exception {
+ protected void setUp(final boolean cleanUpDBTables) throws Exception {
RUNNING_TESTCASES.incrementAndGet();
super.setUp();
- String baseDir = System.getProperty(OOZIE_TEST_DIR, new File("target/test-data").getAbsolutePath());
+ final String baseDir = System.getProperty(OOZIE_TEST_DIR, new File("target/test-data").getAbsolutePath());
String msg = null;
- File f = new File(baseDir);
+ final File f = new File(baseDir);
if (!f.isAbsolute()) {
msg = XLog.format("System property [{0}]=[{1}] must be set to an absolute path", OOZIE_TEST_DIR, baseDir);
}
@@ -319,7 +314,7 @@ public abstract class XTestCase extends TestCase {
f.mkdirs();
if (!f.exists() || !f.isDirectory()) {
System.err.println();
- System.err.println(XLog.format("Could not create test dir [{0}]", baseDir));
+ System.err.println(XLog.format("Could not create test dir [{0}]", baseDir));
System.exit(-1);
}
hadoopVersion = System.getProperty(HADOOP_VERSION, "0.20.0");
@@ -331,12 +326,12 @@ public abstract class XTestCase extends TestCase {
testCaseConfDir = createTestCaseSubDir("conf");
// load test Oozie site
- String oozieTestDB = System.getProperty("oozie.test.db", "hsqldb");
- String defaultOozieSize =
- new File(OOZIE_SRC_DIR, "core/src/test/resources/" + oozieTestDB + "-oozie-site.xml").getAbsolutePath();
- String customOozieSite = System.getProperty("oozie.test.config.file", defaultOozieSize);
+ final String oozieTestDB = System.getProperty("oozie.test.db", "hsqldb");
+ final String defaultOozieSize =
+ new File(OOZIE_SRC_DIR, "core/src/test/resources/" + oozieTestDB + "-oozie-site.xml").getAbsolutePath();
+ final String customOozieSite = System.getProperty("oozie.test.config.file", defaultOozieSize);
File source = new File(customOozieSite);
- if(!source.isAbsolute()) {
+ if (!source.isAbsolute()) {
source = new File(OOZIE_SRC_DIR, customOozieSite);
}
source = source.getAbsoluteFile();
@@ -346,7 +341,7 @@ public abstract class XTestCase extends TestCase {
}
else {
// If we can't find it, try using the class loader (useful if we're using XTestCase from outside core)
- URL sourceURL = getClass().getClassLoader().getResource(oozieTestDB + "-oozie-site.xml");
+ final URL sourceURL = getClass().getClassLoader().getResource(oozieTestDB + "-oozie-site.xml");
if (sourceURL != null) {
oozieSiteSourceStream = sourceURL.openStream();
}
@@ -354,35 +349,35 @@ public abstract class XTestCase extends TestCase {
// If we still can't find it, then exit
System.err.println();
System.err.println(XLog.format("Custom configuration file for testing does not exist [{0}]",
- source.getAbsolutePath()));
+ source.getAbsolutePath()));
System.err.println();
System.exit(-1);
}
}
// Copy the specified oozie-site file from oozieSiteSourceStream to the test case dir as oozie-site.xml
- Configuration oozieSiteConf = new Configuration(false);
+ final Configuration oozieSiteConf = new Configuration(false);
oozieSiteConf.addResource(oozieSiteSourceStream);
- ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
- InputStream inputStream = classLoader.getResourceAsStream(ConfigurationService.DEFAULT_CONFIG_FILE);
- XConfiguration configuration = new XConfiguration(inputStream);
- String classes = configuration.get(Services.CONF_SERVICE_CLASSES);
+ final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+ final InputStream inputStream = classLoader.getResourceAsStream(ConfigurationService.DEFAULT_CONFIG_FILE);
+ final XConfiguration configuration = new XConfiguration(inputStream);
+ final String classes = configuration.get(Services.CONF_SERVICE_CLASSES);
// Disable sharelib service as it cannot find the sharelib jars
// as maven has target/classes in classpath and not the jar because test phase is before package phase
- oozieSiteConf.set(Services.CONF_SERVICE_CLASSES, classes.replaceAll("org.apache.oozie.service.ShareLibService,",""));
+ oozieSiteConf.set(Services.CONF_SERVICE_CLASSES, classes.replaceAll("org.apache.oozie.service.ShareLibService,", ""));
// Make sure to create the Oozie DB during unit tests
oozieSiteConf.set(JPAService.CONF_CREATE_DB_SCHEMA, "true");
File target = new File(testCaseConfDir, "oozie-site.xml");
oozieSiteConf.writeXml(new FileOutputStream(target));
- File hadoopConfDir = new File(testCaseConfDir, "hadoop-conf");
+ final File hadoopConfDir = new File(testCaseConfDir, "hadoop-conf");
hadoopConfDir.mkdir();
- File actionConfDir = new File(testCaseConfDir, "action-conf");
+ final File actionConfDir = new File(testCaseConfDir, "action-conf");
actionConfDir.mkdir();
source = new File(OOZIE_SRC_DIR, "core/src/test/resources/hadoop-config.xml");
InputStream hadoopConfigResourceStream = null;
if (!source.exists()) {
// If we can't find it, try using the class loader (useful if we're using XTestCase from outside core)
- URL sourceURL = getClass().getClassLoader().getResource("hadoop-config.xml");
+ final URL sourceURL = getClass().getClassLoader().getResource("hadoop-config.xml");
if (sourceURL != null) {
hadoopConfigResourceStream = sourceURL.openStream();
}
@@ -415,17 +410,17 @@ public abstract class XTestCase extends TestCase {
}
if (System.getProperty("oozie.test.db.host") == null) {
- System.setProperty("oozie.test.db.host", "localhost");
+ System.setProperty("oozie.test.db.host", "localhost");
}
setSystemProperty(ConfigurationService.OOZIE_DATA_DIR, testCaseDir);
- setSystemProperty(HadoopAccessorService.SUPPORTED_FILESYSTEMS,"*");
+ setSystemProperty(HadoopAccessorService.SUPPORTED_FILESYSTEMS, "*");
- if (mrCluster != null) {
- OutputStream os = new FileOutputStream(new File(hadoopConfDir, "core-site.xml"));
- Configuration conf = createJobConfFromMRCluster();
- conf.writeXml(os);
- os.close();
+ if (yarnCluster != null) {
+ try (final OutputStream os = new FileOutputStream(new File(hadoopConfDir, "core-site.xml"))) {
+ final Configuration conf = createJobConfFromYarnCluster();
+ conf.writeXml(os);
+ }
}
if (System.getProperty("oozie.test.metastore.server", "false").equals("true")) {
@@ -474,12 +469,12 @@ public abstract class XTestCase extends TestCase {
* reason for the manual parsing instead of an actual File.toURI is because Oozie tests use tokens ${}
* frequently. Something like URI("c:/temp/${HOUR}").toString() will generate escaped values that will break tests
*/
- protected String getTestCaseFileUri(String relativeUri) {
+ protected String getTestCaseFileUri(final String relativeUri) {
String uri = new File(testCaseDir).toURI().toString();
// truncates '/' if the testCaseDir was provided with a fullpath ended with separator
- if (uri.endsWith("/")){
- uri = uri.substring(0, uri.length() -1);
+ if (uri.endsWith("/")) {
+ uri = uri.substring(0, uri.length() - 1);
}
return uri + "/" + relativeUri;
@@ -518,7 +513,7 @@ public abstract class XTestCase extends TestCase {
/**
* Return an alternate test user Id that belongs
- to the test group.
+ to the test group.
*
* @return the user Id.
*/
@@ -562,7 +557,7 @@ public abstract class XTestCase extends TestCase {
* @param testCase testcase instance to obtain the working directory.
* @return the test working directory.
*/
- private String getTestCaseDirInternal(TestCase testCase) {
+ private String getTestCaseDirInternal(final TestCase testCase) {
ParamChecker.notNull(testCase, "testCase");
File dir = new File(System.getProperty(OOZIE_TEST_DIR, "target/test-data"));
dir = new File(dir, "oozietests").getAbsoluteFile();
@@ -571,16 +566,16 @@ public abstract class XTestCase extends TestCase {
return dir.getAbsolutePath();
}
- protected void delete(File file) throws IOException {
+ protected void delete(final File file) throws IOException {
ParamChecker.notNull(file, "file");
if (file.getAbsolutePath().length() < 5) {
throw new RuntimeException(XLog.format("path [{0}] is too short, not deleting", file.getAbsolutePath()));
}
if (file.exists()) {
if (file.isDirectory()) {
- File[] children = file.listFiles();
+ final File[] children = file.listFiles();
if (children != null) {
- for (File child : children) {
+ for (final File child : children) {
delete(child);
}
}
@@ -604,14 +599,14 @@ public abstract class XTestCase extends TestCase {
* @return return the path of the test working directory, it is always an absolute path.
* @throws Exception if the test working directory could not be created or cleaned up.
*/
- private String createTestCaseDir(TestCase testCase, boolean cleanup) throws Exception {
- String testCaseDir = getTestCaseDirInternal(testCase);
+ private String createTestCaseDir(final TestCase testCase, final boolean cleanup) throws Exception {
+ final String testCaseDir = getTestCaseDirInternal(testCase);
System.out.println();
System.out.println(XLog.format("Setting testcase work dir[{0}]", testCaseDir));
if (cleanup) {
delete(new File(testCaseDir));
}
- File dir = new File(testCaseDir);
+ final File dir = new File(testCaseDir);
if (!dir.mkdirs()) {
throw new RuntimeException(XLog.format("Could not create testcase dir[{0}]", testCaseDir));
}
@@ -624,7 +619,7 @@ public abstract class XTestCase extends TestCase {
* @param subDirNames a list of progressively deeper directory names
* @return the absolute path to the created directory.
*/
- protected String createTestCaseSubDir(String... subDirNames) {
+ protected String createTestCaseSubDir(final String... subDirNames) {
ParamChecker.notNull(subDirNames, "subDirName");
if (subDirNames.length == 0) {
throw new RuntimeException(XLog.format("Could not create testcase subdir ''; it already exists"));
@@ -650,12 +645,12 @@ public abstract class XTestCase extends TestCase {
* @param name system property name.
* @param value value to set.
*/
- protected void setSystemProperty(String name, String value) {
+ protected void setSystemProperty(final String name, final String value) {
if (sysProps == null) {
sysProps = new HashMap<String, String>();
}
if (!sysProps.containsKey(name)) {
- String currentValue = System.getProperty(name);
+ final String currentValue = System.getProperty(name);
sysProps.put(name, currentValue);
}
if (value != null) {
@@ -671,7 +666,7 @@ public abstract class XTestCase extends TestCase {
*/
private void resetSystemProperties() {
if (sysProps != null) {
- for (Map.Entry<String, String> entry : sysProps.entrySet()) {
+ for (final Map.Entry<String, String> entry : sysProps.entrySet()) {
if (entry.getValue() != null) {
System.setProperty(entry.getKey(), entry.getValue());
}
@@ -704,11 +699,11 @@ public abstract class XTestCase extends TestCase {
* @param predicate predicate waiting on.
* @return the waited time.
*/
- protected long waitFor(int timeout, Predicate predicate) {
+ protected long waitFor(final int timeout, final Predicate predicate) {
ParamChecker.notNull(predicate, "predicate");
- XLog log = new XLog(LogFactory.getLog(getClass()));
- long started = System.currentTimeMillis();
- long mustEnd = System.currentTimeMillis() + (long)(WAITFOR_RATIO * timeout);
+ final XLog log = new XLog(LogFactory.getLog(getClass()));
+ final long started = System.currentTimeMillis();
+ final long mustEnd = System.currentTimeMillis() + (long) (WAITFOR_RATIO * timeout);
long lastEcho = 0;
try {
long waiting = mustEnd - System.currentTimeMillis();
@@ -726,8 +721,7 @@ public abstract class XTestCase extends TestCase {
log.info("Waiting timed out after [{0}] msec", timeout);
}
return System.currentTimeMillis() - started;
- }
- catch (Exception ex) {
+ } catch (final Exception ex) {
throw new RuntimeException(ex);
}
}
@@ -737,7 +731,7 @@ public abstract class XTestCase extends TestCase {
*
* @param sleepTime time in milliseconds to wait
*/
- protected void sleep(int sleepTime) {
+ protected void sleep(final int sleepTime) {
waitFor(sleepTime, new Predicate() {
@Override
public boolean evaluate() throws Exception {
@@ -777,7 +771,7 @@ public abstract class XTestCase extends TestCase {
}
public String getKeytabFile() {
- String defaultFile = new File(System.getProperty("user.home"), "oozie.keytab").getAbsolutePath();
+ final String defaultFile = new File(System.getProperty("user.home"), "oozie.keytab").getAbsolutePath();
return System.getProperty("oozie.test.kerberos.keytab.file", defaultFile);
}
@@ -787,7 +781,7 @@ public abstract class XTestCase extends TestCase {
public String getOoziePrincipal() {
return System.getProperty("oozie.test.kerberos.oozie.principal",
- getOozieUser() + "/localhost") + "@" + getRealm();
+ getOozieUser() + "/localhost") + "@" + getRealm();
}
protected MiniHCatServer getHCatalogServer() {
@@ -811,12 +805,11 @@ public abstract class XTestCase extends TestCase {
// needed to cleanup the database and shut them down when done; the test will likely start its own Services later and
// we don't want to interfere
try {
- Services services = new Services();
+ final Services services = new Services();
services.getConf().set(Services.CONF_SERVICE_CLASSES, MINIMAL_SERVICES_FOR_DB_CLEANUP);
services.init();
cleanUpDBTablesInternal();
- }
- finally {
+ } finally {
if (Services.get() != null) {
Services.get().destroy();
}
@@ -825,70 +818,70 @@ public abstract class XTestCase extends TestCase {
}
private void cleanUpDBTablesInternal() throws StoreException {
- EntityManager entityManager = Services.get().get(JPAService.class).getEntityManager();
+ final EntityManager entityManager = Services.get().get(JPAService.class).getEntityManager();
entityManager.setFlushMode(FlushModeType.COMMIT);
entityManager.getTransaction().begin();
Query q = entityManager.createNamedQuery("GET_WORKFLOWS");
- List<WorkflowJobBean> wfjBeans = q.getResultList();
- int wfjSize = wfjBeans.size();
- for (WorkflowJobBean w : wfjBeans) {
+ final List<WorkflowJobBean> wfjBeans = q.getResultList();
+ final int wfjSize = wfjBeans.size();
+ for (final WorkflowJobBean w : wfjBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_ACTIONS");
- List<WorkflowActionBean> wfaBeans = q.getResultList();
- int wfaSize = wfaBeans.size();
- for (WorkflowActionBean w : wfaBeans) {
+ final List<WorkflowActionBean> wfaBeans = q.getResultList();
+ final int wfaSize = wfaBeans.size();
+ for (final WorkflowActionBean w : wfaBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_COORD_JOBS");
- List<CoordinatorJobBean> cojBeans = q.getResultList();
- int cojSize = cojBeans.size();
- for (CoordinatorJobBean w : cojBeans) {
+ final List<CoordinatorJobBean> cojBeans = q.getResultList();
+ final int cojSize = cojBeans.size();
+ for (final CoordinatorJobBean w : cojBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_COORD_ACTIONS");
- List<CoordinatorActionBean> coaBeans = q.getResultList();
- int coaSize = coaBeans.size();
- for (CoordinatorActionBean w : coaBeans) {
+ final List<CoordinatorActionBean> coaBeans = q.getResultList();
+ final int coaSize = coaBeans.size();
+ for (final CoordinatorActionBean w : coaBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_BUNDLE_JOBS");
- List<BundleJobBean> bjBeans = q.getResultList();
- int bjSize = bjBeans.size();
- for (BundleJobBean w : bjBeans) {
+ final List<BundleJobBean> bjBeans = q.getResultList();
+ final int bjSize = bjBeans.size();
+ for (final BundleJobBean w : bjBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_BUNDLE_ACTIONS");
- List<BundleActionBean> baBeans = q.getResultList();
- int baSize = baBeans.size();
- for (BundleActionBean w : baBeans) {
+ final List<BundleActionBean> baBeans = q.getResultList();
+ final int baSize = baBeans.size();
+ for (final BundleActionBean w : baBeans) {
entityManager.remove(w);
}
q = entityManager.createNamedQuery("GET_SLA_EVENTS");
- List<SLAEventBean> slaBeans = q.getResultList();
- int slaSize = slaBeans.size();
- for (SLAEventBean w : slaBeans) {
+ final List<SLAEventBean> slaBeans = q.getResultList();
+ final int slaSize = slaBeans.size();
+ for (final SLAEventBean w : slaBeans) {
entityManager.remove(w);
}
q = entityManager.createQuery("select OBJECT(w) from SLARegistrationBean w");
- List<SLARegistrationBean> slaRegBeans = q.getResultList();
- int slaRegSize = slaRegBeans.size();
- for (SLARegistrationBean w : slaRegBeans) {
+ final List<SLARegistrationBean> slaRegBeans = q.getResultList();
+ final int slaRegSize = slaRegBeans.size();
+ for (final SLARegistrationBean w : slaRegBeans) {
entityManager.remove(w);
}
q = entityManager.createQuery("select OBJECT(w) from SLASummaryBean w");
- List<SLASummaryBean> sdBeans = q.getResultList();
- int ssSize = sdBeans.size();
- for (SLASummaryBean w : sdBeans) {
+ final List<SLASummaryBean> sdBeans = q.getResultList();
+ final int ssSize = sdBeans.size();
+ for (final SLASummaryBean w : sdBeans) {
entityManager.remove(w);
}
@@ -908,58 +901,49 @@ public abstract class XTestCase extends TestCase {
private static MiniDFSCluster dfsCluster = null;
private static MiniDFSCluster dfsCluster2 = null;
- // TODO: OYA: replace with MiniYarnCluster or MiniMRYarnCluster
- private static MiniMRCluster mrCluster = null;
+ private static MiniYARNCluster yarnCluster = null;
private static MiniHCatServer hcatServer = null;
private static MiniHS2 hiveserver2 = null;
private static HiveConf hs2Config = null;
- private void setUpEmbeddedHadoop(String testCaseDir) throws Exception {
- if (dfsCluster == null && mrCluster == null) {
- if (System.getProperty("hadoop.log.dir") == null) {
- System.setProperty("hadoop.log.dir", testCaseDir);
- }
+ private void setUpEmbeddedHadoop(final String testCaseDir) throws Exception {
+ if (dfsCluster == null && yarnCluster == null) {
+ if (System.getProperty("hadoop.log.dir") == null) {
+ System.setProperty("hadoop.log.dir", testCaseDir);
+ }
// Tell the ClasspathUtils that we're using a mini cluster
ClasspathUtils.setUsingMiniYarnCluster(true);
- int taskTrackers = 2;
- int dataNodes = 2;
- String oozieUser = getOozieUser();
- JobConf conf = createDFSConfig();
- String[] userGroups = new String[] { getTestGroup(), getTestGroup2() };
+ final int dataNodes = 2;
+ final String oozieUser = getOozieUser();
+ final JobConf dfsConfig = createDFSConfig();
+ final String[] userGroups = new String[]{getTestGroup(), getTestGroup2()};
UserGroupInformation.createUserForTesting(oozieUser, userGroups);
UserGroupInformation.createUserForTesting(getTestUser(), userGroups);
UserGroupInformation.createUserForTesting(getTestUser2(), userGroups);
- UserGroupInformation.createUserForTesting(getTestUser3(), new String[] { "users" } );
+ UserGroupInformation.createUserForTesting(getTestUser3(), new String[]{"users"});
try {
- dfsCluster = new MiniDFSCluster(conf, dataNodes, true, null);
- FileSystem fileSystem = dfsCluster.getFileSystem();
- fileSystem.mkdirs(new Path("target/test-data"));
- fileSystem.mkdirs(new Path("target/test-data"+"/minicluster/mapred"));
- fileSystem.mkdirs(new Path("/user"));
- fileSystem.mkdirs(new Path("/tmp"));
- fileSystem.mkdirs(new Path("/hadoop/mapred/system"));
- fileSystem.setPermission(new Path("target/test-data"), FsPermission.valueOf("-rwxrwxrwx"));
- fileSystem.setPermission(new Path("target/test-data"+"/minicluster"), FsPermission.valueOf("-rwxrwxrwx"));
- fileSystem.setPermission(new Path("target/test-data"+"/minicluster/mapred"), FsPermission.valueOf("-rwxrwxrwx"));
- fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
- fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
- fileSystem.setPermission(new Path("/hadoop/mapred/system"), FsPermission.valueOf("-rwx------"));
- String nnURI = fileSystem.getUri().toString();
- int numDirs = 1;
- String[] racks = null;
- String[] hosts = null;
- mrCluster = new MiniMRCluster(0, 0, taskTrackers, nnURI, numDirs, racks, hosts, null, conf);
- JobConf jobConf = mrCluster.createJobConf();
+ dfsCluster = new MiniDFSCluster.Builder(dfsConfig)
+ .numDataNodes(dataNodes)
+ .format(true)
+ .racks(null)
+ .build();
+
+ createHdfsPathsAndSetupPermissions();
+
+ final Configuration yarnConfig = createYarnConfig(dfsConfig);
+ yarnCluster = new MiniYARNCluster(this.getClass().getName(), 1, 1, 1, 1);
+ yarnCluster.init(yarnConfig);
+ yarnCluster.start();
+ final JobConf jobConf = new JobConf(yarnCluster.getConfig());
System.setProperty(OOZIE_TEST_JOB_TRACKER, jobConf.get("mapred.job.tracker"));
- String rmAddress = jobConf.get("yarn.resourcemanager.address");
+ final String rmAddress = jobConf.get("yarn.resourcemanager.address");
if (rmAddress != null) {
System.setProperty(OOZIE_TEST_JOB_TRACKER, rmAddress);
}
- System.setProperty(OOZIE_TEST_NAME_NODE, jobConf.get("fs.default.name"));
- ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
- }
- catch (Exception ex) {
+ System.setProperty(OOZIE_TEST_NAME_NODE, dfsCluster.getFileSystem().getUri().toString());
+ ProxyUsers.refreshSuperUserGroupsConfiguration(dfsConfig);
+ } catch (final Exception ex) {
shutdownMiniCluster();
throw ex;
}
@@ -967,15 +951,32 @@ public abstract class XTestCase extends TestCase {
}
}
+ private void createHdfsPathsAndSetupPermissions() throws IOException {
+ final FileSystem fileSystem = dfsCluster.getFileSystem();
+
+ fileSystem.mkdirs(new Path("target/test-data"));
+ fileSystem.mkdirs(new Path("target/test-data" + "/minicluster/mapred"));
+ fileSystem.mkdirs(new Path("/user"));
+ fileSystem.mkdirs(new Path("/tmp"));
+ fileSystem.mkdirs(new Path("/hadoop/mapred/system"));
+
+ fileSystem.setPermission(new Path("target/test-data"), FsPermission.valueOf("-rwxrwxrwx"));
+ fileSystem.setPermission(new Path("target/test-data" + "/minicluster"), FsPermission.valueOf("-rwxrwxrwx"));
+ fileSystem.setPermission(new Path("target/test-data" + "/minicluster/mapred"), FsPermission.valueOf("-rwxrwxrwx"));
+ fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
+ fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
+ fileSystem.setPermission(new Path("/hadoop/mapred/system"), FsPermission.valueOf("-rwx------"));
+ }
+
private void setUpEmbeddedHadoop2() throws Exception {
if (dfsCluster != null && dfsCluster2 == null) {
// Trick dfs location for MiniDFSCluster since it doesn't accept location as input)
- String testBuildDataSaved = System.getProperty("test.build.data", "build/test/data");
+ final String testBuildDataSaved = System.getProperty("test.build.data", "build/test/data");
try {
System.setProperty("test.build.data", FilenameUtils.concat(testBuildDataSaved, "2"));
// Only DFS cluster is created based upon current need
dfsCluster2 = new MiniDFSCluster(createDFSConfig(), 2, true, null);
- FileSystem fileSystem = dfsCluster2.getFileSystem();
+ final FileSystem fileSystem = dfsCluster2.getFileSystem();
fileSystem.mkdirs(new Path("target/test-data"));
fileSystem.mkdirs(new Path("/user"));
fileSystem.mkdirs(new Path("/tmp"));
@@ -983,12 +984,10 @@ public abstract class XTestCase extends TestCase {
fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
System.setProperty(OOZIE_TEST_NAME_NODE2, fileSystem.getConf().get("fs.default.name"));
- }
- catch (Exception ex) {
+ } catch (final Exception ex) {
shutdownMiniCluster2();
throw ex;
- }
- finally {
+ } finally {
// Restore previous value
System.setProperty("test.build.data", testBuildDataSaved);
}
@@ -996,31 +995,41 @@ public abstract class XTestCase extends TestCase {
}
private JobConf createDFSConfig() throws UnknownHostException {
- JobConf conf = new JobConf();
- conf.set("dfs.block.access.token.enable", "false");
- conf.set("dfs.permissions", "true");
- conf.set("hadoop.security.authentication", "simple");
-
- //Doing this because Hadoop 1.x does not support '*' if the value is '*,127.0.0.1'
- StringBuilder sb = new StringBuilder();
- sb.append("127.0.0.1,localhost");
- for (InetAddress i : InetAddress.getAllByName(InetAddress.getLocalHost().getHostName())) {
- sb.append(",").append(i.getCanonicalHostName());
- }
- conf.set("hadoop.proxyuser." + getOozieUser() + ".hosts", sb.toString());
-
- conf.set("hadoop.proxyuser." + getOozieUser() + ".groups", getTestGroup());
- conf.set("mapred.tasktracker.map.tasks.maximum", "4");
- conf.set("mapred.tasktracker.reduce.tasks.maximum", "4");
-
- conf.set("hadoop.tmp.dir", "target/test-data"+"/minicluster");
-
- // Scheduler properties required for YARN CapacityScheduler to work
- conf.set("yarn.scheduler.capacity.root.queues", "default");
- conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
- // Required to prevent deadlocks with YARN CapacityScheduler
- conf.set("yarn.scheduler.capacity.maximum-am-resource-percent", "0.5");
- return conf;
+ final JobConf conf = new JobConf();
+ conf.set("dfs.block.access.token.enable", "false");
+ conf.set("dfs.permissions", "true");
+ conf.set("hadoop.security.authentication", "simple");
+
+ //Doing this because Hadoop 1.x does not support '*' if the value is '*,127.0.0.1'
+ final StringBuilder sb = new StringBuilder();
+ sb.append("127.0.0.1,localhost");
+ for (final InetAddress i : InetAddress.getAllByName(InetAddress.getLocalHost().getHostName())) {
+ sb.append(",").append(i.getCanonicalHostName());
+ }
+ conf.set("hadoop.proxyuser." + getOozieUser() + ".hosts", sb.toString());
+
+ conf.set("hadoop.proxyuser." + getOozieUser() + ".groups", getTestGroup());
+ conf.set("mapred.tasktracker.map.tasks.maximum", "4");
+ conf.set("mapred.tasktracker.reduce.tasks.maximum", "4");
+
+ conf.set("hadoop.tmp.dir", "target/test-data" + "/minicluster");
+
+ // Scheduler properties required for YARN CapacityScheduler to work
+ conf.set("yarn.scheduler.capacity.root.queues", "default");
+ conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+ // Required to prevent deadlocks with YARN CapacityScheduler
+ conf.set("yarn.scheduler.capacity.maximum-am-resource-percent", "0.5");
+
+ return conf;
+ }
+
+ private Configuration createYarnConfig(final Configuration parentConfig) {
+ final Configuration yarnConfig = new YarnConfiguration(parentConfig);
+
+ yarnConfig.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
+ yarnConfig.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:0");
+
+ return yarnConfig;
}
protected void setupHCatalogServer() throws Exception {
@@ -1047,8 +1056,8 @@ public abstract class XTestCase extends TestCase {
if (hs2Config == null) {
// Make HS2 use our Mini cluster by copying all configs to HiveConf; also had to hack MiniHS2
hs2Config = new HiveConf();
- Configuration jobConf = createJobConf();
- for (Map.Entry<String, String> pair : jobConf) {
+ final Configuration jobConf = createJobConf();
+ for (final Map.Entry<String, String> pair : jobConf) {
hs2Config.set(pair.getKey(), pair.getValue());
}
}
@@ -1070,25 +1079,23 @@ public abstract class XTestCase extends TestCase {
return hiveserver2.getJdbcURL();
}
- protected String getHiveServer2JdbcURL(String dbName) {
+ protected String getHiveServer2JdbcURL(final String dbName) {
return hiveserver2.getJdbcURL(dbName);
}
private static void shutdownMiniCluster() {
try {
- if (mrCluster != null) {
- mrCluster.shutdown();
+ if (yarnCluster != null) {
+ yarnCluster.stop();
}
- }
- catch (Exception ex) {
+ } catch (final Exception ex) {
System.out.println(ex);
}
try {
if (dfsCluster != null) {
dfsCluster.shutdown();
}
- }
- catch (Exception ex) {
+ } catch (final Exception ex) {
System.out.println(ex);
}
// This is tied to the MiniCluster because it inherits configs from there
@@ -1100,11 +1107,11 @@ public abstract class XTestCase extends TestCase {
if (dfsCluster2 != null) {
dfsCluster2.shutdown();
}
- }
- catch (Exception ex) {
+ } catch (final Exception ex) {
System.out.println(ex);
}
}
+
private static final AtomicLong LAST_TESTCASE_FINISHED = new AtomicLong();
private static final AtomicInteger RUNNING_TESTCASES = new AtomicInteger();
@@ -1115,7 +1122,7 @@ public abstract class XTestCase extends TestCase {
}
public void run() {
- long shutdownWait = Long.parseLong(System.getProperty(TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT, "10")) * 1000;
+ final long shutdownWait = Long.parseLong(System.getProperty(TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT, "10")) * 1000;
LAST_TESTCASE_FINISHED.set(System.currentTimeMillis());
while (true) {
if (RUNNING_TESTCASES.get() == 0) {
@@ -1125,8 +1132,7 @@ public abstract class XTestCase extends TestCase {
}
try {
Thread.sleep(1000);
- }
- catch (InterruptedException ex) {
+ } catch (final InterruptedException ex) {
break;
}
}
@@ -1136,10 +1142,10 @@ public abstract class XTestCase extends TestCase {
}
@SuppressWarnings("deprecation")
- private JobConf createJobConfFromMRCluster() {
- JobConf jobConf = new JobConf();
- JobConf jobConfMR = mrCluster.createJobConf();
- for ( Entry<String, String> entry : jobConfMR) {
+ private JobConf createJobConfFromYarnCluster() {
+ final JobConf jobConf = new JobConf();
+ final JobConf jobConfYarn = new JobConf(yarnCluster.getConfig());
+ for (final Entry<String, String> entry : jobConfYarn) {
// MiniMRClientClusterFactory sets the job jar in Hadoop 2.0 causing tests to fail
// TODO call conf.unset after moving completely to Hadoop 2.x
if (!(entry.getKey().equals("mapreduce.job.jar") || entry.getKey().equals("mapred.jar"))) {
@@ -1154,15 +1160,16 @@ public abstract class XTestCase extends TestCase {
* @return a jobconf preconfigured to talk with the test cluster/minicluster.
*/
protected JobConf createJobConf() throws IOException {
- JobConf jobConf;
- if (mrCluster != null) {
- jobConf = createJobConfFromMRCluster();
- }
- else {
+ final JobConf jobConf;
+
+ if (yarnCluster != null) {
+ jobConf = createJobConfFromYarnCluster();
+ } else {
jobConf = new JobConf();
jobConf.set("mapred.job.tracker", getJobTrackerUri());
jobConf.set("fs.default.name", getNameNodeUri());
}
+
return jobConf;
}
@@ -1185,29 +1192,22 @@ public abstract class XTestCase extends TestCase {
*
* @param executable The ShutdownJobTrackerExecutable to execute while the JobTracker is shutdown
*/
- protected void executeWhileJobTrackerIsShutdown(ShutdownJobTrackerExecutable executable) {
- mrCluster.stopJobTracker();
- Exception ex = null;
+ protected void executeWhileJobTrackerIsShutdown(final ShutdownJobTrackerExecutable executable) {
try {
executable.execute();
- } catch (Exception e) {
- ex = e;
- } finally {
- mrCluster.startJobTracker();
- }
- if (ex != null) {
- throw new RuntimeException(ex);
+ } catch (final Exception e) {
+ throw new RuntimeException(e);
}
}
protected Services setupServicesForHCatalog() throws ServiceException {
- Services services = new Services();
+ final Services services = new Services();
setupServicesForHCataLogImpl(services);
return services;
}
- private void setupServicesForHCataLogImpl(Services services) {
- Configuration conf = services.getConf();
+ private void setupServicesForHCataLogImpl(final Services services) {
+ final Configuration conf = services.getConf();
conf.set(Services.CONF_SERVICE_EXT_CLASSES,
JMSAccessorService.class.getName() + "," +
PartitionDependencyManagerService.class.getName() + "," +
@@ -1215,31 +1215,31 @@ public abstract class XTestCase extends TestCase {
conf.set(HCatAccessorService.JMS_CONNECTIONS_PROPERTIES,
"default=java.naming.factory.initial#" + ActiveMQConnFactory + ";" +
"java.naming.provider.url#" + localActiveMQBroker +
- "connectionFactoryNames#"+ "ConnectionFactory");
+ "connectionFactoryNames#" + "ConnectionFactory");
conf.set(URIHandlerService.URI_HANDLERS,
FSURIHandler.class.getName() + "," + HCatURIHandler.class.getName());
setSystemProperty("java.naming.factory.initial", "org.apache.activemq.jndi.ActiveMQInitialContextFactory");
setSystemProperty("java.naming.provider.url", "vm://localhost?broker.persistent=false");
}
- protected Services setupServicesForHCatalog(Services services) throws ServiceException {
+ protected Services setupServicesForHCatalog(final Services services) throws ServiceException {
setupServicesForHCataLogImpl(services);
return services;
}
- protected YarnApplicationState waitUntilYarnAppState(String externalId, final EnumSet<YarnApplicationState> acceptedStates)
+ protected YarnApplicationState waitUntilYarnAppState(final String externalId, final EnumSet<YarnApplicationState> acceptedStates)
throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
final MutableObject<YarnApplicationState> finalState = new MutableObject<YarnApplicationState>();
- JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
+ final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
final YarnClient yarnClient = Services.get().get(HadoopAccessorService.class).createYarnClient(getTestUser(), jobConf);
try {
waitFor(60 * 1000, new Predicate() {
@Override
public boolean evaluate() throws Exception {
- YarnApplicationState state = yarnClient.getApplicationReport(appId).getYarnApplicationState();
+ final YarnApplicationState state = yarnClient.getApplicationReport(appId).getYarnApplicationState();
finalState.setValue(state);
return acceptedStates.contains(state);
@@ -1255,20 +1255,20 @@ public abstract class XTestCase extends TestCase {
return finalState.getValue();
}
- protected void waitUntilYarnAppDoneAndAssertSuccess(String externalId) throws HadoopAccessorException, IOException, YarnException {
- YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
+ protected void waitUntilYarnAppDoneAndAssertSuccess(final String externalId) throws HadoopAccessorException, IOException, YarnException {
+ final YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
assertEquals("YARN App state", YarnApplicationState.FINISHED, state);
}
- protected void waitUntilYarnAppKilledAndAssertSuccess(String externalId) throws HadoopAccessorException, IOException, YarnException {
- YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
+ protected void waitUntilYarnAppKilledAndAssertSuccess(final String externalId) throws HadoopAccessorException, IOException, YarnException {
+ final YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
assertEquals("YARN App state", YarnApplicationState.KILLED, state);
}
protected YarnApplicationState getYarnApplicationState(String externalId) throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
YarnApplicationState state = null;
- JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
+ final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
// This is needed here because we need a mutable final YarnClient
final MutableObject<YarnClient> yarnClientMO = new MutableObject<YarnClient>(null);
try {
[41/48] oozie git commit: Revert "OOZIE-2729 Kill all actions after
each unit test. This fixes the scenario when multiple actions remained
RUNNING during execution of a test suite and had been consuming resources
(memory, vcores)."
Posted by pb...@apache.org.
Revert "OOZIE-2729 Kill all actions after each unit test. This fixes the scenario when multiple actions remained RUNNING during execution of a test suite and had been consuming resources (memory, vcores)."
This reverts commit a67128165d067c15483acf3ed80531dbe3f6970e.
Change-Id: I4213d541a5854cbf696c3980863a933e2a69c1c8
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/6789d362
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/6789d362
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/6789d362
Branch: refs/heads/oya
Commit: 6789d362f82451a542ba610027c2021344b1e7f9
Parents: 6cd07c2
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Tue Nov 29 12:27:42 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Tue Nov 29 12:27:42 2016 +0100
----------------------------------------------------------------------
.../wf/TestWorkflowActionKillXCommand.java | 12 +-
.../java/org/apache/oozie/test/XTestCase.java | 8 +-
.../apache/oozie/action/hadoop/DistcpMain.java | 2 +-
.../apache/oozie/action/hadoop/HiveMain.java | 2 +-
.../apache/oozie/action/hadoop/Hive2Main.java | 2 +-
.../apache/oozie/action/hadoop/JavaMain.java | 2 +-
.../oozie/action/hadoop/LauncherMain.java | 76 ++++++++
.../oozie/action/hadoop/MapReduceMain.java | 2 +-
.../oozie/action/hadoop/YarnJobActions.java | 181 -------------------
.../org/apache/oozie/action/hadoop/PigMain.java | 2 +-
.../apache/oozie/action/hadoop/SparkMain.java | 2 +-
.../apache/oozie/action/hadoop/SqoopMain.java | 2 +-
12 files changed, 91 insertions(+), 202 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
index 05ceb90..ef75f14 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
@@ -31,7 +31,6 @@ import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.action.hadoop.LauncherMain;
import org.apache.oozie.action.hadoop.MapperReducerForTest;
-import org.apache.oozie.action.hadoop.YarnJobActions;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.executor.jpa.JPAExecutorException;
@@ -44,6 +43,8 @@ import org.apache.oozie.service.UUIDService;
import org.apache.oozie.test.XDataTestCase;
import org.apache.oozie.workflow.WorkflowInstance;
+import com.google.common.collect.Sets;
+
public class TestWorkflowActionKillXCommand extends XDataTestCase {
private Services services;
@@ -171,13 +172,8 @@ public class TestWorkflowActionKillXCommand extends XDataTestCase {
System.setProperty(LauncherMain.OOZIE_JOB_LAUNCH_TIME, String.valueOf(System.currentTimeMillis()));
jobClient.submitJob(jobConf);
-
- final Set<ApplicationId> apps = new YarnJobActions.Builder(jobConf, ApplicationsRequestScope.ALL)
- .checkApplicationTags(true)
- .checkStartRange(true)
- .build()
- .getYarnJobs();
-
+ Set<ApplicationId> apps = Sets.newHashSet();
+ apps = LauncherMain.getChildYarnJobs(jobConf, ApplicationsRequestScope.ALL);
assertEquals("Number of YARN apps", apps.size(), 1);
sleepjob.close();
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index 711d41d..020d8d1 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -916,9 +916,9 @@ public abstract class XTestCase extends TestCase {
private void setUpEmbeddedHadoop(String testCaseDir) throws Exception {
if (dfsCluster == null && mrCluster == null) {
- if (System.getProperty("hadoop.log.dir") == null) {
- System.setProperty("hadoop.log.dir", testCaseDir);
- }
+ if (System.getProperty("hadoop.log.dir") == null) {
+ System.setProperty("hadoop.log.dir", testCaseDir);
+ }
// Tell the ClasspathUtils that we're using a mini cluster
ClasspathUtils.setUsingMiniYarnCluster(true);
int taskTrackers = 2;
@@ -1083,7 +1083,6 @@ public abstract class XTestCase extends TestCase {
catch (Exception ex) {
System.out.println(ex);
}
-
try {
if (dfsCluster != null) {
dfsCluster.shutdown();
@@ -1311,4 +1310,3 @@ public abstract class XTestCase extends TestCase {
}
}
-
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
----------------------------------------------------------------------
diff --git a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
index fc37e3c..40c81ba 100644
--- a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
+++ b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
@@ -51,7 +51,7 @@ public class DistcpMain extends JavaMain {
protected void run(String[] args) throws Exception {
Configuration actionConf = loadActionConf();
- YarnJobActions.killChildYarnJobs(actionConf);
+ LauncherMain.killChildYarnJobs(actionConf);
String logFile = setUpDistcpLog4J(actionConf);
Class<?> klass = actionConf.getClass(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS,
org.apache.hadoop.tools.DistCp.class);
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
----------------------------------------------------------------------
diff --git a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
index 320dbd5..242cd6c 100644
--- a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
+++ b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
@@ -306,7 +306,7 @@ public class HiveMain extends LauncherMain {
}
System.out.println();
- YarnJobActions.killChildYarnJobs(hiveConf);
+ LauncherMain.killChildYarnJobs(hiveConf);
System.out.println("=================================================================");
System.out.println();
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
----------------------------------------------------------------------
diff --git a/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
index 8bf233f..b418b89 100644
--- a/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
+++ b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
@@ -229,7 +229,7 @@ public class Hive2Main extends LauncherMain {
}
System.out.println();
- YarnJobActions.killChildYarnJobs(actionConf);
+ LauncherMain.killChildYarnJobs(actionConf);
System.out.println("=================================================================");
System.out.println();
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
index e9c5585..0815318 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
@@ -44,7 +44,7 @@ public class JavaMain extends LauncherMain {
setApplicationTags(actionConf, TEZ_APPLICATION_TAGS);
setApplicationTags(actionConf, SPARK_YARN_TAGS);
- YarnJobActions.killChildYarnJobs(actionConf);
+ LauncherMain.killChildYarnJobs(actionConf);
Class<?> klass = actionConf.getClass(JAVA_MAIN_CLASS, Object.class);
System.out.println("Java action main class : " + klass.getName());
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
index 0ce2055..31200af 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
@@ -125,6 +125,82 @@ public abstract class LauncherMain {
}
}
+ public static Set<ApplicationId> getChildYarnJobs(Configuration actionConf) {
+ return getChildYarnJobs(actionConf, ApplicationsRequestScope.OWN);
+ }
+
+ public static Set<ApplicationId> getChildYarnJobs(Configuration actionConf, ApplicationsRequestScope scope) {
+ System.out.println("Fetching child yarn jobs");
+ Set<ApplicationId> childYarnJobs = new HashSet<ApplicationId>();
+ String tag = actionConf.get(CHILD_MAPREDUCE_JOB_TAGS);
+ if (tag == null) {
+ System.out.print("Could not find Yarn tags property " + CHILD_MAPREDUCE_JOB_TAGS);
+ return childYarnJobs;
+ }
+ System.out.println("tag id : " + tag);
+ long startTime = 0L;
+ try {
+ startTime = Long.parseLong(System.getProperty(OOZIE_JOB_LAUNCH_TIME));
+ } catch(NumberFormatException nfe) {
+ throw new RuntimeException("Could not find Oozie job launch time", nfe);
+ }
+
+ GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
+ gar.setScope(scope);
+ gar.setApplicationTags(Collections.singleton(tag));
+
+ long endTime = System.currentTimeMillis();
+ if (startTime > endTime) {
+ System.out.println("WARNING: Clock skew between the Oozie server host and this host detected. Please fix this. " +
+ "Attempting to work around...");
+ // We don't know which one is wrong (relative to the RM), so to be safe, let's assume they're both wrong and add an
+ // offset in both directions
+ long diff = 2 * (startTime - endTime);
+ startTime = startTime - diff;
+ endTime = endTime + diff;
+ }
+ gar.setStartRange(startTime, endTime);
+ try {
+ ApplicationClientProtocol proxy = ClientRMProxy.createRMProxy(actionConf, ApplicationClientProtocol.class);
+ GetApplicationsResponse apps = proxy.getApplications(gar);
+ List<ApplicationReport> appsList = apps.getApplicationList();
+ for(ApplicationReport appReport : appsList) {
+ childYarnJobs.add(appReport.getApplicationId());
+ }
+ } catch (IOException ioe) {
+ throw new RuntimeException("Exception occurred while finding child jobs", ioe);
+ } catch (YarnException ye) {
+ throw new RuntimeException("Exception occurred while finding child jobs", ye);
+ }
+
+ System.out.println("Child yarn jobs are found - " + StringUtils.join(childYarnJobs, ","));
+ return childYarnJobs;
+ }
+
+ public static void killChildYarnJobs(Configuration actionConf) {
+ try {
+ Set<ApplicationId> childYarnJobs = getChildYarnJobs(actionConf);
+ if (!childYarnJobs.isEmpty()) {
+ System.out.println();
+ System.out.println("Found [" + childYarnJobs.size() + "] Map-Reduce jobs from this launcher");
+ System.out.println("Killing existing jobs and starting over:");
+ YarnClient yarnClient = YarnClient.createYarnClient();
+ yarnClient.init(actionConf);
+ yarnClient.start();
+ for (ApplicationId app : childYarnJobs) {
+ System.out.print("Killing job [" + app + "] ... ");
+ yarnClient.killApplication(app);
+ System.out.println("Done");
+ }
+ System.out.println();
+ }
+ } catch (YarnException ye) {
+ throw new RuntimeException("Exception occurred while killing child job(s)", ye);
+ } catch (IOException ioe) {
+ throw new RuntimeException("Exception occurred while killing child job(s)", ioe);
+ }
+ }
+
protected abstract void run(String[] args) throws Exception;
/**
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
index ba7324f..16cf4b1 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
@@ -50,7 +50,7 @@ public class MapReduceMain extends LauncherMain {
JobConf jobConf = new JobConf();
addActionConf(jobConf, actionConf);
- YarnJobActions.killChildYarnJobs(jobConf);
+ LauncherMain.killChildYarnJobs(jobConf);
// Run a config class if given to update the job conf
runConfigClass(jobConf);
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java
deleted file mode 100644
index 0224362..0000000
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java
+++ /dev/null
@@ -1,181 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.action.hadoop;
-
-import com.google.common.collect.Sets;
-import com.google.common.io.Closeables;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
-import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.client.ClientRMProxy;
-import org.apache.hadoop.yarn.client.api.YarnClient;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Set;
-
-public class YarnJobActions {
- private final Configuration configuration;
- private final ApplicationsRequestScope scope;
- private final boolean checkApplicationTags;
- private final boolean checkStartRange;
-
- private YarnJobActions(final Configuration configuration,
- final ApplicationsRequestScope scope,
- final boolean checkApplicationTags,
- final boolean checkStartRange) {
- this.configuration = configuration;
- this.scope = scope;
- this.checkApplicationTags = checkApplicationTags;
- this.checkStartRange = checkStartRange;
- }
-
- public Set<ApplicationId> getYarnJobs() {
- System.out.println(String.format("Fetching yarn jobs. [scope=%s;checkApplicationTags=%s;checkStartRange=%s]",
- scope, checkApplicationTags, checkStartRange));
-
- final Set<ApplicationId> childYarnJobs = Sets.newHashSet();
- final GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
- gar.setScope(scope);
-
- if (checkApplicationTags) {
- final String tag = configuration.get(LauncherMain.CHILD_MAPREDUCE_JOB_TAGS);
- if (tag == null) {
- System.out.println("Could not find Yarn tags property " + LauncherMain.CHILD_MAPREDUCE_JOB_TAGS);
- return childYarnJobs;
- }
- System.out.println("tag id : " + tag);
- gar.setApplicationTags(Collections.singleton(tag));
- }
-
- if (checkStartRange) {
- long startTime;
- try {
- startTime = Long.parseLong(System.getProperty(LauncherMain.OOZIE_JOB_LAUNCH_TIME));
- } catch (final NumberFormatException nfe) {
- throw new RuntimeException("Could not find Oozie job launch time", nfe);
- }
-
-
- long endTime = System.currentTimeMillis();
- if (startTime > endTime) {
- System.out.println("WARNING: Clock skew between the Oozie server host and this host detected. Please fix this. " +
- "Attempting to work around...");
- // We don't know which one is wrong (relative to the RM), so to be safe, let's assume they're both wrong and add an
- // offset in both directions
- final long diff = 2 * (startTime - endTime);
- startTime = startTime - diff;
- endTime = endTime + diff;
- }
- gar.setStartRange(startTime, endTime);
- }
-
- try {
- final ApplicationClientProtocol proxy = ClientRMProxy.createRMProxy(configuration, ApplicationClientProtocol.class);
- final GetApplicationsResponse apps = proxy.getApplications(gar);
- final List<ApplicationReport> appsList = apps.getApplicationList();
- for (final ApplicationReport appReport : appsList) {
- childYarnJobs.add(appReport.getApplicationId());
- }
- } catch (final IOException | YarnException e) {
- throw new RuntimeException("Exception occurred while finding child jobs", e);
- }
-
- System.out.println("Child yarn jobs are found - " + StringUtils.join(childYarnJobs, ","));
- return childYarnJobs;
- }
-
- static void killChildYarnJobs(final Configuration actionConf) {
- final YarnJobActions yarnJobActions = new Builder(actionConf, ApplicationsRequestScope.OWN)
- .build();
- final Set<ApplicationId> childYarnJobs = yarnJobActions.getYarnJobs();
-
- yarnJobActions.killSelectedYarnJobs(childYarnJobs);
- }
-
- public void killSelectedYarnJobs(final Set<ApplicationId> selectedApplicationIds) {
- final YarnClient yarnClient = createYarnClient();
-
- try {
- if (!selectedApplicationIds.isEmpty()) {
- System.out.println("");
- System.out.println("Found [" + selectedApplicationIds.size() + "] Map-Reduce jobs from this launcher");
- System.out.println("Killing existing jobs and starting over:");
-
- for (final ApplicationId app : selectedApplicationIds) {
- System.out.println("Killing job [" + app + "] ... ");
-
- yarnClient.killApplication(app);
-
- System.out.println("Done");
- }
-
- System.out.println("");
- }
- } catch (final YarnException | IOException e) {
- throw new RuntimeException("Exception occurred while killing child job(s)", e);
- } finally {
- Closeables.closeQuietly(yarnClient);
- }
- }
-
- private YarnClient createYarnClient() {
- final YarnClient yarnClient = YarnClient.createYarnClient();
-
- yarnClient.init(configuration);
- yarnClient.start();
-
- return yarnClient;
- }
-
- public static class Builder {
- private final Configuration configuration;
- private final ApplicationsRequestScope scope;
- private boolean checkApplicationTags = false;
- private boolean checkStartRange = false;
-
- public Builder(final Configuration configuration, final ApplicationsRequestScope scope) {
- this.configuration = configuration;
- this.scope = scope;
- }
-
- public Builder checkApplicationTags(final boolean checkApplicationTags) {
- this.checkApplicationTags = checkApplicationTags;
-
- return this;
- }
-
- public Builder checkStartRange(final boolean checkStartRange) {
- this.checkStartRange = checkStartRange;
-
- return this;
- }
-
- public YarnJobActions build() {
- return new YarnJobActions(configuration, scope, checkApplicationTags, checkStartRange);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
----------------------------------------------------------------------
diff --git a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
index 98929c0..0029dd0 100644
--- a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
+++ b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
@@ -217,7 +217,7 @@ public class PigMain extends LauncherMain {
System.out.println(" " + arg);
}
- YarnJobActions.killChildYarnJobs(actionConf);
+ LauncherMain.killChildYarnJobs(actionConf);
System.out.println("=================================================================");
System.out.println();
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
index 7f35cc1..f278075 100644
--- a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
+++ b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
@@ -85,7 +85,7 @@ public class SparkMain extends LauncherMain {
prepareHadoopConfig(actionConf);
setYarnTag(actionConf);
- YarnJobActions.killChildYarnJobs(actionConf);
+ LauncherMain.killChildYarnJobs(actionConf);
String logFile = setUpSparkLog4J(actionConf);
List<String> sparkArgs = new ArrayList<String>();
http://git-wip-us.apache.org/repos/asf/oozie/blob/6789d362/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
----------------------------------------------------------------------
diff --git a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
index 29e2966..92c960f 100644
--- a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
+++ b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
@@ -171,7 +171,7 @@ public class SqoopMain extends LauncherMain {
System.out.println(" " + arg);
}
- YarnJobActions.killChildYarnJobs(sqoopConf);
+ LauncherMain.killChildYarnJobs(sqoopConf);
System.out.println("=================================================================");
System.out.println();
[37/48] oozie git commit: Revert "OOZIE-2729 OYA: refactor XTestCase."
Posted by pb...@apache.org.
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index 011bd0b..784c578 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -21,14 +21,22 @@ package org.apache.oozie.test;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
+import java.io.FileReader;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.IOException;
+import java.net.InetAddress;
import java.net.URL;
import java.util.*;
+import java.net.UnknownHostException;
+import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
+import javax.persistence.EntityManager;
+import javax.persistence.FlushModeType;
+import javax.persistence.Query;
+
import junit.framework.TestCase;
import org.apache.commons.io.FilenameUtils;
@@ -43,21 +51,38 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
+import org.apache.hadoop.util.Shell;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.spi.LoggingEvent;
-import org.apache.oozie.action.hadoop.YarnJobActions;
+import org.apache.oozie.BundleActionBean;
+import org.apache.oozie.BundleJobBean;
+import org.apache.oozie.CoordinatorActionBean;
+import org.apache.oozie.CoordinatorJobBean;
+import org.apache.oozie.SLAEventBean;
+import org.apache.oozie.WorkflowActionBean;
+import org.apache.oozie.WorkflowJobBean;
+import org.apache.oozie.dependency.FSURIHandler;
+import org.apache.oozie.dependency.HCatURIHandler;
import org.apache.oozie.service.ConfigurationService;
+import org.apache.oozie.service.HCatAccessorService;
import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
+import org.apache.oozie.service.JMSAccessorService;
import org.apache.oozie.service.JPAService;
+import org.apache.oozie.service.PartitionDependencyManagerService;
import org.apache.oozie.service.ServiceException;
import org.apache.oozie.service.Services;
+import org.apache.oozie.service.StoreService;
+import org.apache.oozie.service.URIHandlerService;
+import org.apache.oozie.sla.SLARegistrationBean;
+import org.apache.oozie.sla.SLASummaryBean;
+import org.apache.oozie.store.StoreException;
import org.apache.oozie.test.MiniHCatServer.RUNMODE;
import org.apache.oozie.test.hive.MiniHS2;
import org.apache.oozie.util.ClasspathUtils;
@@ -83,24 +108,169 @@ import org.apache.oozie.util.XLog;
* From within testcases, system properties must be changed using the {@link #setSystemProperty} method.
*/
public abstract class XTestCase extends TestCase {
- private static EnumSet<YarnApplicationState> YARN_TERMINAL_STATES =
- EnumSet.of(YarnApplicationState.FAILED, YarnApplicationState.KILLED, YarnApplicationState.FINISHED);
- protected static final File OOZIE_SRC_DIR = new TestPropertiesLoader().loadTestPropertiesOrThrow();
- private final TestCaseDirectories testCaseDirectories = new TestCaseDirectories();
- private final TestSystemProperties testSystemProperties = new TestSystemProperties();
-
- private final TestConfigurations testConfigurations = new TestConfigurations();
+ private static EnumSet<YarnApplicationState> YARN_TERMINAL_STATES = EnumSet.of(YarnApplicationState.FAILED, YarnApplicationState.KILLED, YarnApplicationState.FINISHED);
+ private Map<String, String> sysProps;
private String testCaseDir;
-
private String testCaseConfDir;
private String hadoopVersion;
protected XLog log = new XLog(LogFactory.getLog(getClass()));
- static final String OOZIE_TEST_PROPERTIES = "oozie.test.properties";
+ protected static File OOZIE_SRC_DIR = null;
+ private static final String OOZIE_TEST_PROPERTIES = "oozie.test.properties";
protected static final String SYSTEM_LINE_SEPARATOR = System.getProperty("line.separator");
- protected static float WAITFOR_RATIO = Float.parseFloat(System.getProperty("oozie.test.waitfor.ratio", "1"));
- protected static final String LOCAL_ACTIVE_MQ_BROKER = "vm://localhost?broker.persistent=false";
- protected static final String ACTIVE_MQ_CONN_FACTORY = "org.apache.activemq.jndi.ActiveMQInitialContextFactory";
+
+ public static float WAITFOR_RATIO = Float.parseFloat(System.getProperty("oozie.test.waitfor.ratio", "1"));
+ protected static final String localActiveMQBroker = "vm://localhost?broker.persistent=false";
+ protected static final String ActiveMQConnFactory = "org.apache.activemq.jndi.ActiveMQInitialContextFactory";
+
+ static {
+ try {
+ OOZIE_SRC_DIR = new File("core").getAbsoluteFile();
+ if (!OOZIE_SRC_DIR.exists()) {
+ OOZIE_SRC_DIR = OOZIE_SRC_DIR.getParentFile().getParentFile();
+ OOZIE_SRC_DIR = new File(OOZIE_SRC_DIR, "core");
+ }
+ if (!OOZIE_SRC_DIR.exists()) {
+ OOZIE_SRC_DIR = OOZIE_SRC_DIR.getParentFile().getParentFile();
+ OOZIE_SRC_DIR = new File(OOZIE_SRC_DIR, "core");
+ }
+ if (!OOZIE_SRC_DIR.exists()) {
+ // We're probably being run from outside of Oozie (e.g. MiniOozie), so just use a dummy location here.
+ // Anything that uses this location should have a fallback anyway.
+ OOZIE_SRC_DIR = new File(".");
+ } else {
+ OOZIE_SRC_DIR = OOZIE_SRC_DIR.getParentFile();
+ }
+
+ final String testPropsFile = System.getProperty(OOZIE_TEST_PROPERTIES, "test.properties");
+ final File file = new File(testPropsFile).isAbsolute()
+ ? new File(testPropsFile) : new File(OOZIE_SRC_DIR, testPropsFile);
+ if (file.exists()) {
+ System.out.println();
+ System.out.println("*********************************************************************************");
+ System.out.println("Loading test system properties from: " + file.getAbsolutePath());
+ System.out.println();
+ final Properties props = new Properties();
+ props.load(new FileReader(file));
+ for (final Map.Entry entry : props.entrySet()) {
+ if (!System.getProperties().containsKey(entry.getKey())) {
+ System.setProperty((String) entry.getKey(), (String) entry.getValue());
+ System.out.println(entry.getKey() + " = " + entry.getValue());
+ }
+ else {
+ System.out.println(entry.getKey() + " IGNORED, using command line value = " +
+ System.getProperty((String) entry.getKey()));
+ }
+ }
+ System.out.println("*********************************************************************************");
+ System.out.println();
+ }
+ else {
+ if (System.getProperty(OOZIE_TEST_PROPERTIES) != null) {
+ System.err.println();
+ System.err.println("ERROR: Specified test file does not exist: " +
+ System.getProperty(OOZIE_TEST_PROPERTIES));
+ System.err.println();
+ System.exit(-1);
+ }
+ }
+ } catch (final IOException ex) {
+ throw new RuntimeException(ex);
+ }
+
+ }
+
+ /**
+ * System property to specify the parent directory for the 'oozietests' directory to be used as base for all test
+ * working directories. </p> If this property is not set, the assumed value is '/tmp'.
+ */
+ public static final String OOZIE_TEST_DIR = "oozie.test.dir";
+
+ /**
+ * System property to specify the Hadoop Job Tracker to use for testing. </p> If this property is not set, the
+ * assumed value is 'localhost:9001'.
+ */
+ public static final String OOZIE_TEST_JOB_TRACKER = "oozie.test.job.tracker";
+
+ /**
+ * System property to specify the Hadoop Name Node to use for testing. </p> If this property is not set, the assumed
+ * value is 'localhost:9000'.
+ */
+ public static final String OOZIE_TEST_NAME_NODE = "oozie.test.name.node";
+
+ /**
+ * System property to specify the second Hadoop Name Node to use for testing. </p> If this property is not set, the assumed
+ * value is 'localhost:9100'.
+ */
+ public static final String OOZIE_TEST_NAME_NODE2 = "oozie.test.name.node2";
+
+ /**
+ * System property to specify the Hadoop Version to use for testing. </p> If this property is not set, the assumed
+ * value is "0.20.0"
+ */
+ public static final String HADOOP_VERSION = "hadoop.version";
+
+ /**
+ * System property that specifies the user that test oozie instance runs as.
+ * The value of this property defaults to the "${user.name}" system property.
+ */
+ public static final String TEST_OOZIE_USER_PROP = "oozie.test.user.oozie";
+
+ /**
+ * System property that specifies the default test user name used by
+ * the tests. The default value of this property is <tt>test</tt>.
+ */
+ public static final String TEST_USER1_PROP = "oozie.test.user.test";
+
+ /**
+ * System property that specifies an auxiliary test user name used by the
+ * tests. The default value of this property is <tt>test2</tt>.
+ */
+ public static final String TEST_USER2_PROP = "oozie.test.user.test2";
+
+ /**
+ * System property that specifies another auxiliary test user name used by
+ * the tests. The default value of this property is <tt>test3</tt>.
+ */
+ public static final String TEST_USER3_PROP = "oozie.test.user.test3";
+
+ /**
+ * System property that specifies the test group used by the tests.
+ * The default value of this property is <tt>testg</tt>.
+ */
+ public static final String TEST_GROUP_PROP = "oozie.test.group";
+
+ /**
+ * System property that specifies the test group used by the tests.
+ * The default value of this property is <tt>testg</tt>.
+ */
+ public static final String TEST_GROUP_PROP2 = "oozie.test.group2";
+
+ /**
+ * System property that specifies the wait time, in seconds, between testcases before
+ * triggering a shutdown. The default value is 10 sec.
+ */
+ public static final String TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT = "oozie.test.minicluster.monitor.shutdown.wait";
+
+ /**
+ * Name of the shell command
+ */
+ protected static final String SHELL_COMMAND_NAME = (Shell.WINDOWS) ? "cmd" : "bash";
+
+ /**
+ * Extension for shell script files
+ */
+ protected static final String SHELL_COMMAND_SCRIPTFILE_EXTENSION = (Shell.WINDOWS) ? "cmd" : "sh";
+
+ /**
+ * Option for shell command to pass script files
+ */
+ protected static final String SHELL_COMMAND_SCRIPTFILE_OPTION = (Shell.WINDOWS) ? "/c" : "-c";
+
+ /**
+ * Minimal set of require Services for cleaning up the database ({@link JPAService} and {@link StoreService})
+ */
+ private static final String MINIMAL_SERVICES_FOR_DB_CLEANUP = JPAService.class.getName() + "," + StoreService.class.getName();
/**
* Initialize the test working directory. <p/> If it does not exist it creates it, if it already exists it deletes
@@ -124,52 +294,37 @@ public abstract class XTestCase extends TestCase {
protected void setUp(final boolean cleanUpDBTables) throws Exception {
RUNNING_TESTCASES.incrementAndGet();
super.setUp();
-
- testCaseDirectories.createTestDirOrError();
-
- hadoopVersion = System.getProperty(TestConstants.HADOOP_VERSION, "0.20.0");
- testCaseDir = testCaseDirectories.createTestCaseDir(this, true);
-
- setupOozieHome();
-
- testCaseConfDir = createTestCaseSubDir("conf");
-
- final InputStream oozieSiteSourceStream = loadTestOozieSiteOrError();
-
- setupOozieSiteConfiguration(oozieSiteSourceStream);
-
- final File hadoopConfDir = copyAndGetHadoopConfig();
-
- testSystemProperties.setupSystemProperties(testCaseDir);
- if (testSystemProperties.isEmbeddedHadoop()) {
- setUpEmbeddedHadoop(testCaseDir);
+ final String baseDir = System.getProperty(OOZIE_TEST_DIR, new File("target/test-data").getAbsolutePath());
+ String msg = null;
+ final File f = new File(baseDir);
+ if (!f.isAbsolute()) {
+ msg = XLog.format("System property [{0}]=[{1}] must be set to an absolute path", OOZIE_TEST_DIR, baseDir);
}
- if (testSystemProperties.isEmbeddedHadoop2()) {
- setUpEmbeddedHadoop2();
- }
-
- if (yarnCluster != null) {
- try (final OutputStream os = new FileOutputStream(new File(hadoopConfDir, "core-site.xml"))) {
- final Configuration conf = testConfigurations.createJobConfFromYarnCluster(yarnCluster.getConfig());
- conf.writeXml(os);
+ else {
+ if (baseDir.length() < 4) {
+ msg = XLog.format("System property [{0}]=[{1}] path must be at least 4 chars", OOZIE_TEST_DIR, baseDir);
}
}
-
- if (System.getProperty("oozie.test.metastore.server", "false").equals("true")) {
- setupHCatalogServer();
+ if (msg != null) {
+ System.err.println();
+ System.err.println(msg);
+ System.exit(-1);
}
-
- if (System.getProperty("oozie.test.hive.server.2", "false").equals("true")) {
- setupHiveServer2();
+ f.mkdirs();
+ if (!f.exists() || !f.isDirectory()) {
+ System.err.println();
+ System.err.println(XLog.format("Could not create test dir [{0}]", baseDir));
+ System.exit(-1);
}
+ hadoopVersion = System.getProperty(HADOOP_VERSION, "0.20.0");
+ testCaseDir = createTestCaseDir(this, true);
- // Cleanup any leftover database data to make sure we start each test with an empty database
- if (cleanUpDBTables) {
- cleanUpDBTables();
- }
- }
+ //setting up Oozie HOME and Oozie conf directory
+ setSystemProperty(Services.OOZIE_HOME_DIR, testCaseDir);
+ Services.setOozieHome();
+ testCaseConfDir = createTestCaseSubDir("conf");
- private InputStream loadTestOozieSiteOrError() throws IOException {
+ // load test Oozie site
final String oozieTestDB = System.getProperty("oozie.test.db", "hsqldb");
final String defaultOozieSize =
new File(OOZIE_SRC_DIR, "core/src/test/resources/" + oozieTestDB + "-oozie-site.xml").getAbsolutePath();
@@ -198,15 +353,6 @@ public abstract class XTestCase extends TestCase {
System.exit(-1);
}
}
- return oozieSiteSourceStream;
- }
-
- private void setupOozieHome() throws ServiceException {
- setSystemProperty(Services.OOZIE_HOME_DIR, testCaseDir);
- Services.setOozieHome();
- }
-
- private void setupOozieSiteConfiguration(final InputStream oozieSiteSourceStream) throws IOException {
// Copy the specified oozie-site file from oozieSiteSourceStream to the test case dir as oozie-site.xml
final Configuration oozieSiteConf = new Configuration(false);
oozieSiteConf.addResource(oozieSiteSourceStream);
@@ -219,16 +365,14 @@ public abstract class XTestCase extends TestCase {
oozieSiteConf.set(Services.CONF_SERVICE_CLASSES, classes.replaceAll("org.apache.oozie.service.ShareLibService,", ""));
// Make sure to create the Oozie DB during unit tests
oozieSiteConf.set(JPAService.CONF_CREATE_DB_SCHEMA, "true");
- final File target = new File(testCaseConfDir, "oozie-site.xml");
+ File target = new File(testCaseConfDir, "oozie-site.xml");
oozieSiteConf.writeXml(new FileOutputStream(target));
- }
- private File copyAndGetHadoopConfig() throws IOException {
final File hadoopConfDir = new File(testCaseConfDir, "hadoop-conf");
hadoopConfDir.mkdir();
final File actionConfDir = new File(testCaseConfDir, "action-conf");
actionConfDir.mkdir();
- final File source = new File(OOZIE_SRC_DIR, "core/src/test/resources/hadoop-config.xml");
+ source = new File(OOZIE_SRC_DIR, "core/src/test/resources/hadoop-config.xml");
InputStream hadoopConfigResourceStream = null;
if (!source.exists()) {
// If we can't find it, try using the class loader (useful if we're using XTestCase from outside core)
@@ -247,9 +391,49 @@ public abstract class XTestCase extends TestCase {
} else {
hadoopConfigResourceStream = new FileInputStream(source);
}
- final File target = new File(hadoopConfDir, "hadoop-site.xml");
+ target = new File(hadoopConfDir, "hadoop-site.xml");
IOUtils.copyStream(hadoopConfigResourceStream, new FileOutputStream(target));
- return hadoopConfDir;
+
+ if (System.getProperty("oozielocal.log") == null) {
+ setSystemProperty("oozielocal.log", "/tmp/oozielocal.log");
+ }
+ if (System.getProperty("oozie.test.hadoop.security", "simple").equals("kerberos")) {
+ System.setProperty("oozie.service.HadoopAccessorService.kerberos.enabled", "true");
+ }
+ if (System.getProperty("oozie.test.hadoop.minicluster", "true").equals("true")) {
+ setUpEmbeddedHadoop(getTestCaseDir());
+ // Second cluster is not necessary without the first one
+ if (System.getProperty("oozie.test.hadoop.minicluster2", "false").equals("true")) {
+ setUpEmbeddedHadoop2();
+ }
+ }
+
+ if (System.getProperty("oozie.test.db.host") == null) {
+ System.setProperty("oozie.test.db.host", "localhost");
+ }
+ setSystemProperty(ConfigurationService.OOZIE_DATA_DIR, testCaseDir);
+
+ setSystemProperty(HadoopAccessorService.SUPPORTED_FILESYSTEMS, "*");
+
+ if (yarnCluster != null) {
+ try (final OutputStream os = new FileOutputStream(new File(hadoopConfDir, "core-site.xml"))) {
+ final Configuration conf = createJobConfFromYarnCluster();
+ conf.writeXml(os);
+ }
+ }
+
+ if (System.getProperty("oozie.test.metastore.server", "false").equals("true")) {
+ setupHCatalogServer();
+ }
+
+ if (System.getProperty("oozie.test.hive.server.2", "false").equals("true")) {
+ setupHiveServer2();
+ }
+
+ // Cleanup any leftover database data to make sure we start each test with an empty database
+ if (cleanUpDBTables) {
+ cleanUpDBTables();
+ }
}
/**
@@ -259,7 +443,8 @@ public abstract class XTestCase extends TestCase {
protected void tearDown() throws Exception {
tearDownHiveServer2();
tearDownHCatalogServer();
- testSystemProperties.resetSystemProperties();
+ resetSystemProperties();
+ sysProps = null;
testCaseDir = null;
super.tearDown();
RUNNING_TESTCASES.decrementAndGet();
@@ -313,7 +498,7 @@ public abstract class XTestCase extends TestCase {
* @return Oozie's user Id for running the test cases.
*/
public static String getOozieUser() {
- return System.getProperty(TestConstants.TEST_OOZIE_USER_PROP, System.getProperty("user.name"));
+ return System.getProperty(TEST_OOZIE_USER_PROP, System.getProperty("user.name"));
}
/**
@@ -322,7 +507,7 @@ public abstract class XTestCase extends TestCase {
* @return the user Id.
*/
protected static String getTestUser() {
- return System.getProperty(TestConstants.TEST_USER1_PROP, "test");
+ return System.getProperty(TEST_USER1_PROP, "test");
}
/**
@@ -332,7 +517,7 @@ public abstract class XTestCase extends TestCase {
* @return the user Id.
*/
protected static String getTestUser2() {
- return System.getProperty(TestConstants.TEST_USER2_PROP, "test2");
+ return System.getProperty(TEST_USER2_PROP, "test2");
}
/**
@@ -342,7 +527,7 @@ public abstract class XTestCase extends TestCase {
* @return the user Id.
*/
protected static String getTestUser3() {
- return System.getProperty(TestConstants.TEST_USER3_PROP, "test3");
+ return System.getProperty(TEST_USER3_PROP, "test3");
}
/**
@@ -351,7 +536,7 @@ public abstract class XTestCase extends TestCase {
* @return the test group.
*/
protected static String getTestGroup() {
- return System.getProperty(TestConstants.TEST_GROUP_PROP, "testg");
+ return System.getProperty(TEST_GROUP_PROP, "testg");
}
/**
@@ -360,11 +545,71 @@ public abstract class XTestCase extends TestCase {
* @return the test group.
*/
protected static String getTestGroup2() {
- return System.getProperty(TestConstants.TEST_GROUP_PROP, "testg2");
+ return System.getProperty(TEST_GROUP_PROP, "testg2");
+ }
+
+ /**
+ * Return the test working directory.
+ * <p/>
+ * It returns <code>${oozie.test.dir}/oozietests/TESTCLASSNAME/TESTMETHODNAME</code>.
+ *
+ * @param testCase testcase instance to obtain the working directory.
+ * @return the test working directory.
+ */
+ private String getTestCaseDirInternal(final TestCase testCase) {
+ ParamChecker.notNull(testCase, "testCase");
+ File dir = new File(System.getProperty(OOZIE_TEST_DIR, "target/test-data"));
+ dir = new File(dir, "oozietests").getAbsoluteFile();
+ dir = new File(dir, testCase.getClass().getName());
+ dir = new File(dir, testCase.getName());
+ return dir.getAbsolutePath();
}
protected void delete(final File file) throws IOException {
- testCaseDirectories.delete(file);
+ ParamChecker.notNull(file, "file");
+ if (file.getAbsolutePath().length() < 5) {
+ throw new RuntimeException(XLog.format("path [{0}] is too short, not deleting", file.getAbsolutePath()));
+ }
+ if (file.exists()) {
+ if (file.isDirectory()) {
+ final File[] children = file.listFiles();
+ if (children != null) {
+ for (final File child : children) {
+ delete(child);
+ }
+ }
+ }
+ if (!file.delete()) {
+ throw new RuntimeException(XLog.format("could not delete path [{0}]", file.getAbsolutePath()));
+ }
+ }
+ else {
+ // With a dangling symlink, exists() doesn't return true so try to delete it anyway; we fail silently in case the file
+ // truly doesn't exist
+ file.delete();
+ }
+ }
+
+ /**
+ * Create the test working directory.
+ *
+ * @param testCase testcase instance to obtain the working directory.
+ * @param cleanup indicates if the directory should be cleaned up if it exists.
+ * @return return the path of the test working directory, it is always an absolute path.
+ * @throws Exception if the test working directory could not be created or cleaned up.
+ */
+ private String createTestCaseDir(final TestCase testCase, final boolean cleanup) throws Exception {
+ final String testCaseDir = getTestCaseDirInternal(testCase);
+ System.out.println();
+ System.out.println(XLog.format("Setting testcase work dir[{0}]", testCaseDir));
+ if (cleanup) {
+ delete(new File(testCaseDir));
+ }
+ final File dir = new File(testCaseDir);
+ if (!dir.mkdirs()) {
+ throw new RuntimeException(XLog.format("Could not create testcase dir[{0}]", testCaseDir));
+ }
+ return testCaseDir;
}
/**
@@ -374,8 +619,23 @@ public abstract class XTestCase extends TestCase {
* @return the absolute path to the created directory.
*/
protected String createTestCaseSubDir(final String... subDirNames) {
- return testCaseDirectories.createTestCaseSubdir(testCaseDir, subDirNames);
+ ParamChecker.notNull(subDirNames, "subDirName");
+ if (subDirNames.length == 0) {
+ throw new RuntimeException(XLog.format("Could not create testcase subdir ''; it already exists"));
+ }
+
+ File dir = new File(testCaseDir);
+ for (int i = 0; i < subDirNames.length; i++) {
+ ParamChecker.notNull(subDirNames[i], "subDirName[" + i + "]");
+ dir = new File(dir, subDirNames[i]);
+ }
+
+ if (!dir.mkdirs()) {
+ throw new RuntimeException(XLog.format("Could not create testcase subdir[{0}]", dir));
+ }
+ return dir.getAbsolutePath();
}
+
/**
* Set a system property for the duration of the method test case.
* <p/>
@@ -385,13 +645,42 @@ public abstract class XTestCase extends TestCase {
* @param value value to set.
*/
protected void setSystemProperty(final String name, final String value) {
- testSystemProperties.setSystemProperty(name, value);
+ if (sysProps == null) {
+ sysProps = new HashMap<String, String>();
+ }
+ if (!sysProps.containsKey(name)) {
+ final String currentValue = System.getProperty(name);
+ sysProps.put(name, currentValue);
+ }
+ if (value != null) {
+ System.setProperty(name, value);
+ }
+ else {
+ System.getProperties().remove(name);
+ }
+ }
+
+ /**
+ * Reset changed system properties to their original values. <p/> Called from {@link #tearDown}.
+ */
+ private void resetSystemProperties() {
+ if (sysProps != null) {
+ for (final Map.Entry<String, String> entry : sysProps.entrySet()) {
+ if (entry.getValue() != null) {
+ System.setProperty(entry.getKey(), entry.getValue());
+ }
+ else {
+ System.getProperties().remove(entry.getKey());
+ }
+ }
+ sysProps.clear();
+ }
}
/**
* A predicate 'closure' used by {@link XTestCase#waitFor} method.
*/
- public interface Predicate {
+ public static interface Predicate {
/**
* Perform a predicate evaluation.
@@ -399,7 +688,7 @@ public abstract class XTestCase extends TestCase {
* @return the boolean result of the evaluation.
* @throws Exception thrown if the predicate evaluation could not evaluate.
*/
- boolean evaluate() throws Exception;
+ public boolean evaluate() throws Exception;
}
/**
@@ -451,45 +740,45 @@ public abstract class XTestCase extends TestCase {
}
/**
- * Return the Hadoop Job Tracker to use for testing. </p> The value is taken from the Java sytem property
- * {@link TestConstants#OOZIE_TEST_JOB_TRACKER}, if this property is not set, the assumed value is 'locahost:9001'.
+ * Return the Hadoop Job Tracker to use for testing. </p> The value is taken from the Java system property {@link
+ * #OOZIE_TEST_JOB_TRACKER}, if this property is not set, the assumed value is 'localhost:9001'.
*
* @return the job tracker URI.
*/
protected String getJobTrackerUri() {
- return System.getProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, "localhost:9001");
+ return System.getProperty(OOZIE_TEST_JOB_TRACKER, "localhost:9001");
}
/**
- * Return the Hadoop Name Node to use for testing. </p> The value is taken from the Java sytem property
- * {@link TestConstants#OOZIE_TEST_NAME_NODE}, if this property is not set, the assumed value is 'locahost:9000'.
+ * Return the Hadoop Name Node to use for testing. </p> The value is taken from the Java system property {@link
+ * #OOZIE_TEST_NAME_NODE}, if this property is not set, the assumed value is 'localhost:9000'.
*
* @return the name node URI.
*/
protected String getNameNodeUri() {
- return System.getProperty(TestConstants.OOZIE_TEST_NAME_NODE, "hdfs://localhost:9000");
+ return System.getProperty(OOZIE_TEST_NAME_NODE, "hdfs://localhost:9000");
}
/**
- * Return the second Hadoop Name Node to use for testing. </p> The value is taken from the Java sytem property
- * {@link TestConstants#OOZIE_TEST_NAME_NODE2}, if this property is not set, the assumed value is 'locahost:9100'.
+ * Return the second Hadoop Name Node to use for testing. </p> The value is taken from the Java system property {@link
+ * #OOZIE_TEST_NAME_NODE2}, if this property is not set, the assumed value is 'localhost:9100'.
*
* @return the second name node URI.
*/
protected String getNameNode2Uri() {
- return System.getProperty(TestConstants.OOZIE_TEST_NAME_NODE2, "hdfs://localhost:9100");
+ return System.getProperty(OOZIE_TEST_NAME_NODE2, "hdfs://localhost:9100");
}
- protected String getKeytabFile() {
+ public String getKeytabFile() {
final String defaultFile = new File(System.getProperty("user.home"), "oozie.keytab").getAbsolutePath();
return System.getProperty("oozie.test.kerberos.keytab.file", defaultFile);
}
- String getRealm() {
+ public String getRealm() {
return System.getProperty("oozie.test.kerberos.realm", "LOCALHOST");
}
- protected String getOoziePrincipal() {
+ public String getOoziePrincipal() {
return System.getProperty("oozie.test.kerberos.oozie.principal",
getOozieUser() + "/localhost") + "@" + getRealm();
}
@@ -505,7 +794,108 @@ public abstract class XTestCase extends TestCase {
* @throws Exception
*/
protected final void cleanUpDBTables() throws Exception {
- new TestDbCleaner().cleanDbTables();
+ // If the Services are already loaded, then a test is likely calling this for something specific and we shouldn't mess with
+ // the Services; so just cleanup the database
+ if (Services.get() != null) {
+ cleanUpDBTablesInternal();
+ }
+ else {
+ // Otherwise, this is probably being called during setup() and we should just load the minimal set of required Services
+ // needed to cleanup the database and shut them down when done; the test will likely start its own Services later and
+ // we don't want to interfere
+ try {
+ final Services services = new Services();
+ services.getConf().set(Services.CONF_SERVICE_CLASSES, MINIMAL_SERVICES_FOR_DB_CLEANUP);
+ services.init();
+ cleanUpDBTablesInternal();
+ } finally {
+ if (Services.get() != null) {
+ Services.get().destroy();
+ }
+ }
+ }
+ }
+
+ private void cleanUpDBTablesInternal() throws StoreException {
+ final EntityManager entityManager = Services.get().get(JPAService.class).getEntityManager();
+ entityManager.setFlushMode(FlushModeType.COMMIT);
+ entityManager.getTransaction().begin();
+
+ Query q = entityManager.createNamedQuery("GET_WORKFLOWS");
+ final List<WorkflowJobBean> wfjBeans = q.getResultList();
+ final int wfjSize = wfjBeans.size();
+ for (final WorkflowJobBean w : wfjBeans) {
+ entityManager.remove(w);
+ }
+
+ q = entityManager.createNamedQuery("GET_ACTIONS");
+ final List<WorkflowActionBean> wfaBeans = q.getResultList();
+ final int wfaSize = wfaBeans.size();
+ for (final WorkflowActionBean w : wfaBeans) {
+ entityManager.remove(w);
+ }
+
+ q = entityManager.createNamedQuery("GET_COORD_JOBS");
+ final List<CoordinatorJobBean> cojBeans = q.getResultList();
+ final int cojSize = cojBeans.size();
+ for (final CoordinatorJobBean w : cojBeans) {
+ entityManager.remove(w);
+ }
+
+ q = entityManager.createNamedQuery("GET_COORD_ACTIONS");
+ final List<CoordinatorActionBean> coaBeans = q.getResultList();
+ final int coaSize = coaBeans.size();
+ for (final CoordinatorActionBean w : coaBeans) {
+ entityManager.remove(w);
+ }
+
+ q = entityManager.createNamedQuery("GET_BUNDLE_JOBS");
+ final List<BundleJobBean> bjBeans = q.getResultList();
+ final int bjSize = bjBeans.size();
+ for (final BundleJobBean w : bjBeans) {
+ entityManager.remove(w);
+ }
+
+ q = entityManager.createNamedQuery("GET_BUNDLE_ACTIONS");
+ final List<BundleActionBean> baBeans = q.getResultList();
+ final int baSize = baBeans.size();
+ for (final BundleActionBean w : baBeans) {
+ entityManager.remove(w);
+ }
+
+ q = entityManager.createNamedQuery("GET_SLA_EVENTS");
+ final List<SLAEventBean> slaBeans = q.getResultList();
+ final int slaSize = slaBeans.size();
+ for (final SLAEventBean w : slaBeans) {
+ entityManager.remove(w);
+ }
+
+ q = entityManager.createQuery("select OBJECT(w) from SLARegistrationBean w");
+ final List<SLARegistrationBean> slaRegBeans = q.getResultList();
+ final int slaRegSize = slaRegBeans.size();
+ for (final SLARegistrationBean w : slaRegBeans) {
+ entityManager.remove(w);
+ }
+
+ q = entityManager.createQuery("select OBJECT(w) from SLASummaryBean w");
+ final List<SLASummaryBean> sdBeans = q.getResultList();
+ final int ssSize = sdBeans.size();
+ for (final SLASummaryBean w : sdBeans) {
+ entityManager.remove(w);
+ }
+
+ entityManager.getTransaction().commit();
+ entityManager.close();
+ log.info(wfjSize + " entries in WF_JOBS removed from DB!");
+ log.info(wfaSize + " entries in WF_ACTIONS removed from DB!");
+ log.info(cojSize + " entries in COORD_JOBS removed from DB!");
+ log.info(coaSize + " entries in COORD_ACTIONS removed from DB!");
+ log.info(bjSize + " entries in BUNDLE_JOBS removed from DB!");
+ log.info(baSize + " entries in BUNDLE_ACTIONS removed from DB!");
+ log.info(slaSize + " entries in SLA_EVENTS removed from DB!");
+ log.info(slaRegSize + " entries in SLA_REGISTRATION removed from DB!");
+ log.info(ssSize + " entries in SLA_SUMMARY removed from DB!");
+
}
private static MiniDFSCluster dfsCluster = null;
@@ -524,24 +914,33 @@ public abstract class XTestCase extends TestCase {
ClasspathUtils.setUsingMiniYarnCluster(true);
final int dataNodes = 2;
final String oozieUser = getOozieUser();
- final JobConf dfsConfig = testConfigurations.createDFSConfig(getOozieUser(), getTestGroup());
-
- setupUgi(oozieUser);
+ final JobConf dfsConfig = createDFSConfig();
+ final String[] userGroups = new String[]{getTestGroup(), getTestGroup2()};
+ UserGroupInformation.createUserForTesting(oozieUser, userGroups);
+ UserGroupInformation.createUserForTesting(getTestUser(), userGroups);
+ UserGroupInformation.createUserForTesting(getTestUser2(), userGroups);
+ UserGroupInformation.createUserForTesting(getTestUser3(), new String[]{"users"});
try {
- setupDfsCluster(dataNodes, dfsConfig);
+ dfsCluster = new MiniDFSCluster.Builder(dfsConfig)
+ .numDataNodes(dataNodes)
+ .format(true)
+ .racks(null)
+ .build();
createHdfsPathsAndSetupPermissions();
- setupYarnCluster(dfsConfig);
-
+ final Configuration yarnConfig = createYarnConfig(dfsConfig);
+ yarnCluster = new MiniYARNCluster(this.getClass().getName(), 1, 1, 1, 1);
+ yarnCluster.init(yarnConfig);
+ yarnCluster.start();
final JobConf jobConf = new JobConf(yarnCluster.getConfig());
- System.setProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, jobConf.get("mapred.job.tracker"));
+ System.setProperty(OOZIE_TEST_JOB_TRACKER, jobConf.get("mapred.job.tracker"));
final String rmAddress = jobConf.get("yarn.resourcemanager.address");
if (rmAddress != null) {
- System.setProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, rmAddress);
+ System.setProperty(OOZIE_TEST_JOB_TRACKER, rmAddress);
}
- System.setProperty(TestConstants.OOZIE_TEST_NAME_NODE, dfsCluster.getFileSystem().getUri().toString());
+ System.setProperty(OOZIE_TEST_NAME_NODE, dfsCluster.getFileSystem().getUri().toString());
ProxyUsers.refreshSuperUserGroupsConfiguration(dfsConfig);
} catch (final Exception ex) {
shutdownMiniCluster();
@@ -551,30 +950,6 @@ public abstract class XTestCase extends TestCase {
}
}
- private void setupDfsCluster(final int dataNodes, final JobConf dfsConfig) throws IOException {
- dfsCluster = new MiniDFSCluster.Builder(dfsConfig)
- .numDataNodes(dataNodes)
- .format(true)
- .racks(null)
- .build();
- }
-
- private void setupYarnCluster(final JobConf dfsConfig) {
- final Configuration yarnConfig = testConfigurations.createYarnConfig(dfsConfig);
- yarnCluster = new MiniYARNCluster(this.getClass().getName(), 1, 1, 1, 1);
- yarnCluster.init(yarnConfig);
- yarnCluster.start();
- }
-
- private void setupUgi(final String oozieUser) {
- final String[] userGroups = new String[]{getTestGroup(), getTestGroup2()};
-
- UserGroupInformation.createUserForTesting(oozieUser, userGroups);
- UserGroupInformation.createUserForTesting(getTestUser(), userGroups);
- UserGroupInformation.createUserForTesting(getTestUser2(), userGroups);
- UserGroupInformation.createUserForTesting(getTestUser3(), new String[]{"users"});
- }
-
private void createHdfsPathsAndSetupPermissions() throws IOException {
final FileSystem fileSystem = dfsCluster.getFileSystem();
@@ -599,7 +974,7 @@ public abstract class XTestCase extends TestCase {
try {
System.setProperty("test.build.data", FilenameUtils.concat(testBuildDataSaved, "2"));
// Only DFS cluster is created based upon current need
- dfsCluster2 = new MiniDFSCluster(testConfigurations.createDFSConfig(getOozieUser(), getTestGroup()), 2, true, null);
+ dfsCluster2 = new MiniDFSCluster(createDFSConfig(), 2, true, null);
final FileSystem fileSystem = dfsCluster2.getFileSystem();
fileSystem.mkdirs(new Path("target/test-data"));
fileSystem.mkdirs(new Path("/user"));
@@ -607,7 +982,7 @@ public abstract class XTestCase extends TestCase {
fileSystem.setPermission(new Path("target/test-data"), FsPermission.valueOf("-rwxrwxrwx"));
fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
- System.setProperty(TestConstants.OOZIE_TEST_NAME_NODE2, fileSystem.getConf().get("fs.default.name"));
+ System.setProperty(OOZIE_TEST_NAME_NODE2, fileSystem.getConf().get("fs.default.name"));
} catch (final Exception ex) {
shutdownMiniCluster2();
throw ex;
@@ -618,6 +993,44 @@ public abstract class XTestCase extends TestCase {
}
}
+ private JobConf createDFSConfig() throws UnknownHostException {
+ final JobConf conf = new JobConf();
+ conf.set("dfs.block.access.token.enable", "false");
+ conf.set("dfs.permissions", "true");
+ conf.set("hadoop.security.authentication", "simple");
+
+ //Doing this because Hadoop 1.x does not support '*' if the value is '*,127.0.0.1'
+ final StringBuilder sb = new StringBuilder();
+ sb.append("127.0.0.1,localhost");
+ for (final InetAddress i : InetAddress.getAllByName(InetAddress.getLocalHost().getHostName())) {
+ sb.append(",").append(i.getCanonicalHostName());
+ }
+ conf.set("hadoop.proxyuser." + getOozieUser() + ".hosts", sb.toString());
+
+ conf.set("hadoop.proxyuser." + getOozieUser() + ".groups", getTestGroup());
+ conf.set("mapred.tasktracker.map.tasks.maximum", "4");
+ conf.set("mapred.tasktracker.reduce.tasks.maximum", "4");
+
+ conf.set("hadoop.tmp.dir", "target/test-data" + "/minicluster");
+
+ // Scheduler properties required for YARN CapacityScheduler to work
+ conf.set("yarn.scheduler.capacity.root.queues", "default");
+ conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+ // Required to prevent deadlocks with YARN CapacityScheduler
+ conf.set("yarn.scheduler.capacity.maximum-am-resource-percent", "0.5");
+
+ return conf;
+ }
+
+ private Configuration createYarnConfig(final Configuration parentConfig) {
+ final Configuration yarnConfig = new YarnConfiguration(parentConfig);
+
+ yarnConfig.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
+ yarnConfig.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:0");
+
+ return yarnConfig;
+ }
+
protected void setupHCatalogServer() throws Exception {
if (hcatServer == null) {
hcatServer = new MiniHCatServer(RUNMODE.SERVER, createJobConf());
@@ -626,8 +1039,13 @@ public abstract class XTestCase extends TestCase {
}
}
- private void tearDownHCatalogServer() throws Exception {
+ protected void tearDownHCatalogServer() throws Exception {
// TODO: This doesn't properly shutdown the metastore. For now, just keep the current one running once it's been started
+// if (hcatServer != null) {
+// hcatServer.shutdown();
+// hcatServer = null;
+// log.info("Metastore server shutdown");
+// }
}
protected void setupHiveServer2() throws Exception {
@@ -648,7 +1066,7 @@ public abstract class XTestCase extends TestCase {
}
}
- private void tearDownHiveServer2() {
+ protected void tearDownHiveServer2() {
if (hiveserver2 != null && hiveserver2.isStarted()) {
hiveserver2.stop();
hiveserver2 = null;
@@ -706,12 +1124,12 @@ public abstract class XTestCase extends TestCase {
private static class MiniClusterShutdownMonitor extends Thread {
- MiniClusterShutdownMonitor() {
+ public MiniClusterShutdownMonitor() {
setDaemon(true);
}
public void run() {
- final long shutdownWait = Long.parseLong(System.getProperty(TestConstants.TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT, "10")) * 1000;
+ final long shutdownWait = Long.parseLong(System.getProperty(TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT, "10")) * 1000;
LAST_TESTCASE_FINISHED.set(System.currentTimeMillis());
while (true) {
if (RUNNING_TESTCASES.get() == 0) {
@@ -730,29 +1148,49 @@ public abstract class XTestCase extends TestCase {
}
}
+ @SuppressWarnings("deprecation")
+ private JobConf createJobConfFromYarnCluster() {
+ final JobConf jobConf = new JobConf();
+ final JobConf jobConfYarn = new JobConf(yarnCluster.getConfig());
+ for (final Entry<String, String> entry : jobConfYarn) {
+ // MiniMRClientClusterFactory sets the job jar in Hadoop 2.0 causing tests to fail
+ // TODO call conf.unset after moving completely to Hadoop 2.x
+ if (!(entry.getKey().equals("mapreduce.job.jar") || entry.getKey().equals("mapred.jar"))) {
+ jobConf.set(entry.getKey(), entry.getValue());
+ }
+ }
+ return jobConf;
+ }
+
/**
* Returns a jobconf preconfigured to talk with the test cluster/minicluster.
* @return a jobconf preconfigured to talk with the test cluster/minicluster.
*/
protected JobConf createJobConf() throws IOException {
+ final JobConf jobConf;
+
if (yarnCluster != null) {
- return testConfigurations.createJobConfFromYarnCluster(yarnCluster.getConfig());
+ jobConf = createJobConfFromYarnCluster();
} else {
- return testConfigurations.createPristineJobConf(getJobTrackerUri(), getNameNodeUri());
+ jobConf = new JobConf();
+ jobConf.set("mapred.job.tracker", getJobTrackerUri());
+ jobConf.set("fs.default.name", getNameNodeUri());
}
+
+ return jobConf;
}
/**
* A 'closure' used by {@link XTestCase#executeWhileJobTrackerIsShutdown} method.
*/
- public interface ShutdownJobTrackerExecutable {
+ public static interface ShutdownJobTrackerExecutable {
/**
* Execute some code
*
* @throws Exception thrown if the executed code throws an exception.
*/
- void execute() throws Exception;
+ public void execute() throws Exception;
}
/**
@@ -771,26 +1209,32 @@ public abstract class XTestCase extends TestCase {
protected Services setupServicesForHCatalog() throws ServiceException {
final Services services = new Services();
-
- setConfigurationForHCatalogImpl(services);
-
+ setupServicesForHCataLogImpl(services);
return services;
}
- private void setConfigurationForHCatalogImpl(final Services services) {
- testConfigurations.setConfigurationForHCatalog(services);
-
+ private void setupServicesForHCataLogImpl(final Services services) {
+ final Configuration conf = services.getConf();
+ conf.set(Services.CONF_SERVICE_EXT_CLASSES,
+ JMSAccessorService.class.getName() + "," +
+ PartitionDependencyManagerService.class.getName() + "," +
+ HCatAccessorService.class.getName());
+ conf.set(HCatAccessorService.JMS_CONNECTIONS_PROPERTIES,
+ "default=java.naming.factory.initial#" + ActiveMQConnFactory + ";" +
+ "java.naming.provider.url#" + localActiveMQBroker +
+ "connectionFactoryNames#" + "ConnectionFactory");
+ conf.set(URIHandlerService.URI_HANDLERS,
+ FSURIHandler.class.getName() + "," + HCatURIHandler.class.getName());
setSystemProperty("java.naming.factory.initial", "org.apache.activemq.jndi.ActiveMQInitialContextFactory");
setSystemProperty("java.naming.provider.url", "vm://localhost?broker.persistent=false");
}
protected Services setupServicesForHCatalog(final Services services) throws ServiceException {
- setConfigurationForHCatalogImpl(services);
-
+ setupServicesForHCataLogImpl(services);
return services;
}
- private YarnApplicationState waitUntilYarnAppState(final String externalId, final EnumSet<YarnApplicationState> acceptedStates)
+ protected YarnApplicationState waitUntilYarnAppState(final String externalId, final EnumSet<YarnApplicationState> acceptedStates)
throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
final MutableObject<YarnApplicationState> finalState = new MutableObject<YarnApplicationState>();
@@ -828,7 +1272,7 @@ public abstract class XTestCase extends TestCase {
assertEquals("YARN App state", YarnApplicationState.KILLED, state);
}
- protected YarnApplicationState getYarnApplicationState(final String externalId) throws HadoopAccessorException, IOException, YarnException {
+ protected YarnApplicationState getYarnApplicationState(String externalId) throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
YarnApplicationState state = null;
final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
@@ -868,7 +1312,9 @@ public abstract class XTestCase extends TestCase {
}
}
- protected TestLogAppender getTestLogAppender() {
+ public TestLogAppender getTestLogAppender() {
return new TestLogAppender();
}
-}
\ No newline at end of file
+
+}
+
[17/48] oozie git commit: Fix test failures for OOZIE-2225 (pbacsko
via rohini)
Posted by pb...@apache.org.
Fix test failures for OOZIE-2225 (pbacsko via rohini)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/b327fdb5
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/b327fdb5
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/b327fdb5
Branch: refs/heads/oya
Commit: b327fdb5b08cbcc797b2cf25b3e13c3310453a9b
Parents: 3ee71d4
Author: Rohini Palaniswamy <ro...@apache.org>
Authored: Fri Nov 18 12:04:03 2016 -0800
Committer: Rohini Palaniswamy <ro...@apache.org>
Committed: Fri Nov 18 12:04:03 2016 -0800
----------------------------------------------------------------------
.../executor/jpa/WorkflowsJobGetJPAExecutor.java | 1 +
.../apache/oozie/store/StoreStatusFilter.java | 1 +
.../jpa/TestBundleJobInfoGetJPAExecutor.java | 19 +++++++++++++++++++
.../jpa/TestCoordJobInfoGetJPAExecutor.java | 19 +++++++++++++++++++
.../jpa/TestWorkflowsJobGetJPAExecutor.java | 19 +++++++++++++++++++
release-log.txt | 2 +-
6 files changed, 60 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/b327fdb5/core/src/main/java/org/apache/oozie/executor/jpa/WorkflowsJobGetJPAExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/executor/jpa/WorkflowsJobGetJPAExecutor.java b/core/src/main/java/org/apache/oozie/executor/jpa/WorkflowsJobGetJPAExecutor.java
index 13af8f8..f50c1a4 100644
--- a/core/src/main/java/org/apache/oozie/executor/jpa/WorkflowsJobGetJPAExecutor.java
+++ b/core/src/main/java/org/apache/oozie/executor/jpa/WorkflowsJobGetJPAExecutor.java
@@ -278,6 +278,7 @@ public class WorkflowsJobGetJPAExecutor implements JPAExecutor<WorkflowsInfo> {
// w.id = text || w.appName.contains(text) || w.user.contains(text)
else if (entry.getKey().equalsIgnoreCase(OozieClient.FILTER_TEXT)) {
StoreStatusFilter.filterJobsUsingText(filter, sb, isEnabled, seletStr, valArray, orArray, colArray);
+ isEnabled = true;
}
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/b327fdb5/core/src/main/java/org/apache/oozie/store/StoreStatusFilter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/store/StoreStatusFilter.java b/core/src/main/java/org/apache/oozie/store/StoreStatusFilter.java
index b649ae5..0850b75 100644
--- a/core/src/main/java/org/apache/oozie/store/StoreStatusFilter.java
+++ b/core/src/main/java/org/apache/oozie/store/StoreStatusFilter.java
@@ -325,6 +325,7 @@ public class StoreStatusFilter {
// job.id = text || job.appName.contains(text) || job.user.contains(text)
else if (entry.getKey().equalsIgnoreCase(OozieClient.FILTER_TEXT)) {
filterJobsUsingText(filter, sb, isEnabled, seletStr, valArray, orArray, colArray);
+ isEnabled = true;
}
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/b327fdb5/core/src/test/java/org/apache/oozie/executor/jpa/TestBundleJobInfoGetJPAExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/executor/jpa/TestBundleJobInfoGetJPAExecutor.java b/core/src/test/java/org/apache/oozie/executor/jpa/TestBundleJobInfoGetJPAExecutor.java
index e0851f5..eb0d538 100644
--- a/core/src/test/java/org/apache/oozie/executor/jpa/TestBundleJobInfoGetJPAExecutor.java
+++ b/core/src/test/java/org/apache/oozie/executor/jpa/TestBundleJobInfoGetJPAExecutor.java
@@ -240,6 +240,25 @@ public class TestBundleJobInfoGetJPAExecutor extends XDataTestCase {
assertEquals(ret.getBundleJobs().size(), 1);
}
+ public void testGetJobInfoForTextAndStatus() throws Exception {
+ BundleJobBean bundleJob = addRecordToBundleJobTable(BundleJob.Status.RUNNING, false);
+ bundleJob.setAppName("bundle-job-1");
+ BundleJobQueryExecutor.getInstance().executeUpdate(BundleJobQueryExecutor.BundleJobQuery.UPDATE_BUNDLE_JOB, bundleJob);
+
+ Map<String, List<String>> filter = new HashMap<String, List<String>>();
+ List<String> textFilterList = new ArrayList<String>();
+ textFilterList.add("bundle-job-1");
+ List<String> textStatusList = new ArrayList<String>();
+ textStatusList.add(BundleJob.Status.RUNNING.toString());
+ filter.put(OozieClient.FILTER_TEXT, textFilterList);
+ filter.put(OozieClient.FILTER_STATUS, textStatusList);
+
+ JPAService jpaService = Services.get().get(JPAService.class);
+ BundleJobInfoGetJPAExecutor bundleInfoGetCmd = new BundleJobInfoGetJPAExecutor(filter, 1, 20);
+ BundleJobInfo bundleJobsInfo = jpaService.execute(bundleInfoGetCmd);
+ assertEquals(1, bundleJobsInfo.getBundleJobs().size());
+ }
+
private void _testGetJobInfoForGroup() throws Exception {
JPAService jpaService = Services.get().get(JPAService.class);
assertNotNull(jpaService);
http://git-wip-us.apache.org/repos/asf/oozie/blob/b327fdb5/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobInfoGetJPAExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobInfoGetJPAExecutor.java b/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobInfoGetJPAExecutor.java
index a8d4d5b..43334f9 100644
--- a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobInfoGetJPAExecutor.java
+++ b/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobInfoGetJPAExecutor.java
@@ -324,6 +324,25 @@ public class TestCoordJobInfoGetJPAExecutor extends XDataTestCase {
assertEquals(ret.getCoordJobs().size(), 1);
}
+ public void testGetJobInfoForTextAndStatus() throws Exception {
+ CoordinatorJobBean coordinatorJob = addRecordToCoordJobTable(CoordinatorJob.Status.RUNNING, false, false);
+ coordinatorJob.setAppName("coord-job-1");
+ CoordJobQueryExecutor.getInstance().executeUpdate(CoordJobQueryExecutor.CoordJobQuery.UPDATE_COORD_JOB, coordinatorJob);
+
+ Map<String, List<String>> filter = new HashMap<String, List<String>>();
+ List<String> textFilterList = new ArrayList<String>();
+ textFilterList.add("coord-job-1");
+ List<String> textStatusList = new ArrayList<String>();
+ textStatusList.add(CoordinatorJob.Status.RUNNING.toString());
+ filter.put(OozieClient.FILTER_TEXT, textFilterList);
+ filter.put(OozieClient.FILTER_STATUS, textStatusList);
+
+ JPAService jpaService = Services.get().get(JPAService.class);
+ CoordJobInfoGetJPAExecutor coordInfoGetCmd = new CoordJobInfoGetJPAExecutor(filter, 1, 20);
+ CoordinatorJobInfo coordJobsInfo = jpaService.execute(coordInfoGetCmd);
+ assertEquals(1, coordJobsInfo.getCoordJobs().size());
+ }
+
private void _testGetJobInfoForFrequency() throws Exception {
JPAService jpaService = Services.get().get(JPAService.class);
assertNotNull(jpaService);
http://git-wip-us.apache.org/repos/asf/oozie/blob/b327fdb5/core/src/test/java/org/apache/oozie/executor/jpa/TestWorkflowsJobGetJPAExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/executor/jpa/TestWorkflowsJobGetJPAExecutor.java b/core/src/test/java/org/apache/oozie/executor/jpa/TestWorkflowsJobGetJPAExecutor.java
index 398678b..992f566 100644
--- a/core/src/test/java/org/apache/oozie/executor/jpa/TestWorkflowsJobGetJPAExecutor.java
+++ b/core/src/test/java/org/apache/oozie/executor/jpa/TestWorkflowsJobGetJPAExecutor.java
@@ -221,6 +221,25 @@ public class TestWorkflowsJobGetJPAExecutor extends XDataTestCase {
assertEquals(2, wfInfo.getWorkflows().size());
}
+ public void testGetWFInfoForTextAndStatus() throws Exception {
+ WorkflowJobBean workflowJob = addRecordToWfJobTable(WorkflowJob.Status.PREP, WorkflowInstance.Status.PREP);
+ workflowJob.setAppName("wf-name-1");
+ WorkflowJobQueryExecutor.getInstance().executeUpdate(WorkflowJobQuery.UPDATE_WORKFLOW, workflowJob);
+
+ Map<String, List<String>> filter = new HashMap<String, List<String>>();
+ List<String> textFilterList = new ArrayList<String>();
+ textFilterList.add("wf-name-1");
+ List<String> textStatusList = new ArrayList<String>();
+ textStatusList.add(WorkflowJob.Status.PREP.toString());
+ filter.put(OozieClient.FILTER_TEXT, textFilterList);
+ filter.put(OozieClient.FILTER_STATUS, textStatusList);
+
+ JPAService jpaService = Services.get().get(JPAService.class);
+ WorkflowsJobGetJPAExecutor wfGetCmd = new WorkflowsJobGetJPAExecutor(filter, 1, 20);
+ WorkflowsInfo wfInfo = jpaService.execute(wfGetCmd);
+ assertEquals(1, wfInfo.getWorkflows().size());
+ }
+
public void testWfJobsGetWithCreatedTime() throws Exception {
JPAService jpaService = Services.get().get(JPAService.class);
Date createdTime1 = DateUtils.parseDateUTC("2012-01-01T10:00Z");
http://git-wip-us.apache.org/repos/asf/oozie/blob/b327fdb5/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 5e9b8ff..caceaa9 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -2,7 +2,7 @@
OOZIE-2666 Support embedding Jetty into Oozie (asasvari via rkanter)
OOZIE-1459 Remove the version in the child poms for maven-antrun-plugin (Jan Hentschel via rkanter)
-OOZIE-2225 Add wild card filter for gathering jobs (sai-krish via rkanter)
+OOZIE-2225 Add wild card filter for gathering jobs (sai-krish,pbacsko via rkanter,rohini)
OOZIE-2536 Hadoop's cleanup of local directory in uber mode causing failures (satishsaley via rohini)
OOZIE-1986 Add FindBugs report to pre-commit build (andras.piros via rkanter)
OOZIE-2634 Queue dump command message is confusing when the queue is empty (andras.piros via rkanter)
[30/48] oozie git commit: OOZIE-2723 JSON.org license is now CatX
(rkanter, abhishekbafna via shwethags) - update
Posted by pb...@apache.org.
OOZIE-2723 JSON.org license is now CatX (rkanter, abhishekbafna via shwethags) - update
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/93c17cc2
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/93c17cc2
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/93c17cc2
Branch: refs/heads/oya
Commit: 93c17cc28112b2319d5557a05507d46519174096
Parents: ed1e252
Author: Shwetha GS <ss...@hortonworks.com>
Authored: Fri Nov 25 10:21:37 2016 +0530
Committer: Shwetha GS <ss...@hortonworks.com>
Committed: Fri Nov 25 10:21:37 2016 +0530
----------------------------------------------------------------------
NOTICE.txt | 3 +++
RELEASE_NOTES.txt | 4 ----
bin/create-release-artifact | 2 --
pom.xml | 1 -
sharelib/hive/pom.xml | 4 ----
sharelib/hive2/pom.xml | 10 ----------
sharelib/pig/pom.xml | 4 ----
src/main/assemblies/distro-jetty.xml | 1 -
src/main/assemblies/distro-tomcat.xml | 1 -
9 files changed, 3 insertions(+), 27 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/93c17cc2/NOTICE.txt
----------------------------------------------------------------------
diff --git a/NOTICE.txt b/NOTICE.txt
index b70f114..e2a937c 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -9,6 +9,9 @@ Copyright 2011 The Apache Software Foundation
This product includes software developed by The Apache Software
Foundation (http://www.apache.org/).
+This product includes org.json (http://www.json.org/java/index.html),
+Copyright (c) 2002 JSON.org
+
Detailed License information for all components can be found in the
documentation in the ooziedocs.war at index.html##LicenseInfo
http://git-wip-us.apache.org/repos/asf/oozie/blob/93c17cc2/RELEASE_NOTES.txt
----------------------------------------------------------------------
diff --git a/RELEASE_NOTES.txt b/RELEASE_NOTES.txt
deleted file mode 100644
index 6af49f6..0000000
--- a/RELEASE_NOTES.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-These notes are for Oozie 4.3.0 release.
-
-Oozie no longer includes JSON.org dependencies. If Hive or Hive2 action in workflow needs JSON.org dependencies, then they need to be
-added manually into respective sharelib.
http://git-wip-us.apache.org/repos/asf/oozie/blob/93c17cc2/bin/create-release-artifact
----------------------------------------------------------------------
diff --git a/bin/create-release-artifact b/bin/create-release-artifact
index cee5fa6..1f1bcd2 100755
--- a/bin/create-release-artifact
+++ b/bin/create-release-artifact
@@ -58,8 +58,6 @@ run tar czf oozie-${releaseVersion}.tar.gz oozie-$releaseVersion
run cp oozie-$releaseVersion/release-log.txt .
-run cp oozie-$releaseVersion/RELEASE_NOTES.txt .
-
run cd oozie-$releaseVersion
run mvn apache-rat:check
http://git-wip-us.apache.org/repos/asf/oozie/blob/93c17cc2/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 551a622..7e67b77 100644
--- a/pom.xml
+++ b/pom.xml
@@ -119,7 +119,6 @@
<httpcore.version>4.3.3</httpcore.version>
<httpclient.version>4.3.6</httpclient.version>
<kyro.version>2.22</kyro.version>
- <json.version>20090211</json.version>
</properties>
<modules>
http://git-wip-us.apache.org/repos/asf/oozie/blob/93c17cc2/sharelib/hive/pom.xml
----------------------------------------------------------------------
diff --git a/sharelib/hive/pom.xml b/sharelib/hive/pom.xml
index 2b52a82..c5e9f12 100644
--- a/sharelib/hive/pom.xml
+++ b/sharelib/hive/pom.xml
@@ -91,10 +91,6 @@
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
</exclusion>
- <exclusion>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- </exclusion>
</exclusions>
</dependency>
http://git-wip-us.apache.org/repos/asf/oozie/blob/93c17cc2/sharelib/hive2/pom.xml
----------------------------------------------------------------------
diff --git a/sharelib/hive2/pom.xml b/sharelib/hive2/pom.xml
index 4ef219a..e81bfbe 100644
--- a/sharelib/hive2/pom.xml
+++ b/sharelib/hive2/pom.xml
@@ -91,20 +91,10 @@
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
</exclusion>
- <exclusion>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- </exclusion>
</exclusions>
</dependency>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>${json.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-beeline</artifactId>
<scope>compile</scope>
http://git-wip-us.apache.org/repos/asf/oozie/blob/93c17cc2/sharelib/pig/pom.xml
----------------------------------------------------------------------
diff --git a/sharelib/pig/pom.xml b/sharelib/pig/pom.xml
index da246f9..67e5bf2 100644
--- a/sharelib/pig/pom.xml
+++ b/sharelib/pig/pom.xml
@@ -138,10 +138,6 @@
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
</exclusion>
- <exclusion>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- </exclusion>
</exclusions>
</dependency>
http://git-wip-us.apache.org/repos/asf/oozie/blob/93c17cc2/src/main/assemblies/distro-jetty.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distro-jetty.xml b/src/main/assemblies/distro-jetty.xml
index ba23a50..6e3db57 100644
--- a/src/main/assemblies/distro-jetty.xml
+++ b/src/main/assemblies/distro-jetty.xml
@@ -41,7 +41,6 @@
<include>NOTICE.txt</include>
<include>README.txt</include>
<include>release-log.txt</include>
- <include>RELEASE_NOTES.txt</include>
</includes>
</fileSet>
<fileSet>
http://git-wip-us.apache.org/repos/asf/oozie/blob/93c17cc2/src/main/assemblies/distro-tomcat.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distro-tomcat.xml b/src/main/assemblies/distro-tomcat.xml
index ba4f56b..82ba8f9 100644
--- a/src/main/assemblies/distro-tomcat.xml
+++ b/src/main/assemblies/distro-tomcat.xml
@@ -41,7 +41,6 @@
<include>NOTICE.txt</include>
<include>README.txt</include>
<include>release-log.txt</include>
- <include>RELEASE_NOTES.txt</include>
</includes>
</fileSet>
<fileSet>
[47/48] oozie git commit: OOZIE-2616 Add Tez profile for Hive and Pig
sharelibs (poeppt via rkanter)
Posted by pb...@apache.org.
OOZIE-2616 Add Tez profile for Hive and Pig sharelibs (poeppt via rkanter)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/fe182fd8
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/fe182fd8
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/fe182fd8
Branch: refs/heads/oya
Commit: fe182fd8127cb73cdc5502e1536e69724e87f69a
Parents: 5f53676
Author: Robert Kanter <rk...@cloudera.com>
Authored: Mon Dec 12 17:52:03 2016 -0800
Committer: Robert Kanter <rk...@cloudera.com>
Committed: Mon Dec 12 17:52:03 2016 -0800
----------------------------------------------------------------------
docs/src/site/twiki/DG_QuickStart.twiki | 3 +
docs/src/site/twiki/ENG_Building.twiki | 3 +
pom.xml | 413 +++++++++++++++++++++++++++
release-log.txt | 1 +
sharelib/hive/pom.xml | 34 +++
sharelib/pig/pom.xml | 34 +++
6 files changed, 488 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/fe182fd8/docs/src/site/twiki/DG_QuickStart.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/DG_QuickStart.twiki b/docs/src/site/twiki/DG_QuickStart.twiki
index 76c8c64..2b9dc3b 100644
--- a/docs/src/site/twiki/DG_QuickStart.twiki
+++ b/docs/src/site/twiki/DG_QuickStart.twiki
@@ -43,6 +43,8 @@ suitable when same oozie package needs to be used in multiple set-ups with diffe
available to customise the versions of the dependencies:
-P<profile> - default hadoop-2. Valid are hadoop-1, hadoop-2 or hadoop-3. Choose the correct hadoop
profile depending on the hadoop version used.
+-Ptez - Bundle tez jars in hive and pig sharelibs. Useful if you want to use tez
+as the execution engine for those applications.
-Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 2.4.0 for hadoop-2 and 3.0.0-SNAPSHOT for hadoop-3
-Dhadoop.auth.version=<version> - defaults to hadoop version
-Ddistcp.version=<version> - defaults to hadoop version
@@ -56,6 +58,7 @@ profile depending on the hadoop version used.
-Dcurator.version=<version> - default 2.5.0
-Dhive.version=<version> - default 0.13.1 for hadoop-1, 1.2.0 for hadoop-2 and hadoop-3 profile
-Dhbase.version=<version> - default 0.94.2
+-Dtez.version=<version> - default 0.8.4
</verbatim>
More details on building Oozie can be found on the [[ENG_Building][Building Oozie]] page.
http://git-wip-us.apache.org/repos/asf/oozie/blob/fe182fd8/docs/src/site/twiki/ENG_Building.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/ENG_Building.twiki b/docs/src/site/twiki/ENG_Building.twiki
index 0766976..dda829b 100644
--- a/docs/src/site/twiki/ENG_Building.twiki
+++ b/docs/src/site/twiki/ENG_Building.twiki
@@ -214,6 +214,8 @@ the versions of the dependencies:
-Puber - Bundle required hadoop and hcatalog libraries in oozie war
-P<profile> - default hadoop-2. Valid are hadoop-1, hadoop-2 or hadoop-3. Choose the correct hadoop
profile depending on the hadoop version used.
+-Ptez - Bundle tez jars in hive and pig sharelibs. Useful if you want to use tez
+as the execution engine for those applications.
-Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 2.4.0 for hadoop-2 and 3.0.0-SNAPSHOT for hadoop-3
-Dhadoop.auth.version=<version> - defaults to hadoop version
-Ddistcp.version=<version> - defaults to hadoop version
@@ -227,6 +229,7 @@ profile depending on the hadoop version used.
-Dcurator.version=<version> - default 2.5.0
-Dhive.version=<version> - default 0.13.1 for hadoop-1, 1.2.0 for hadoop-2 and hadoop-3 profile
-Dhbase.version=<version> - default 0.94.2
+-Dtez.version=<version> - default 0.8.4
</verbatim>
The following properties should be specified when building a release:
http://git-wip-us.apache.org/repos/asf/oozie/blob/fe182fd8/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d5db296..0cb5c78 100644
--- a/pom.xml
+++ b/pom.xml
@@ -104,6 +104,7 @@
<streaming.version>${hadoop.version}</streaming.version>
<distcp.version>${hadoop.version}</distcp.version>
<hadoop.auth.version>${hadoop.version}</hadoop.auth.version>
+ <tez.version>0.8.4</tez.version>
<jetty.version>9.2.19.v20160908</jetty.version>
@@ -2012,5 +2013,417 @@
<spark.bagel.version>1.6.2</spark.bagel.version>
</properties>
</profile>
+ <profile>
+ <id>tez</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ <dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-api</artifactId>
+ <version>${tez.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jettison</groupId>
+ <artifactId>jettison</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-json</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-runtime-library</artifactId>
+ <version>${tez.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jettison</groupId>
+ <artifactId>jettison</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-json</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-runtime-internals</artifactId>
+ <version>${tez.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jettison</groupId>
+ <artifactId>jettison</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-json</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-mapreduce</artifactId>
+ <version>${tez.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jettison</groupId>
+ <artifactId>jettison</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-json</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-dag</artifactId>
+ <version>${tez.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jettison</groupId>
+ <artifactId>jettison</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-json</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-yarn-timeline-history-with-acls</artifactId>
+ <version>${tez.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jettison</groupId>
+ <artifactId>jettison</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-json</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
+ </profile>
</profiles>
</project>
http://git-wip-us.apache.org/repos/asf/oozie/blob/fe182fd8/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index f7b74a2..5aa5352 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
-- Oozie 4.4.0 release (trunk - unreleased)
+OOZIE-2616 Add Tez profile for Hive and Pig sharelibs (poeppt via rkanter)
OOZIE-2741 Remove Tomcat (asasvari via rkanter)
OOZIE-2745 test-patch should also list the failed tests (gezapeti via rkanter)
OOZIE-2740 oozie help misspelled coordinator (coordiantor) and retrieved (retreived) (gsohn via rkanter)
http://git-wip-us.apache.org/repos/asf/oozie/blob/fe182fd8/sharelib/hive/pom.xml
----------------------------------------------------------------------
diff --git a/sharelib/hive/pom.xml b/sharelib/hive/pom.xml
index c5e9f12..1331219 100644
--- a/sharelib/hive/pom.xml
+++ b/sharelib/hive/pom.xml
@@ -250,5 +250,39 @@
</plugins>
</build>
+ <profiles>
+ <profile>
+ <id>tez</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-runtime-library</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-runtime-internals</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-mapreduce</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-dag</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-yarn-timeline-history-with-acls</artifactId>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
</project>
http://git-wip-us.apache.org/repos/asf/oozie/blob/fe182fd8/sharelib/pig/pom.xml
----------------------------------------------------------------------
diff --git a/sharelib/pig/pom.xml b/sharelib/pig/pom.xml
index 67e5bf2..233873c 100644
--- a/sharelib/pig/pom.xml
+++ b/sharelib/pig/pom.xml
@@ -231,6 +231,40 @@
</plugin>
</plugins>
</build>
+ <profiles>
+ <profile>
+ <id>tez</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-runtime-library</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-runtime-internals</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-mapreduce</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-dag</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-yarn-timeline-history-with-acls</artifactId>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
</project>
[31/48] oozie git commit: Fixing remaining issues,
removing TODOs and FIXMEs
Posted by pb...@apache.org.
Fixing remaining issues, removing TODOs and FIXMEs
Change-Id: Ieb762713fee2281cde5a7826bc03eafe16de64b1
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/739d53a4
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/739d53a4
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/739d53a4
Branch: refs/heads/oya
Commit: 739d53a47c101d9d779382177eaa19a9725a0e46
Parents: 8d60f7f
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Fri Nov 25 11:02:44 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Fri Nov 25 11:02:44 2016 +0100
----------------------------------------------------------------------
.../action/hadoop/DistcpActionExecutor.java | 5 ++
.../oozie/action/hadoop/HiveActionExecutor.java | 5 ++
.../oozie/action/hadoop/JavaActionExecutor.java | 53 +++++------
.../action/hadoop/MapReduceActionExecutor.java | 5 ++
.../oozie/action/hadoop/PigActionExecutor.java | 5 ++
.../action/hadoop/SparkActionExecutor.java | 10 +++
.../action/hadoop/SqoopActionExecutor.java | 4 +
.../action/hadoop/AMRMCallBackHandler.java | 2 +-
.../apache/oozie/action/hadoop/LauncherAM.java | 93 +++++++++-----------
.../oozie/action/hadoop/TestLauncherAM.java | 33 ++++---
10 files changed, 122 insertions(+), 93 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/739d53a4/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
index e000a08..fb26399 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
@@ -107,6 +107,11 @@ public class DistcpActionExecutor extends JavaActionExecutor{
}
@Override
+ protected boolean needToAddMRJars() {
+ return true;
+ }
+
+ @Override
protected String getLauncherMain(Configuration launcherConf, Element actionXml) {
return launcherConf.get(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS, CONF_OOZIE_DISTCP_ACTION_MAIN_CLASS);
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/739d53a4/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
index 82a955c..a2d4ca0 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
@@ -133,6 +133,11 @@ public class HiveActionExecutor extends ScriptLanguageActionExecutor {
}
@Override
+ protected boolean needToAddMRJars() {
+ return true;
+ }
+
+ @Override
protected JobConf loadHadoopDefaultResources(Context context, Element actionXml) {
boolean loadDefaultResources = ConfigurationService
.getBoolean(HadoopAccessorService.ACTION_CONFS_LOAD_DEFAULT_RESOURCES);
http://git-wip-us.apache.org/repos/asf/oozie/blob/739d53a4/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
index 56226ee..b8c870c 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
@@ -29,6 +29,7 @@ import java.nio.ByteBuffer;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@@ -37,7 +38,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
-import java.util.regex.Pattern;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
@@ -144,7 +144,6 @@ public class JavaActionExecutor extends ActionExecutor {
protected static final String HADOOP_USER = "user.name";
protected XLog LOG = XLog.getLog(getClass());
- private static final Pattern heapPattern = Pattern.compile("-Xmx(([0-9]+)[mMgG])");
private static final String JAVA_TMP_DIR_SETTINGS = "-Djava.io.tmpdir=";
public XConfiguration workflowConf = null;
@@ -914,7 +913,6 @@ public class JavaActionExecutor extends ActionExecutor {
}
public void submitLauncher(FileSystem actionFs, final Context context, WorkflowAction action) throws ActionExecutorException {
- JobClient jobClient = null;
boolean exception = false;
YarnClient yarnClient = null;
try {
@@ -1054,20 +1052,6 @@ public class JavaActionExecutor extends ActionExecutor {
if (yarnClient != null) {
Closeables.closeQuietly(yarnClient);
}
-
- if (jobClient != null) {
- try {
- jobClient.close();
- }
- catch (Exception e) {
- if (exception) {
- LOG.error("JobClient error: ", e);
- }
- else {
- throw convertException(e);
- }
- }
- }
}
}
@@ -1107,23 +1091,17 @@ public class JavaActionExecutor extends ActionExecutor {
ClasspathUtils.setupClasspath(env, launcherJobConf);
// FIXME: move this to specific places where it's actually needed - keeping it here for now
- ClasspathUtils.addMapReduceToClasspath(env, launcherJobConf);
-
- // FIXME: Pyspark fix
- // FIXME: Do we want to support mapred.child.env?
- env.put("SPARK_HOME", ".");
+ if (needToAddMRJars()) {
+ ClasspathUtils.addMapReduceToClasspath(env, launcherJobConf);
+ }
- amContainer.setEnvironment(env);
+ addActionSpecificEnvVars(env);
+ amContainer.setEnvironment(Collections.unmodifiableMap(env));
// Set the command
List<String> vargs = new ArrayList<String>(6);
vargs.add(MRApps.crossPlatformifyMREnv(launcherJobConf, ApplicationConstants.Environment.JAVA_HOME)
+ "/bin/java");
- // TODO: OYA: remove attach debugger to AM; useful for debugging
-// vargs.add("-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005");
-
- // FIXME: decide what to do with this method call - signature keeps changing
- // MRApps.addLog4jSystemProperties("INFO", 1024 * 1024, 0, vargs, null);
vargs.add("-Dlog4j.configuration=container-log4j.properties");
vargs.add("-Dlog4j.debug=true");
@@ -1143,8 +1121,7 @@ public class JavaActionExecutor extends ActionExecutor {
}
List<String> vargsFinal = ImmutableList.of(mergedCommand.toString());
- LOG.debug("Command to launch container for ApplicationMaster is : "
- + mergedCommand);
+ LOG.debug("Command to launch container for ApplicationMaster is: {0}", mergedCommand);
amContainer.setCommands(vargsFinal);
appContext.setAMContainerSpec(amContainer);
@@ -1381,6 +1358,22 @@ public class JavaActionExecutor extends ActionExecutor {
return action.getExternalId();
}
+ /**
+ * If returns true, it means that we have to add Hadoop MR jars to the classpath. Subclasses should override this method if necessary.
+ *
+ */
+ protected boolean needToAddMRJars() {
+ return false;
+ }
+
+ /**
+ * Adds action-specific environment variables. Default implementation is no-op. Subclasses should override this method if necessary.
+ *
+ */
+ protected void addActionSpecificEnvVars(Map<String, String> env) {
+ // nop
+ }
+
@Override
public void check(Context context, WorkflowAction action) throws ActionExecutorException {
boolean fallback = false;
http://git-wip-us.apache.org/repos/asf/oozie/blob/739d53a4/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
index 11d1787..51a9c15 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
@@ -339,6 +339,11 @@ public class MapReduceActionExecutor extends JavaActionExecutor {
}
@Override
+ protected boolean needToAddMRJars() {
+ return true;
+ }
+
+ @Override
public void check(Context context, WorkflowAction action) throws ActionExecutorException {
Map<String, String> actionData = Collections.emptyMap();
JobConf jobConf = null;
http://git-wip-us.apache.org/repos/asf/oozie/blob/739d53a4/core/src/main/java/org/apache/oozie/action/hadoop/PigActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/PigActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/PigActionExecutor.java
index cbb7436..347b034 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/PigActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/PigActionExecutor.java
@@ -124,6 +124,11 @@ public class PigActionExecutor extends ScriptLanguageActionExecutor {
}
@Override
+ protected boolean needToAddMRJars() {
+ return true;
+ }
+
+ @Override
protected JobConf loadHadoopDefaultResources(Context context, Element actionXml) {
boolean loadDefaultResources = ConfigurationService
.getBoolean(HadoopAccessorService.ACTION_CONFS_LOAD_DEFAULT_RESOURCES);
http://git-wip-us.apache.org/repos/asf/oozie/blob/739d53a4/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
index 32dadf0..c373758 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
@@ -153,6 +153,16 @@ public class SparkActionExecutor extends JavaActionExecutor {
}
@Override
+ protected boolean needToAddMRJars() {
+ return true;
+ }
+
+ @Override
+ protected void addActionSpecificEnvVars(Map<String, String> env) {
+ env.put("SPARK_HOME", ".");
+ }
+
+ @Override
protected String getLauncherMain(Configuration launcherConf, Element actionXml) {
return launcherConf.get(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS, SPARK_MAIN_CLASS_NAME);
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/739d53a4/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
index c6266df..54a860e 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/SqoopActionExecutor.java
@@ -225,6 +225,10 @@ public class SqoopActionExecutor extends JavaActionExecutor {
}
}
+ @Override
+ protected boolean needToAddMRJars() {
+ return true;
+ }
/**
* Return the sharelib name for the action.
http://git-wip-us.apache.org/repos/asf/oozie/blob/739d53a4/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/AMRMCallBackHandler.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/AMRMCallBackHandler.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/AMRMCallBackHandler.java
index 63213e6..e6c9d04 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/AMRMCallBackHandler.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/AMRMCallBackHandler.java
@@ -53,7 +53,7 @@ public class AMRMCallBackHandler implements AMRMClientAsync.CallbackHandler {
@Override
public float getProgress() {
- return 0.5f; //TODO: OYA: maybe some action types can report better progress?
+ return 0.5f;
}
@Override
http://git-wip-us.apache.org/repos/asf/oozie/blob/739d53a4/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherAM.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherAM.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherAM.java
index 881fa72..a8d1e09 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherAM.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherAM.java
@@ -24,7 +24,7 @@ import java.io.StringWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.security.Permission;
-import java.security.PrivilegedAction;
+import java.security.PrivilegedExceptionAction;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
@@ -42,6 +42,7 @@ import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
import org.apache.hadoop.yarn.exceptions.YarnException;
import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
public class LauncherAM {
@@ -116,6 +117,8 @@ public class LauncherAM {
Preconditions.checkArgument(!submitterUser.isEmpty(), "Submitter user is undefined");
System.out.println("Submitter user is: " + submitterUser);
+ // We don't need remote/proxy user if the current login user is the workflow submitter
+ // Otherwise we have to create a remote user
if (UserGroupInformation.getLoginUser().getShortUserName().equals(submitterUser)) {
System.out.println("Using login user for UGI");
ugi = UserGroupInformation.getLoginUser();
@@ -160,31 +163,13 @@ public class LauncherAM {
errorHolder.setErrorCause(ex);
throw ex;
}
-
- registerWithRM();
-
actionDir = new Path(launcherJobConf.get(OOZIE_ACTION_DIR_PATH));
- try {
- System.out.println("\nStarting the execution of prepare actions");
- executePrepare(ugi);
- System.out.println("Completed the execution of prepare actions successfully");
- } catch (Exception ex) {
- errorHolder.setErrorMessage("Prepare execution in the Launcher AM has failed");
- errorHolder.setErrorCause(ex);
- throw ex;
- }
-
+ registerWithRM();
+ executePrepare(ugi, errorHolder);
final String[] mainArgs = getMainArguments(launcherJobConf);
-
- // TODO: OYA: should we allow turning this off?
- // TODO: OYA: what should default be?
- if (launcherJobConf.getBoolean("oozie.launcher.print.debug.info", true)) {
- printDebugInfo();
- }
-
+ printDebugInfo();
setupMainConfiguration();
-
launcerExecutedProperly = runActionMain(mainArgs, errorHolder, ugi);
if (launcerExecutedProperly) {
@@ -323,11 +308,12 @@ public class LauncherAM {
}
// Method to execute the prepare actions
- private void executePrepare(UserGroupInformation ugi) throws Exception {
- Exception e = ugi.doAs(new PrivilegedAction<Exception>() {
- @Override
- public Exception run() {
- try {
+ private void executePrepare(UserGroupInformation ugi, ErrorHolder errorHolder) throws Exception {
+ try {
+ System.out.println("\nStarting the execution of prepare actions");
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
String prepareXML = launcherJobConf.get(ACTION_PREPARE_XML);
if (prepareXML != null) {
if (prepareXML.length() != 0) {
@@ -338,16 +324,14 @@ public class LauncherAM {
System.out.println("There are no prepare actions to execute.");
}
}
- } catch (Exception e) {
- e.printStackTrace();
- return e;
+ return null;
}
- return null;
- }
- });
-
- if (e != null) {
- throw e;
+ });
+ System.out.println("Completed the execution of prepare actions successfully");
+ } catch (Exception ex) {
+ errorHolder.setErrorMessage("Prepare execution in the Launcher AM has failed");
+ errorHolder.setErrorCause(ex);
+ throw ex;
}
}
@@ -366,20 +350,21 @@ public class LauncherAM {
System.setProperty("oozie.job.launch.time", String.valueOf(System.currentTimeMillis()));
}
- private boolean runActionMain(final String[] mainArgs, final ErrorHolder eHolder, UserGroupInformation ugi) {
+ private boolean runActionMain(final String[] mainArgs, final ErrorHolder eHolder, UserGroupInformation ugi) throws Exception {
// using AtomicBoolean because we want to modify it inside run()
final AtomicBoolean actionMainExecutedProperly = new AtomicBoolean(false);
- ugi.doAs(new PrivilegedAction<Void>() {
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
@Override
- public Void run() {
+ public Void run() throws Exception {
try {
setRecoveryId();
- Class<?> klass = launcherJobConf.getClass(CONF_OOZIE_ACTION_MAIN_CLASS, Object.class);
+ Class<?> klass = launcherJobConf.getClass(CONF_OOZIE_ACTION_MAIN_CLASS, null);
+ Preconditions.checkNotNull(klass, "Launcher class should not be null");
System.out.println("Launcher class: " + klass.toString());
Method mainMethod = klass.getMethod("main", String[].class);
// Enable LauncherSecurityManager to catch System.exit calls
- launcherSecurityManager.set();
+ launcherSecurityManager.enable();
mainMethod.invoke(null, (Object) mainArgs);
System.out.println();
@@ -430,7 +415,7 @@ public class LauncherAM {
eHolder.setErrorCause(t);
} finally {
// Disable LauncherSecurityManager
- launcherSecurityManager.unset();
+ launcherSecurityManager.disable();
}
return null;
@@ -497,7 +482,11 @@ public class LauncherAM {
private void updateActionDataWithFailure(ErrorHolder eHolder, Map<String, String> actionData) {
if (eHolder.getErrorCause() != null && eHolder.getErrorCause().getMessage() != null) {
- eHolder.setErrorMessage(eHolder.getErrorMessage() + ", " + eHolder.getErrorCause().getMessage());
+ if (Objects.equal(eHolder.getErrorMessage(), eHolder.getErrorCause().getMessage())) {
+ eHolder.setErrorMessage(eHolder.getErrorMessage());
+ } else {
+ eHolder.setErrorMessage(eHolder.getErrorMessage() + ", " + eHolder.getErrorCause().getMessage());
+ }
}
Properties errorProps = new Properties();
@@ -553,27 +542,27 @@ public class LauncherAM {
public static class LauncherSecurityManager extends SecurityManager {
private boolean exitInvoked;
private int exitCode;
- private SecurityManager securityManager;
+ private SecurityManager originalSecurityManager;
public LauncherSecurityManager() {
exitInvoked = false;
exitCode = 0;
- securityManager = System.getSecurityManager();
+ originalSecurityManager = System.getSecurityManager();
}
@Override
public void checkPermission(Permission perm, Object context) {
- if (securityManager != null) {
+ if (originalSecurityManager != null) {
// check everything with the original SecurityManager
- securityManager.checkPermission(perm, context);
+ originalSecurityManager.checkPermission(perm, context);
}
}
@Override
public void checkPermission(Permission perm) {
- if (securityManager != null) {
+ if (originalSecurityManager != null) {
// check everything with the original SecurityManager
- securityManager.checkPermission(perm);
+ originalSecurityManager.checkPermission(perm);
}
}
@@ -592,15 +581,15 @@ public class LauncherAM {
return exitCode;
}
- public void set() {
+ public void enable() {
if (System.getSecurityManager() != this) {
System.setSecurityManager(this);
}
}
- public void unset() {
+ public void disable() {
if (System.getSecurityManager() == this) {
- System.setSecurityManager(securityManager);
+ System.setSecurityManager(originalSecurityManager);
}
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/739d53a4/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java b/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
index 052673d..777cfdd 100644
--- a/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
+++ b/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
@@ -56,7 +56,7 @@ import static org.mockito.Mockito.verifyZeroInteractions;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
-import java.security.PrivilegedAction;
+import java.security.PrivilegedExceptionAction;
import java.util.Map;
import java.util.Properties;
@@ -137,7 +137,7 @@ public class TestLauncherAM {
private ExpectedFailureDetails failureDetails = new ExpectedFailureDetails();
@Before
- public void setup() throws IOException {
+ public void setup() throws Exception {
configureMocksForHappyPath();
launcherJobConfig.set(LauncherMapper.OOZIE_ACTION_RECOVERY_ID, "1");
instantiateLauncher();
@@ -185,6 +185,20 @@ public class TestLauncherAM {
}
@Test
+ public void testLauncherClassNotDefined() throws Exception {
+ launcherJobConfig.unset(LauncherAM.CONF_OOZIE_ACTION_MAIN_CLASS);
+
+ executeLauncher();
+
+ failureDetails.expectedExceptionMessage("Launcher class should not be null")
+ .expectedErrorCode(EXIT_CODE_0)
+ .expectedErrorReason("Launcher class should not be null")
+ .withStackTrace();
+
+ assertFailedExecution();
+ }
+
+ @Test
public void testMainIsSuccessfullyInvokedAndAsyncErrorReceived() throws Exception {
ErrorHolder errorHolder = new ErrorHolder();
errorHolder.setErrorCode(6);
@@ -394,7 +408,7 @@ public class TestLauncherAM {
failureDetails.expectedExceptionMessage("IO error")
.expectedErrorCode(EXIT_CODE_0)
- .expectedErrorReason("IO error, IO error")
+ .expectedErrorReason("IO error")
.withStackTrace();
assertFailedExecution();
@@ -419,7 +433,7 @@ public class TestLauncherAM {
failureDetails.expectedExceptionMessage("IO error")
.expectedErrorCode(EXIT_CODE_0)
- .expectedErrorReason("IO error, IO error")
+ .expectedErrorReason("IO error")
.withStackTrace();
verify(hdfsOperationsMock).readFileContents(any(Path.class), eq(launcherJobConfig));
@@ -452,8 +466,8 @@ public class TestLauncherAM {
containerId);
}
- @SuppressWarnings("unchecked")
- private void configureMocksForHappyPath() throws IOException {
+ @SuppressWarnings("unchecked")
+ private void configureMocksForHappyPath() throws Exception {
launcherJobConfig.set(LauncherAM.OOZIE_ACTION_DIR_PATH, "dummy");
launcherJobConfig.set(LauncherAM.OOZIE_JOB_ID, "dummy");
launcherJobConfig.set(LauncherAM.OOZIE_ACTION_ID, "dummy");
@@ -461,12 +475,11 @@ public class TestLauncherAM {
given(localFsOperationsMock.readLauncherConf()).willReturn(launcherJobConfig);
given(localFsOperationsMock.fileExists(any(File.class))).willReturn(true);
-
willReturn(amRmAsyncClientMock).given(amRMClientAsyncFactoryMock).createAMRMClientAsync(anyInt());
- given(ugiMock.doAs(any(PrivilegedAction.class))).willAnswer(new Answer<Object>() {
+ given(ugiMock.doAs(any(PrivilegedExceptionAction.class))).willAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
- PrivilegedAction<?> action = (PrivilegedAction<?>) invocation.getArguments()[0];
+ PrivilegedExceptionAction<?> action = (PrivilegedExceptionAction<?>) invocation.getArguments()[0];
return action.run();
}
});
@@ -498,7 +511,7 @@ public class TestLauncherAM {
verify(amRmAsyncClientMock).registerApplicationMaster(anyString(), anyInt(), anyString());
verify(amRmAsyncClientMock).unregisterApplicationMaster(FinalApplicationStatus.SUCCEEDED, EMPTY_STRING, EMPTY_STRING);
verify(amRmAsyncClientMock).stop();
- verify(ugiMock, times(2)).doAs(any(PrivilegedAction.class)); // prepare & action main
+ verify(ugiMock, times(2)).doAs(any(PrivilegedExceptionAction.class)); // prepare & action main
verify(hdfsOperationsMock).uploadActionDataToHDFS(any(Configuration.class), any(Path.class), any(Map.class));
verify(launcherCallbackNotifierFactoryMock).createCallbackNotifier(any(Configuration.class));
verify(launcherCallbackNotifierMock).notifyURL(actionResult);
[03/48] oozie git commit: OOZIE-2719 Test case failure (abhishekbafna
via jaydeepvishwakarma)
Posted by pb...@apache.org.
OOZIE-2719 Test case failure (abhishekbafna via jaydeepvishwakarma)
(cherry picked from commit 225a88cb98012a98adf460363a8dc0ecd8fbfbaf)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/8250fbd4
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/8250fbd4
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/8250fbd4
Branch: refs/heads/oya
Commit: 8250fbd49c75382090b052291f2da7a4e4d17f5d
Parents: 7bd1cfb
Author: jvishwakarma <jv...@walmartlabs.com>
Authored: Wed Nov 2 23:01:00 2016 +0530
Committer: Robert Kanter <rk...@cloudera.com>
Committed: Wed Nov 9 09:59:23 2016 +0100
----------------------------------------------------------------------
.../org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java | 2 +-
release-log.txt | 1 +
2 files changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/8250fbd4/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java b/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
index 9e439b4..a3c7b14 100644
--- a/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
+++ b/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
@@ -1379,7 +1379,7 @@ public class TestLiteWorkflowAppParser extends XTestCase {
} catch (WorkflowException we) {
assertEquals(ErrorCode.E0757, we.getErrorCode());
assertTrue(we.getMessage().contains("Fork node [f]"));
- assertTrue(we.getMessage().contains("[j2,j1]"));
+ assertTrue(we.getMessage().contains("[j2,j1]") || we.getMessage().contains("[j1,j2]"));
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/8250fbd4/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 565e844..70ffaa6 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -9,6 +9,7 @@ OOZIE-2634 Queue dump command message is confusing when the queue is empty (andr
-- Oozie 4.3.0 release
+OOZIE-2719 Test case failure (abhishekbafna via jaydeepvishwakarma)
OOZIE-2674 Improve oozie commads documentation (abhishekbafna via rkanter)
OOZIE-2710 Oozie HCatalog example workflow fails (abhishekbafna via shwethags)
OOZIE-2705 Oozie Spark action ignores spark.executor.extraJavaOptions and spark.driver.extraJavaOptions (gezapeti via rkanter)
[20/48] oozie git commit: amending release-log.txt for OOZIE-2690
Posted by pb...@apache.org.
amending release-log.txt for OOZIE-2690
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/53554e89
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/53554e89
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/53554e89
Branch: refs/heads/oya
Commit: 53554e89aeb472b0ccecfa31c9788580010834c6
Parents: 0533700
Author: jvishwakarma <jv...@walmartlabs.com>
Authored: Mon Nov 21 15:13:37 2016 +0530
Committer: jvishwakarma <jv...@walmartlabs.com>
Committed: Mon Nov 21 15:13:37 2016 +0530
----------------------------------------------------------------------
release-log.txt | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/53554e89/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index acfac17..e85778e 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
-- Oozie 4.4.0 release (trunk - unreleased)
+OOZIE-2690 OOZIE NPE while executing kill() (abhishekbafna via jaydeepvishwakarma)
OOZIE-2737 testConfigDefaultPropsToAction is flaky (satishsaley via rohini)
OOZIE-2666 Support embedding Jetty into Oozie (asasvari via rkanter)
OOZIE-1459 Remove the version in the child poms for maven-antrun-plugin (Jan Hentschel via rkanter)
[11/48] oozie git commit: OOZIE-2725 Upgrade Tomcat to 6.0.47 for the
latest security fixes (rkanter via shwethags)
Posted by pb...@apache.org.
OOZIE-2725 Upgrade Tomcat to 6.0.47 for the latest security fixes (rkanter via shwethags)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/86009e9d
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/86009e9d
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/86009e9d
Branch: refs/heads/oya
Commit: 86009e9dac6cfea86684c0aa6d3c324af986390e
Parents: 7cebc26
Author: Shwetha GS <ss...@hortonworks.com>
Authored: Tue Nov 15 15:20:41 2016 +0530
Committer: Shwetha GS <ss...@hortonworks.com>
Committed: Tue Nov 15 15:20:41 2016 +0530
----------------------------------------------------------------------
docs/src/site/twiki/DG_QuickStart.twiki | 2 +-
docs/src/site/twiki/ENG_Building.twiki | 2 +-
pom.xml | 4 ++--
release-log.txt | 1 +
4 files changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/86009e9d/docs/src/site/twiki/DG_QuickStart.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/DG_QuickStart.twiki b/docs/src/site/twiki/DG_QuickStart.twiki
index 1eb02a9..6dec179 100644
--- a/docs/src/site/twiki/DG_QuickStart.twiki
+++ b/docs/src/site/twiki/DG_QuickStart.twiki
@@ -50,7 +50,7 @@ profile depending on the hadoop version used.
-Dpig.classifier=<classifier> - default none
-Dsqoop.version=<version> - default 1.4.3
-Dsqoop.classifier=<classifier> - default hadoop100
--Dtomcat.version=<version> - default 6.0.41
+-Dtomcat.version=<version> - default 6.0.47
-Dopenjpa.version=<version> - default 2.2.2
-Dxerces.version=<version> - default 2.10.0
-Dcurator.version=<version> - default 2.5.0
http://git-wip-us.apache.org/repos/asf/oozie/blob/86009e9d/docs/src/site/twiki/ENG_Building.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/ENG_Building.twiki b/docs/src/site/twiki/ENG_Building.twiki
index 3560a9b..0766976 100644
--- a/docs/src/site/twiki/ENG_Building.twiki
+++ b/docs/src/site/twiki/ENG_Building.twiki
@@ -221,7 +221,7 @@ profile depending on the hadoop version used.
-Dpig.classifier=<classifier> - default none
-Dsqoop.version=<version> - default 1.4.3
-Dsqoop.classifier=<classifier> - default hadoop100
--Dtomcat.version=<version> - default 6.0.41
+-Dtomcat.version=<version> - default 6.0.47
-Dopenjpa.version=<version> - default 2.2.2
-Dxerces.version=<version> - default 2.10.0
-Dcurator.version=<version> - default 2.5.0
http://git-wip-us.apache.org/repos/asf/oozie/blob/86009e9d/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index c9a19de..acedc6e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -107,8 +107,8 @@
<jetty.version>9.2.19.v20160908</jetty.version>
- <!-- Tomcat version -->
- <tomcat.version>6.0.44</tomcat.version>
+ <!-- Tomcat version -->
+ <tomcat.version>6.0.47</tomcat.version>
<jline.version>0.9.94</jline.version>
<openjpa.version>2.4.1</openjpa.version>
<xerces.version>2.10.0</xerces.version>
http://git-wip-us.apache.org/repos/asf/oozie/blob/86009e9d/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index fead396..9dd6adb 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -10,6 +10,7 @@ OOZIE-2634 Queue dump command message is confusing when the queue is empty (andr
-- Oozie 4.3.0 release
+OOZIE-2725 Upgrade Tomcat to 6.0.47 for the latest security fixes (rkanter via shwethags)
OOZIE-2724 coord:current resolves monthly/yearly dependencies incorrectly (satishsaley via shwethags)
OOZIE-2719 Test case failure (abhishekbafna via jaydeepvishwakarma)
OOZIE-2674 Improve oozie commads documentation (abhishekbafna via rkanter)
[43/48] oozie git commit: OOZIE-2740 oozie help misspelled
coordinator (coordiantor) and retrieved (retreived) (gsohn via rkanter)
Posted by pb...@apache.org.
OOZIE-2740 oozie help misspelled coordinator (coordiantor) and retrieved (retreived) (gsohn via rkanter)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/ab6ae75e
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/ab6ae75e
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/ab6ae75e
Branch: refs/heads/oya
Commit: ab6ae75e316be2d7eb658f929cf557520984573d
Parents: 93c17cc
Author: Robert Kanter <rk...@cloudera.com>
Authored: Thu Dec 1 09:03:31 2016 -0800
Committer: Robert Kanter <rk...@cloudera.com>
Committed: Thu Dec 1 09:03:31 2016 -0800
----------------------------------------------------------------------
.../java/org/apache/oozie/cli/OozieCLI.java | 22 ++++++++++++--------
release-log.txt | 1 +
2 files changed, 14 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/ab6ae75e/client/src/main/java/org/apache/oozie/cli/OozieCLI.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/oozie/cli/OozieCLI.java b/client/src/main/java/org/apache/oozie/cli/OozieCLI.java
index 807cf22..e67fae9 100644
--- a/client/src/main/java/org/apache/oozie/cli/OozieCLI.java
+++ b/client/src/main/java/org/apache/oozie/cli/OozieCLI.java
@@ -327,7 +327,7 @@ public class OozieCLI {
Option len = new Option(LEN_OPTION, true, "number of actions (default TOTAL ACTIONS, requires -info)");
Option filter = new Option(FILTER_OPTION, true,
"<key><comparator><value>[;<key><comparator><value>]*\n"
- + "(All Coordinator actions satisfying the filters will be retreived).\n"
+ + "(All Coordinator actions satisfying the filters will be retrieved).\n"
+ "key: status or nominaltime\n"
+ "comparator: =, !=, <, <=, >, >=. = is used as OR and others as AND\n"
+ "status: values are valid status like SUCCEEDED, KILLED etc. Only = and != apply for status\n"
@@ -352,12 +352,13 @@ public class OozieCLI {
"coordinator rerun/kill on action ids (requires -rerun/-kill); coordinator log retrieval on action ids"
+ "(requires -log)");
Option date = new Option(DATE_OPTION, true,
- "coordinator/bundle rerun on action dates (requires -rerun); coordinator log retrieval on action dates (requires -log)");
+ "coordinator/bundle rerun on action dates (requires -rerun); "
+ + "coordinator log retrieval on action dates (requires -log)");
Option rerun_coord = new Option(COORD_OPTION, true, "bundle rerun on coordinator names (requires -rerun)");
Option rerun_refresh = new Option(RERUN_REFRESH_OPTION, false,
"re-materialize the coordinator rerun actions (requires -rerun)");
Option rerun_nocleanup = new Option(RERUN_NOCLEANUP_OPTION, false,
- "do not clean up output-events of the coordiantor rerun actions (requires -rerun)");
+ "do not clean up output-events of the coordinator rerun actions (requires -rerun)");
Option rerun_failed = new Option(RERUN_FAILED_OPTION, false,
"runs the failed workflow actions of the coordinator actions (requires -rerun)");
Option property = OptionBuilder.withArgName("property=value").hasArgs(2).withValueSeparator().withDescription(
@@ -456,10 +457,10 @@ public class OozieCLI {
Option filter = new Option(FILTER_OPTION, true,
"text=<*>\\;user=<U>\\;name=<N>\\;group=<G>\\;status=<S>\\;frequency=<F>\\;unit=<M>" +
"\\;startcreatedtime=<SC>\\;endcreatedtime=<EC> \\;sortBy=<SB>\n" +
- "(text filter: matches partially with name and user or complete match with job ID" +
- "valid unit values are 'months', 'days', 'hours' or 'minutes'. " +
+ "(text filter: matches partially with name and user or complete match with job ID. " +
+ "Valid unit values are 'months', 'days', 'hours' or 'minutes'. " +
"startcreatedtime, endcreatedtime: time of format yyyy-MM-dd'T'HH:mm'Z'. " +
- "valid values for sortBy are 'createdTime' or 'lastModifiedTime'.)");
+ "Valid values for sortBy are 'createdTime' or 'lastModifiedTime'.)");
Option localtime = new Option(LOCAL_TIME_OPTION, false, "use local time (same as passing your time zone to -" +
TIME_ZONE_OPTION + "). Overrides -" + TIME_ZONE_OPTION + " option");
Option kill = new Option(KILL_OPTION, false, "bulk kill operation");
@@ -471,8 +472,10 @@ public class OozieCLI {
Option doAs = new Option(DO_AS_OPTION, true, "doAs user, impersonates as the specified user");
Option bulkMonitor = new Option(BULK_OPTION, true, "key-value pairs to filter bulk jobs response. e.g. bundle=<B>\\;" +
"coordinators=<C>\\;actionstatus=<S>\\;startcreatedtime=<SC>\\;endcreatedtime=<EC>\\;" +
- "startscheduledtime=<SS>\\;endscheduledtime=<ES>\\; bundle, coordinators and actionstatus can be multiple comma separated values" +
- "bundle and coordinators can be id(s) or appName(s) of those jobs. Specifying bundle is mandatory, other params are optional");
+ "startscheduledtime=<SS>\\;endscheduledtime=<ES>\\; bundle, " +
+ "coordinators and actionstatus can be multiple comma separated values. " +
+ "Bundle and coordinators can be id(s) or appName(s) of those jobs. " +
+ "Specifying bundle is mandatory, other params are optional");
start.setType(Integer.class);
len.setType(Integer.class);
Options jobsOptions = new Options();
@@ -1369,7 +1372,8 @@ public class OozieCLI {
for (CoordinatorAction action : actions) {
System.out.println(String.format(COORD_ACTION_FORMATTER, maskIfNull(action.getId()),
action.getStatus(), maskIfNull(action.getExternalId()), maskIfNull(action.getErrorCode()),
- maskDate(action.getCreatedTime(), timeZoneId, verbose), maskDate(action.getNominalTime(), timeZoneId, verbose),
+ maskDate(action.getCreatedTime(), timeZoneId, verbose),
+ maskDate(action.getNominalTime(), timeZoneId, verbose),
maskDate(action.getLastModifiedTime(), timeZoneId, verbose)));
System.out.println(RULER);
http://git-wip-us.apache.org/repos/asf/oozie/blob/ab6ae75e/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index bd7219c..c3b7ab7 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
-- Oozie 4.4.0 release (trunk - unreleased)
+OOZIE-2740 oozie help misspelled coordinator (coordiantor) and retrieved (retreived) (gsohn via rkanter)
OOZIE-2690 OOZIE NPE while executing kill() (abhishekbafna via jaydeepvishwakarma)
OOZIE-2737 testConfigDefaultPropsToAction is flaky (satishsaley via rohini)
OOZIE-2666 Support embedding Jetty into Oozie (asasvari via rkanter)
[35/48] oozie git commit: Revert "OOZIE-2729 change JT to RM in
methods"
Posted by pb...@apache.org.
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigActionExecutor.java b/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigActionExecutor.java
index 5e5e94a..6ab9fce 100644
--- a/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigActionExecutor.java
+++ b/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigActionExecutor.java
@@ -92,7 +92,7 @@ public class TestPigActionExecutor extends ActionExecutorTestCase {
assertEquals(classes, ae.getLauncherClasses());
Element actionXml = XmlUtils.parseXml("<pig>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<script>SCRIPT</script>" +
"<param>a=A</param>" +
@@ -342,7 +342,7 @@ public class TestPigActionExecutor extends ActionExecutorTestCase {
w.close();
String actionXml = "<pig>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
setPigConfig(writeStats).toXmlString(false) +
"<script>" + script.getName() + "</script>" +
@@ -376,7 +376,7 @@ public class TestPigActionExecutor extends ActionExecutorTestCase {
w.close();
String actionXml = "<pig>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
setPigConfig(true).toXmlString(false) +
"<script>" + script.getName() + "</script>" +
@@ -421,7 +421,7 @@ public class TestPigActionExecutor extends ActionExecutorTestCase {
getFileSystem().create(rootArchive).close();
String actionXml = "<pig>" +
- " <job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ " <job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
" <name-node>" + getNameNodeUri() + "</name-node>" +
" <script>id.pig</script>" +
" <file>" + jar.toString() +
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestPyspark.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestPyspark.java b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestPyspark.java
index a0ff58b..9d8d4aa 100644
--- a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestPyspark.java
+++ b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestPyspark.java
@@ -24,9 +24,15 @@ import java.util.ArrayList;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobID;
+import org.apache.hadoop.mapred.RunningJob;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
+import org.apache.oozie.service.HadoopAccessorService;
+import org.apache.oozie.service.Services;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XConfiguration;
@@ -55,7 +61,7 @@ public class TestPyspark extends ActionExecutorTestCase {
"<jar>" + PI_EXAMPLE + "</jar>" +
"<spark-opts>" +sparkOpts +"</spark-opts>" +
"</spark>";
- return MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri());
+ return MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri());
}
public void testPyspark() throws Exception {
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkActionExecutor.java b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkActionExecutor.java
index 51e412c..d97f1f0 100644
--- a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkActionExecutor.java
+++ b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkActionExecutor.java
@@ -21,10 +21,16 @@ package org.apache.oozie.action.hadoop;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobID;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.service.ConfigurationService;
+import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.SparkConfigurationService;
import org.apache.oozie.service.WorkflowAppService;
@@ -43,6 +49,7 @@ import java.io.Writer;
import java.text.MessageFormat;
import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
@@ -85,7 +92,7 @@ public class TestSparkActionExecutor extends ActionExecutorTestCase {
SparkConfigurationService scs = Services.get().get(SparkConfigurationService.class);
scs.destroy();
ConfigurationService.set("oozie.service.SparkConfigurationService.spark.configurations",
- getResourceManagerUri() + "=" + sparkConfDir.getAbsolutePath());
+ getJobTrackerUri() + "=" + sparkConfDir.getAbsolutePath());
scs.init(Services.get());
_testSetupMethods("local[*]", new HashMap<String, String>(), "client");
@@ -102,7 +109,7 @@ public class TestSparkActionExecutor extends ActionExecutorTestCase {
assertEquals(Arrays.asList(SparkMain.class), ae.getLauncherClasses());
Element actionXml = XmlUtils.parseXml("<spark>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<master>" + master + "</master>" +
(mode != null ? "<mode>" + mode + "</mode>" : "") +
@@ -155,7 +162,7 @@ public class TestSparkActionExecutor extends ActionExecutorTestCase {
"<arg>" + getAppPath() + "/" + OUTPUT + "</arg>" +
"<spark-opts>--conf " +SPARK_TESTING_MEMORY+"</spark-opts>"+
"</spark>";
- return MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri());
+ return MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri());
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java b/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
index 691752d..61d295c 100644
--- a/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
+++ b/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
@@ -22,12 +22,21 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobID;
+import org.apache.hadoop.mapred.RunningJob;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
+import org.apache.oozie.service.HadoopAccessorService;
+import org.apache.oozie.service.Services;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XConfiguration;
+import org.apache.oozie.util.XmlUtils;
+import org.jdom.Element;
+import org.jdom.Namespace;
import java.io.BufferedReader;
import java.io.File;
@@ -35,6 +44,7 @@ import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
+import java.io.StringReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
@@ -42,6 +52,7 @@ import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+import java.util.Properties;
public class TestSqoopActionExecutor extends ActionExecutorTestCase {
@@ -136,19 +147,19 @@ public class TestSqoopActionExecutor extends ActionExecutorTestCase {
private String getActionXml() {
String command = MessageFormat.format(SQOOP_COMMAND, getActionJdbcUri(), getSqoopOutputDir());
- return MessageFormat.format(SQOOP_ACTION_COMMAND_XML, getResourceManagerUri(), getNameNodeUri(),
+ return MessageFormat.format(SQOOP_ACTION_COMMAND_XML, getJobTrackerUri(), getNameNodeUri(),
"dummy", "dummyValue", command);
}
private String getActionXmlEval() {
String query = "select TT.I, TT.S from TT";
- return MessageFormat.format(SQOOP_ACTION_EVAL_XML, getResourceManagerUri(), getNameNodeUri(),
+ return MessageFormat.format(SQOOP_ACTION_EVAL_XML, getJobTrackerUri(), getNameNodeUri(),
getActionJdbcUri(), query);
}
private String getActionXmlFreeFromQuery() {
String query = "select TT.I, TT.S from TT where $CONDITIONS";
- return MessageFormat.format(SQOOP_ACTION_ARGS_XML, getResourceManagerUri(), getNameNodeUri(),
+ return MessageFormat.format(SQOOP_ACTION_ARGS_XML, getJobTrackerUri(), getNameNodeUri(),
getActionJdbcUri(), query, getSqoopOutputDir());
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
index 78936c4..fc75968 100644
--- a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
+++ b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
@@ -86,7 +86,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
}
public Element createUberJarActionXML(String uberJarPath, String additional) throws Exception{
- return XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ return XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + additional + "<configuration>"
+ "<property><name>oozie.mapreduce.uber.jar</name><value>" + uberJarPath + "</value></property>"
+ "</configuration>" + "</map-reduce>");
@@ -125,7 +125,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
Configuration conf = new XConfiguration();
conf.set("nameNode", getNameNodeUri());
- conf.set("jobTracker", getResourceManagerUri());
+ conf.set("jobTracker", getJobTrackerUri());
conf.set(OozieClient.USER_NAME, getTestUser());
conf.set(OozieClient.APP_PATH, new File(getTestCaseDir(), "workflow.xml").toURI().toString());
conf.set(OozieClient.LOG_TOKEN, "t");
@@ -151,7 +151,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
Element eConf = eAction.getChild("name-node", eAction.getNamespace());
assertEquals(getNameNodeUri(), eConf.getText());
eConf = eAction.getChild("job-tracker", eAction.getNamespace());
- assertEquals(getResourceManagerUri(), eConf.getText());
+ assertEquals(getJobTrackerUri(), eConf.getText());
// check other m-r settings
eConf = eAction.getChild("configuration", eAction.getNamespace());
@@ -220,7 +220,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
List<Class<?>> classes = Arrays.<Class<?>>asList(StreamingMain.class);
assertEquals(classes, ae.getLauncherClasses());
- Element actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ Element actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.input.dir</name><value>IN</value></property>"
+ "<property><name>mapred.output.dir</name><value>OUT</value></property>" + "</configuration>"
@@ -282,7 +282,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
launcherJobConf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, conf);
assertNull(launcherJobConf.getJar()); // same for launcher conf (not set)
- actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + "</map-reduce>");
conf = ae.createBaseHadoopConf(context, actionXml);
ae.setupActionConf(conf, context, actionXml, getFsTestCaseDir());
@@ -305,7 +305,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
}
serv.getConf().setBoolean("oozie.action.mapreduce.uber.jar.enable", originalUberJarDisabled);
- actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + "<streaming>" + "<mapper>M</mapper>"
+ "<reducer>R</reducer>" + "<record-reader>RR</record-reader>"
+ "<record-reader-mapping>RRM1=1</record-reader-mapping>"
@@ -323,7 +323,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
assertEquals("2", conf.get("oozie.streaming.record-reader-mapping.size"));
assertEquals("2", conf.get("oozie.streaming.env.size"));
- actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + "<pipes>" + "<map>M</map>" + "<reduce>R</reduce>"
+ "<inputformat>IF</inputformat>" + "<partitioner>P</partitioner>" + "<writer>W</writer>"
+ "<program>PP</program>" + "</pipes>" + "<configuration>"
@@ -540,7 +540,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ getMapReduceConfig(inputDir.toString(), outputDir.toString()).toXmlString(false) + "</map-reduce>";
_testSubmit(MAP_REDUCE, actionXml);
@@ -561,7 +561,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
ow.close();
String actionXml = "<map-reduce>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<configuration>" +
"<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName() +
@@ -591,7 +591,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
XConfiguration conf = getMapReduceConfig(inputDir.toString(), outputDir.toString());
conf.set(MapperReducerForTest.JOB_XML_OUTPUT_LOCATION, jobXml.toUri().toString());
conf.set("B", "b");
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ conf.toXmlString(false)
+ "<config-class>" + OozieActionConfiguratorForTest.class.getName() + "</config-class>" + "</map-reduce>";
@@ -615,7 +615,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ getMapReduceConfig(inputDir.toString(), outputDir.toString()).toXmlString(false)
+ "<config-class>org.apache.oozie.does.not.exist</config-class>" + "</map-reduce>";
@@ -645,7 +645,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
XConfiguration conf = getMapReduceConfig(inputDir.toString(), outputDir.toString());
conf.setBoolean("oozie.test.throw.exception", true); // causes OozieActionConfiguratorForTest to throw an exception
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ conf.toXmlString(false)
+ "<config-class>" + OozieActionConfiguratorForTest.class.getName() + "</config-class>" + "</map-reduce>";
@@ -710,7 +710,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ getMapReduceCredentialsConfig(inputDir.toString(), outputDir.toString()).toXmlString(false)
+ "</map-reduce>";
@@ -774,7 +774,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ getMapReduceUberJarConfig(inputDir.toString(), outputDir.toString()).toXmlString(false) + "</map-reduce>";
String jobID = _testSubmit(MAP_REDUCE, actionXml);
@@ -862,7 +862,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + " <streaming>" + " <mapper>cat</mapper>"
+ " <reducer>wc</reducer>" + " </streaming>"
+ streamingConf.toXmlString(false) + "<file>"
@@ -952,7 +952,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + " <pipes>" + " <program>" + programPath
+ "#wordcount-simple" + "</program>" + " </pipes>"
+ getPipesConfig(inputDir.toString(), outputDir.toString()).toXmlString(false) + "<file>"
@@ -983,7 +983,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
// configuration.
String actionXml = "<map-reduce>"
+ "<job-tracker>"
- + getResourceManagerUri()
+ + getJobTrackerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1055,7 +1055,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
// configuration.
String actionXml = "<map-reduce>"
+ "<job-tracker>"
- + getResourceManagerUri()
+ + getJobTrackerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1122,7 +1122,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
// configuration.
String actionXml = "<map-reduce>"
+ "<job-tracker>"
- + getResourceManagerUri()
+ + getJobTrackerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1161,7 +1161,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
actionXml = "<map-reduce>"
+ "<job-tracker>"
- + getResourceManagerUri()
+ + getJobTrackerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1206,7 +1206,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
mrConfig.set("mapred.job.name", mapredJobName);
StringBuilder sb = new StringBuilder("<map-reduce>")
- .append("<job-tracker>").append(getResourceManagerUri())
+ .append("<job-tracker>").append(getJobTrackerUri())
.append("</job-tracker>").append("<name-node>")
.append(getNameNodeUri()).append("</name-node>")
.append(mrConfig.toXmlString(false)).append("</map-reduce>");
@@ -1303,7 +1303,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
getFileSystem().create(rootArchive).close();
String actionXml = "<map-reduce>" +
- " <job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ " <job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
" <name-node>" + getNameNodeUri() + "</name-node>" +
" <main-class>CLASS</main-class>" +
" <file>" + jar.toString() +
[13/48] oozie git commit: updated release-log
Posted by pb...@apache.org.
updated release-log
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/3ee71d4f
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/3ee71d4f
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/3ee71d4f
Branch: refs/heads/oya
Commit: 3ee71d4ff5a5d74f8da7f92db0a1b096e1e1c5fc
Parents: 586f522
Author: Shwetha GS <ss...@hortonworks.com>
Authored: Wed Nov 16 10:32:38 2016 +0530
Committer: Shwetha GS <ss...@hortonworks.com>
Committed: Wed Nov 16 10:32:38 2016 +0530
----------------------------------------------------------------------
release-log.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/3ee71d4f/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index add46f9..5e9b8ff 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -10,7 +10,7 @@ OOZIE-2634 Queue dump command message is confusing when the queue is empty (andr
-- Oozie 4.3.0 release
-OOZIE-2723 JSON.org license is now CatX (abhishekbafna via shwethags)
+OOZIE-2723 JSON.org license is now CatX (rkanter, abhishekbafna via shwethags)
OOZIE-2725 Upgrade Tomcat to 6.0.47 for the latest security fixes (rkanter via shwethags)
OOZIE-2724 coord:current resolves monthly/yearly dependencies incorrectly (satishsaley via shwethags)
OOZIE-2719 Test case failure (abhishekbafna via jaydeepvishwakarma)
[34/48] oozie git commit: Revert "test server resources"
Posted by pb...@apache.org.
Revert "test server resources"
This reverts commit 095c584b5168ebea32fca46683fb05c072a204bc.
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/04b96c66
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/04b96c66
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/04b96c66
Branch: refs/heads/oya
Commit: 04b96c66cafb5b5ef62bb52bd334ed87497c147e
Parents: 0e9162d
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Mon Nov 28 13:53:43 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Mon Nov 28 13:53:43 2016 +0100
----------------------------------------------------------------------
.../test/java/org/apache/oozie/test/TestConfigurations.java | 7 +++----
core/src/test/java/org/apache/oozie/test/XTestCase.java | 5 ++---
2 files changed, 5 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/04b96c66/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestConfigurations.java b/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
index 6942aad..c6c1d23 100644
--- a/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
+++ b/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
@@ -36,8 +36,7 @@ class TestConfigurations {
yarnConfig.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
yarnConfig.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:0");
- yarnConfig.set(YarnConfiguration.NM_VCORES, "16");
- yarnConfig.set(YarnConfiguration.NM_PMEM_MB, "16000");
+
return yarnConfig;
}
@@ -57,10 +56,10 @@ class TestConfigurations {
return jobConf;
}
- JobConf createPristineJobConf(final String resouceManagerUri, final String nameNodeUri) {
+ JobConf createPristineJobConf(final String jobTrackerUri, final String nameNodeUri) {
final JobConf jobConf = new JobConf();
- jobConf.set("yarn.resourcemanager.address", resouceManagerUri);
+ jobConf.set("mapred.job.tracker", jobTrackerUri);
jobConf.set("fs.default.name", nameNodeUri);
return jobConf;
http://git-wip-us.apache.org/repos/asf/oozie/blob/04b96c66/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index 53d0a97..3f39f31 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -34,7 +34,6 @@ import junit.framework.TestCase;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -511,7 +510,7 @@ public abstract class XTestCase extends TestCase {
private static MiniDFSCluster dfsCluster = null;
private static MiniDFSCluster dfsCluster2 = null;
- private static MiniMRYarnCluster yarnCluster = null;
+ private static MiniYARNCluster yarnCluster = null;
private static MiniHCatServer hcatServer = null;
private static MiniHS2 hiveserver2 = null;
private static HiveConf hs2Config = null;
@@ -562,7 +561,7 @@ public abstract class XTestCase extends TestCase {
private void setupYarnCluster(final JobConf dfsConfig) {
final Configuration yarnConfig = testConfigurations.createYarnConfig(dfsConfig);
- yarnCluster = new MiniMRYarnCluster(this.getClass().getName());
+ yarnCluster = new MiniYARNCluster(this.getClass().getName(), 1, 1, 1, 1);
yarnCluster.init(yarnConfig);
yarnCluster.start();
}
[21/48] oozie git commit: OOZIE-2591 Fix recovery handling
Posted by pb...@apache.org.
OOZIE-2591 Fix recovery handling
Change-Id: I7501411b2bdcdc1962e5ac77082a71c96b205902
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/3b6daff5
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/3b6daff5
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/3b6daff5
Branch: refs/heads/oya
Commit: 3b6daff59a58c5b5b30a123c4ca75d6c20b4e30d
Parents: ba68347
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Fri Nov 18 11:13:00 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Mon Nov 21 14:28:36 2016 +0100
----------------------------------------------------------------------
.../oozie/action/hadoop/JavaActionExecutor.java | 81 ++++++++-----------
.../oozie/action/hadoop/LauncherMainTester.java | 1 +
.../oozie/service/TestRecoveryService.java | 26 ++-----
.../oozie/action/hadoop/HdfsOperations.java | 50 ++++++++++++
.../apache/oozie/action/hadoop/LauncherAM.java | 78 +++++++++++--------
.../oozie/action/hadoop/TestLauncherAM.java | 82 +++++++++++++++++++-
6 files changed, 211 insertions(+), 107 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/3b6daff5/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
index 284690b..2ec5266 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AccessControlException;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
@@ -103,6 +104,7 @@ import org.jdom.JDOMException;
import org.jdom.Namespace;
import com.google.common.collect.ImmutableList;
+import com.google.common.io.Closeables;
public class JavaActionExecutor extends ActionExecutor {
@@ -958,6 +960,7 @@ public class JavaActionExecutor extends ActionExecutor {
public void submitLauncher(FileSystem actionFs, final Context context, WorkflowAction action) throws ActionExecutorException {
JobClient jobClient = null;
boolean exception = false;
+ YarnClient yarnClient = null;
try {
Path appPathRoot = new Path(context.getWorkflow().getAppPath());
@@ -1014,23 +1017,23 @@ public class JavaActionExecutor extends ActionExecutor {
}
JobConf launcherJobConf = createLauncherConf(actionFs, context, action, actionXml, actionConf);
- boolean alreadyRunning = false;
- String launcherId = null;
- String consoleUrl = null;
- // TODO: OYA: equivalent of this? (recovery, alreadyRunning) When does this happen?
-// LOG.debug("Creating Job Client for action " + action.getId());
-// jobClient = createJobClient(context, launcherJobConf);
-// launcherId = LauncherMapperHelper.getRecoveryId(launcherJobConf, context.getActionDir(), context
-// .getRecoveryId());
-// alreadyRunning = launcherId != null;
- RunningJob runningJob;
+ String consoleUrl;
+ String launcherId = LauncherMapperHelper.getRecoveryId(launcherJobConf, context.getActionDir(), context
+ .getRecoveryId());
+ boolean alreadyRunning = launcherId != null;
// if user-retry is on, always submit new launcher
boolean isUserRetry = ((WorkflowActionBean)action).isUserRetry();
+ yarnClient = createYarnClient(context, launcherJobConf);
if (alreadyRunning && !isUserRetry) {
- runningJob = jobClient.getJob(JobID.forName(launcherId));
- if (runningJob == null) {
+ try {
+ ApplicationId appId = ConverterUtils.toApplicationId(launcherId);
+ ApplicationReport report = yarnClient.getApplicationReport(appId);
+ consoleUrl = report.getTrackingUrl();
+ } catch (RemoteException e) {
+ // caught when the application id does not exist
+ LOG.error("Got RemoteException from YARN", e);
String jobTracker = launcherJobConf.get(HADOOP_YARN_RM);
throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA017",
"unknown job [{0}@{1}], cannot recover", launcherId, jobTracker);
@@ -1070,32 +1073,18 @@ public class JavaActionExecutor extends ActionExecutor {
LOG.info("No need to inject credentials.");
}
- YarnClient yarnClient = null;
- try {
- String user = context.getWorkflow().getUser();
-
- // Create application
- yarnClient = createYarnClient(context, launcherJobConf);
- YarnClientApplication newApp = yarnClient.createApplication();
- ApplicationId appId = newApp.getNewApplicationResponse().getApplicationId();
-
- // Create launch context for app master
- ApplicationSubmissionContext appContext =
- createAppSubmissionContext(appId, launcherJobConf, user, context, actionConf);
-
- // Submit the launcher AM
- yarnClient.submitApplication(appContext);
-
- launcherId = appId.toString();
- LOG.debug("After submission get the launcherId [{0}]", launcherId);
- ApplicationReport appReport = yarnClient.getApplicationReport(appId);
- consoleUrl = appReport.getTrackingUrl();
- } finally {
- if (yarnClient != null) {
- yarnClient.close();
- yarnClient = null;
- }
- }
+ String user = context.getWorkflow().getUser();
+
+ YarnClientApplication newApp = yarnClient.createApplication();
+ ApplicationId appId = newApp.getNewApplicationResponse().getApplicationId();
+ ApplicationSubmissionContext appContext =
+ createAppSubmissionContext(appId, launcherJobConf, user, context, actionConf);
+ yarnClient.submitApplication(appContext);
+
+ launcherId = appId.toString();
+ LOG.debug("After submission get the launcherId [{0}]", launcherId);
+ ApplicationReport appReport = yarnClient.getApplicationReport(appId);
+ consoleUrl = appReport.getTrackingUrl();
}
String jobTracker = launcherJobConf.get(HADOOP_YARN_RM);
@@ -1106,6 +1095,10 @@ public class JavaActionExecutor extends ActionExecutor {
throw convertException(ex);
}
finally {
+ if (yarnClient != null) {
+ Closeables.closeQuietly(yarnClient);
+ }
+
if (jobClient != null) {
try {
jobClient.close();
@@ -1126,26 +1119,16 @@ public class JavaActionExecutor extends ActionExecutor {
Context context, Configuration actionConf)
throws IOException, HadoopAccessorException, URISyntaxException {
- // Create launch context for app master
ApplicationSubmissionContext appContext = Records.newRecord(ApplicationSubmissionContext.class);
- // set the application id
appContext.setApplicationId(appId);
-
- // set the application name
appContext.setApplicationName(launcherJobConf.getJobName());
appContext.setApplicationType("Oozie Launcher");
-
- // Set the priority for the application master
Priority pri = Records.newRecord(Priority.class);
int priority = 0; // TODO: OYA: Add a constant or a config
pri.setPriority(priority);
appContext.setPriority(pri);
-
- // Set the queue to which this application is to be submitted in the RM
appContext.setQueue(launcherJobConf.getQueueName());
-
- // Set up the container launch context for the application master
ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);
// Set the resources to localize
@@ -1193,7 +1176,7 @@ public class JavaActionExecutor extends ActionExecutor {
vargs.add("-Dhadoop.root.logger=INFO,CLA");
vargs.add("-Dhadoop.root.logfile=" + TaskLog.LogName.SYSLOG);
vargs.add("-Dsubmitter.user=" + context.getWorkflow().getUser());
- vargs.add("org.apache.oozie.action.hadoop.LauncherAM"); // note: using string temporarily so we don't have to depend on sharelib-oozie
+ vargs.add(LauncherAM.class.getCanonicalName());
vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR +
Path.SEPARATOR + ApplicationConstants.STDOUT);
vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR +
http://git-wip-us.apache.org/repos/asf/oozie/blob/3b6daff5/core/src/test/java/org/apache/oozie/action/hadoop/LauncherMainTester.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/LauncherMainTester.java b/core/src/test/java/org/apache/oozie/action/hadoop/LauncherMainTester.java
index 4baed6e..c2aae4c 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/LauncherMainTester.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/LauncherMainTester.java
@@ -30,6 +30,7 @@ public class LauncherMainTester {
if (args.length == 0) {
System.out.println("Hello World!");
}
+
if (args.length == 1) {
if (args[0].equals("throwable")) {
throw new Throwable("throwing throwable");
http://git-wip-us.apache.org/repos/asf/oozie/blob/3b6daff5/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java b/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
index 8fd0c2d..a3270e9 100644
--- a/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
@@ -21,10 +21,7 @@ package org.apache.oozie.service;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.RunningJob;
import org.apache.oozie.BundleActionBean;
import org.apache.oozie.BundleJobBean;
import org.apache.oozie.CoordinatorActionBean;
@@ -250,20 +247,10 @@ public class TestRecoveryService extends XDataTestCase {
ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(job1, action1, false, false);
MapReduceActionExecutor actionExecutor = new MapReduceActionExecutor();
JobConf conf = actionExecutor.createBaseHadoopConf(context, XmlUtils.parseXml(action1.getConf()));
- String user = conf.get("user.name");
- String group = conf.get("group.name");
- JobClient jobClient = Services.get().get(HadoopAccessorService.class).createJobClient(user, conf);
-
String launcherId = action1.getExternalId();
- final RunningJob launcherJob = jobClient.getJob(JobID.forName(launcherId));
+ waitUntilYarnAppDoneAndAssertSuccess(launcherId);
- waitFor(240 * 1000, new Predicate() {
- public boolean evaluate() throws Exception {
- return launcherJob.isComplete();
- }
- });
- assertTrue(launcherJob.isSuccessful());
Map<String, String> actionData = LauncherMapperHelper.getActionData(getFileSystem(), context.getActionDir(),
conf);
assertTrue(LauncherMapperHelper.hasIdSwap(actionData));
@@ -274,10 +261,8 @@ public class TestRecoveryService extends XDataTestCase {
* @throws Exception
*/
public void testBundleRecoveryCoordCreate() throws Exception {
- final BundleActionBean bundleAction;
- final BundleJobBean bundle;
- bundle = addRecordToBundleJobTable(Job.Status.RUNNING, false);
- bundleAction = addRecordToBundleActionTable(bundle.getId(), "coord1", 1, Job.Status.PREP);
+ final BundleJobBean bundle = addRecordToBundleJobTable(Job.Status.RUNNING, false);
+ addRecordToBundleActionTable(bundle.getId(), "coord1", 1, Job.Status.PREP);
final JPAService jpaService = Services.get().get(JPAService.class);
sleep(3000);
@@ -290,7 +275,7 @@ public class TestRecoveryService extends XDataTestCase {
jpaService.execute(new BundleActionGetJPAExecutor(bundle.getId(), "coord1"));
try {
if (mybundleAction.getCoordId() != null) {
- CoordinatorJobBean coord = jpaService.execute(new CoordJobGetJPAExecutor(mybundleAction.getCoordId()));
+ jpaService.execute(new CoordJobGetJPAExecutor(mybundleAction.getCoordId()));
return true;
}
} catch (Exception e) {
@@ -345,12 +330,11 @@ public class TestRecoveryService extends XDataTestCase {
* @throws Exception
*/
public void testBundleRecoveryCoordExists() throws Exception {
- final BundleActionBean bundleAction;
final BundleJobBean bundle;
final CoordinatorJob coord;
bundle = addRecordToBundleJobTable(Job.Status.RUNNING, false);
coord = addRecordToCoordJobTable(Job.Status.PREP, false, false);
- bundleAction = addRecordToBundleActionTable(bundle.getId(), coord.getId(), "coord1", 1, Job.Status.PREP);
+ addRecordToBundleActionTable(bundle.getId(), coord.getId(), "coord1", 1, Job.Status.PREP);
final JPAService jpaService = Services.get().get(JPAService.class);
sleep(3000);
http://git-wip-us.apache.org/repos/asf/oozie/blob/3b6daff5/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/HdfsOperations.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/HdfsOperations.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/HdfsOperations.java
index 593de00..6f354a8 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/HdfsOperations.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/HdfsOperations.java
@@ -17,12 +17,18 @@
*/
package org.apache.oozie.action.hadoop;
+import java.io.BufferedReader;
import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
import java.security.PrivilegedAction;
+import java.security.PrivilegedExceptionAction;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
@@ -85,4 +91,48 @@ public class HdfsOperations {
throw ioe;
}
}
+
+ public boolean fileExists(final Path path, final Configuration launcherJobConf) throws IOException, InterruptedException {
+ return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+ @Override
+ public Boolean run() throws Exception {
+ FileSystem fs = FileSystem.get(path.toUri(), launcherJobConf);
+ return fs.exists(path);
+ }
+ });
+ }
+
+ public void writeStringToFile(final Path path, final Configuration conf, final String contents) throws IOException, InterruptedException {
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ FileSystem fs = FileSystem.get(path.toUri(), conf);
+ java.io.Writer writer = new OutputStreamWriter(fs.create(path));
+ writer.write(contents);
+ writer.close();
+ return null;
+ }
+ });
+ }
+
+ public String readFileContents(final Path path, final Configuration conf) throws IOException, InterruptedException {
+ return ugi.doAs(new PrivilegedExceptionAction<String>() {
+ @Override
+ public String run() throws Exception {
+ FileSystem fs = FileSystem.get(path.toUri(), conf);
+ InputStream is = fs.open(path);
+ BufferedReader reader = new BufferedReader(new InputStreamReader(is));
+ StringBuilder sb = new StringBuilder();
+
+ String contents;
+ while ((contents = reader.readLine()) != null) {
+ sb.append(contents);
+ }
+
+ reader.close();
+
+ return sb.toString();
+ }
+ });
+ }
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/3b6daff5/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherAM.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherAM.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherAM.java
index 89357ad..881fa72 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherAM.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherAM.java
@@ -25,6 +25,7 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.security.Permission;
import java.security.PrivilegedAction;
+import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
@@ -34,7 +35,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
import org.apache.hadoop.yarn.exceptions.YarnException;
@@ -81,6 +83,7 @@ public class LauncherAM {
private final PrepareActionsHandler prepareHandler;
private final LauncherAMCallbackNotifierFactory callbackNotifierFactory;
private final LauncherSecurityManager launcherSecurityManager;
+ private final ContainerId containerId;
private Configuration launcherJobConf;
private AMRMClientAsync<?> amRmClientAsync;
@@ -94,7 +97,8 @@ public class LauncherAM {
LocalFsOperations localFsOperations,
PrepareActionsHandler prepareHandler,
LauncherAMCallbackNotifierFactory callbackNotifierFactory,
- LauncherSecurityManager launcherSecurityManager) {
+ LauncherSecurityManager launcherSecurityManager,
+ String containerId) {
this.ugi = Preconditions.checkNotNull(ugi, "ugi should not be null");
this.amRmClientAsyncFactory = Preconditions.checkNotNull(amRmClientAsyncFactory, "amRmClientAsyncFactory should not be null");
this.callbackHandler = Preconditions.checkNotNull(callbackHandler, "callbackHandler should not be null");
@@ -103,6 +107,7 @@ public class LauncherAM {
this.prepareHandler = Preconditions.checkNotNull(prepareHandler, "prepareHandler should not be null");
this.callbackNotifierFactory = Preconditions.checkNotNull(callbackNotifierFactory, "callbackNotifierFactory should not be null");
this.launcherSecurityManager = Preconditions.checkNotNull(launcherSecurityManager, "launcherSecurityManager should not be null");
+ this.containerId = ContainerId.fromString(Preconditions.checkNotNull(containerId, "containerId should not be null"));
}
public static void main(String[] args) throws Exception {
@@ -134,29 +139,16 @@ public class LauncherAM {
localFSOperations,
prepareHandler,
callbackNotifierFactory,
- launcherSecurityManager);
+ launcherSecurityManager,
+ System.getenv("CONTAINER_ID"));
launcher.run();
}
- // TODO: OYA: rethink all print messages and formatting
public void run() throws Exception {
final ErrorHolder errorHolder = new ErrorHolder();
OozieActionResult actionResult = OozieActionResult.FAILED;
boolean launcerExecutedProperly = false;
-
- String jobUserName = System.getenv(ApplicationConstants.Environment.USER.name());
-
- // DEBUG - will be removed
- UserGroupInformation login = UserGroupInformation.getLoginUser();
- System.out.println("Login: " + login.getUserName());
- System.out.println("SecurityEnabled:" + UserGroupInformation.isSecurityEnabled());
- System.out.println("Login keytab based:" + UserGroupInformation.isLoginKeytabBased());
- System.out.println("Login from keytab: " + login.isFromKeytab());
- System.out.println("Login has kerberos credentials: " + login.hasKerberosCredentials());
- System.out.println("Login authMethod: " + login.getAuthenticationMethod());
- System.out.println("JobUserName:" + jobUserName);
-
boolean backgroundAction = false;
try {
@@ -288,15 +280,20 @@ public class LauncherAM {
System.out.println("Java System Properties:");
System.out.println("------------------------");
System.getProperties().store(System.out, "");
- System.out.flush();
System.out.println("------------------------");
System.out.println();
+ System.out.println("Environment variables");
+ Map<String, String> env = System.getenv();
+ System.out.println("------------------------");
+ for (Map.Entry<String, String> entry : env.entrySet()) {
+ System.out.println(entry.getKey() + "=" + entry.getValue());
+ }
+ System.out.println("------------------------");
System.out.println("=================================================================");
System.out.println();
System.out.println(">>> Invoking Main class now >>>");
System.out.println();
- System.out.flush();
}
private void registerWithRM() throws IOException, YarnException {
@@ -317,7 +314,7 @@ public class LauncherAM {
// tracking url is determined automatically
amRmClientAsync.unregisterApplicationMaster(actionResult.getYarnStatus(), message, "");
} catch (Exception ex) {
- System.err.println("Error un-registering AM client");
+ System.out.println("Error un-registering AM client");
throw ex;
} finally {
amRmClientAsync.stop();
@@ -366,12 +363,7 @@ public class LauncherAM {
System.setProperty(ACTION_PREFIX + ACTION_DATA_OUTPUT_PROPS, new File(ACTION_DATA_OUTPUT_PROPS).getAbsolutePath());
System.setProperty(ACTION_PREFIX + ACTION_DATA_ERROR_PROPS, new File(ACTION_DATA_ERROR_PROPS).getAbsolutePath());
- // FIXME - make sure it's always set
- if (launcherJobConf.get("oozie.job.launch.time") != null) {
- System.setProperty("oozie.job.launch.time", launcherJobConf.get("oozie.job.launch.time"));
- } else {
- System.setProperty("oozie.job.launch.time", String.valueOf(System.currentTimeMillis()));
- }
+ System.setProperty("oozie.job.launch.time", String.valueOf(System.currentTimeMillis()));
}
private boolean runActionMain(final String[] mainArgs, final ErrorHolder eHolder, UserGroupInformation ugi) {
@@ -382,9 +374,9 @@ public class LauncherAM {
@Override
public Void run() {
try {
+ setRecoveryId();
Class<?> klass = launcherJobConf.getClass(CONF_OOZIE_ACTION_MAIN_CLASS, Object.class);
System.out.println("Launcher class: " + klass.toString());
- System.out.flush();
Method mainMethod = klass.getMethod("main", String[].class);
// Enable LauncherSecurityManager to catch System.exit calls
launcherSecurityManager.set();
@@ -412,7 +404,6 @@ public class LauncherAM {
if (launcherSecurityManager.getExitInvoked()) {
final int exitCode = launcherSecurityManager.getExitCode();
System.out.println("Intercepting System.exit(" + exitCode + ")");
- System.err.println("Intercepting System.exit(" + exitCode + ")");
// if 0 main() method finished successfully
// ignoring
eHolder.setErrorCode(exitCode);
@@ -438,8 +429,6 @@ public class LauncherAM {
eHolder.setErrorMessage(t.getMessage());
eHolder.setErrorCause(t);
} finally {
- System.out.flush();
- System.err.flush();
// Disable LauncherSecurityManager
launcherSecurityManager.unset();
}
@@ -451,6 +440,31 @@ public class LauncherAM {
return actionMainExecutedProperly.get();
}
+ private void setRecoveryId() throws LauncherException {
+ try {
+ ApplicationId applicationId = containerId.getApplicationAttemptId().getApplicationId();
+ String applicationIdStr = applicationId.toString();
+
+ String recoveryId = Preconditions.checkNotNull(launcherJobConf.get(LauncherMapper.OOZIE_ACTION_RECOVERY_ID),
+ "RecoveryID should not be null");
+
+ Path path = new Path(actionDir, recoveryId);
+ if (!hdfsOperations.fileExists(path, launcherJobConf)) {
+ hdfsOperations.writeStringToFile(path, launcherJobConf, applicationIdStr);
+ } else {
+ String id = hdfsOperations.readFileContents(path, launcherJobConf);
+
+ if (!applicationIdStr.equals(id)) {
+ throw new LauncherException(MessageFormat.format(
+ "YARN Id mismatch, action file [{0}] declares Id [{1}] current Id [{2}]", path, id,
+ applicationIdStr));
+ }
+ }
+ } catch (Exception ex) {
+ throw new LauncherException("IO error",ex);
+ }
+ }
+
private void handleActionData() throws IOException {
// external child IDs
processActionData(ACTION_PREFIX + ACTION_DATA_EXTERNAL_CHILD_IDS, null, ACTION_DATA_EXTERNAL_CHILD_IDS, -1, ACTIONOUTPUTTYPE_EXT_CHILD_ID);
@@ -516,14 +530,12 @@ public class LauncherAM {
}
}
} catch (IOException ioe) {
- System.err.println("A problem occured trying to fail the launcher");
+ System.out.println("A problem occured trying to fail the launcher");
ioe.printStackTrace();
} finally {
System.out.print("Failing Oozie Launcher, " + eHolder.getErrorMessage() + "\n");
- System.err.print("Failing Oozie Launcher, " + eHolder.getErrorMessage() + "\n");
if (eHolder.getErrorCause() != null) {
eHolder.getErrorCause().printStackTrace(System.out);
- eHolder.getErrorCause().printStackTrace(System.err);
}
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/3b6daff5/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java b/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
index 30441ea..052673d 100644
--- a/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
+++ b/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherAM.java
@@ -65,6 +65,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
+import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.oozie.action.hadoop.LauncherAM.LauncherSecurityManager;
import org.apache.oozie.action.hadoop.LauncherAM.OozieActionResult;
import org.junit.Before;
@@ -72,7 +73,6 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
@@ -80,6 +80,7 @@ import org.mockito.stubbing.Answer;
@RunWith(MockitoJUnitRunner.class)
public class TestLauncherAM {
+ private static final String DEFAULT_CONTAINER_ID = "container_1479473450392_0001_01_000001";
private static final String ACTIONDATA_ERROR_PROPERTIES = "error.properties";
private static final String ACTIONDATA_FINAL_STATUS_PROPERTY = "final.status";
private static final String ERROR_CODE_PROPERTY = "error.code";
@@ -108,7 +109,7 @@ public class TestLauncherAM {
private AMRMCallBackHandler callbackHandlerMock;
@Mock
- private HdfsOperations fsOperationsMock;
+ private HdfsOperations hdfsOperationsMock;
@Mock
private LocalFsOperations localFsOperationsMock;
@@ -127,7 +128,10 @@ public class TestLauncherAM {
private Configuration launcherJobConfig = new Configuration();
- @InjectMocks
+ private String containerId = DEFAULT_CONTAINER_ID;
+
+ private String applicationId = ConverterUtils.toContainerId(containerId).getApplicationAttemptId().getApplicationId().toString();
+
private LauncherAM launcherAM;
private ExpectedFailureDetails failureDetails = new ExpectedFailureDetails();
@@ -135,6 +139,8 @@ public class TestLauncherAM {
@Before
public void setup() throws IOException {
configureMocksForHappyPath();
+ launcherJobConfig.set(LauncherMapper.OOZIE_ACTION_RECOVERY_ID, "1");
+ instantiateLauncher();
}
@Test
@@ -379,6 +385,73 @@ public class TestLauncherAM {
}
}
+ @Test
+ public void testRecoveryIdNotSet() throws Exception {
+ launcherJobConfig.unset(LauncherMapper.OOZIE_ACTION_RECOVERY_ID);
+ instantiateLauncher();
+
+ executeLauncher();
+
+ failureDetails.expectedExceptionMessage("IO error")
+ .expectedErrorCode(EXIT_CODE_0)
+ .expectedErrorReason("IO error, IO error")
+ .withStackTrace();
+
+ assertFailedExecution();
+ }
+
+ @Test
+ public void testRecoveryIdExistsAndRecoveryIsdMatch() throws Exception {
+ given(hdfsOperationsMock.fileExists(any(Path.class), eq(launcherJobConfig))).willReturn(true);
+ given(hdfsOperationsMock.readFileContents(any(Path.class), eq(launcherJobConfig))).willReturn(applicationId);
+
+ executeLauncher();
+
+ verify(hdfsOperationsMock).readFileContents(any(Path.class), eq(launcherJobConfig));
+ }
+
+ @Test
+ public void testRecoveryIdExistsAndRecoveryIdsDoNotMatch() throws Exception {
+ given(hdfsOperationsMock.fileExists(any(Path.class), eq(launcherJobConfig))).willReturn(true);
+ given(hdfsOperationsMock.readFileContents(any(Path.class), eq(launcherJobConfig))).willReturn("not_matching_appid");
+
+ executeLauncher();
+
+ failureDetails.expectedExceptionMessage("IO error")
+ .expectedErrorCode(EXIT_CODE_0)
+ .expectedErrorReason("IO error, IO error")
+ .withStackTrace();
+
+ verify(hdfsOperationsMock).readFileContents(any(Path.class), eq(launcherJobConfig));
+ assertFailedExecution();
+ }
+
+ @Test
+ public void testReadingRecoveryIdFails() throws Exception {
+ willThrow(new IOException()).given(hdfsOperationsMock).writeStringToFile(any(Path.class), eq(launcherJobConfig), eq(applicationId));
+
+ executeLauncher();
+
+ failureDetails.expectedExceptionMessage("IO error")
+ .expectedErrorCode(EXIT_CODE_0)
+ .expectedErrorReason("IO error, IO error")
+ .withStackTrace();
+
+ assertFailedExecution();
+ }
+
+ private void instantiateLauncher() {
+ launcherAM = new LauncherAM(ugiMock,
+ amRMClientAsyncFactoryMock,
+ callbackHandlerMock,
+ hdfsOperationsMock,
+ localFsOperationsMock,
+ prepareHandlerMock,
+ launcherCallbackNotifierFactoryMock,
+ launcherSecurityManagerMock,
+ containerId);
+ }
+
@SuppressWarnings("unchecked")
private void configureMocksForHappyPath() throws IOException {
launcherJobConfig.set(LauncherAM.OOZIE_ACTION_DIR_PATH, "dummy");
@@ -426,9 +499,10 @@ public class TestLauncherAM {
verify(amRmAsyncClientMock).unregisterApplicationMaster(FinalApplicationStatus.SUCCEEDED, EMPTY_STRING, EMPTY_STRING);
verify(amRmAsyncClientMock).stop();
verify(ugiMock, times(2)).doAs(any(PrivilegedAction.class)); // prepare & action main
- verify(fsOperationsMock).uploadActionDataToHDFS(any(Configuration.class), any(Path.class), any(Map.class));
+ verify(hdfsOperationsMock).uploadActionDataToHDFS(any(Configuration.class), any(Path.class), any(Map.class));
verify(launcherCallbackNotifierFactoryMock).createCallbackNotifier(any(Configuration.class));
verify(launcherCallbackNotifierMock).notifyURL(actionResult);
+ verify(hdfsOperationsMock).writeStringToFile(any(Path.class), any(Configuration.class), any(String.class));
Map<String, String> actionData = launcherAM.getActionData();
verifyFinalStatus(actionData, actionResult);
[28/48] oozie git commit: OOZIE-2594 finalize the impl of
MapReduceActionExecutor.kill(), adding tests
Posted by pb...@apache.org.
OOZIE-2594 finalize the impl of MapReduceActionExecutor.kill(), adding tests
Change-Id: I09dce58bbd3c7f4534210394e35f6681a62b9bc9
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/8d60f7f2
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/8d60f7f2
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/8d60f7f2
Branch: refs/heads/oya
Commit: 8d60f7f25647ff0839e62d3e245d8c6f875c57b1
Parents: 095c584
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Wed Nov 23 13:56:58 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Wed Nov 23 14:56:02 2016 +0100
----------------------------------------------------------------------
.../action/hadoop/MapReduceActionExecutor.java | 40 +++++++++++----
.../oozie/action/hadoop/BlockingMapper.java | 52 ++++++++++++++++++++
.../action/hadoop/MapperReducerForTest.java | 10 ++--
.../hadoop/TestMapReduceActionExecutor.java | 45 +++++++++++++++++
4 files changed, 133 insertions(+), 14 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/8d60f7f2/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
index e97de7e..11d1787 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
@@ -50,6 +50,11 @@ import org.apache.oozie.util.XmlUtils;
import org.jdom.Element;
import org.jdom.Namespace;
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Iterables;
+import com.google.common.io.Closeables;
+
public class MapReduceActionExecutor extends JavaActionExecutor {
public static final String OOZIE_ACTION_EXTERNAL_STATS_WRITE = "oozie.action.external.stats.write";
@@ -401,16 +406,15 @@ public class MapReduceActionExecutor extends JavaActionExecutor {
}
@Override
- public void kill(Context context, WorkflowAction action) throws ActionExecutorException {
+ public void kill(final Context context, final WorkflowAction action) throws ActionExecutorException {
// Kill the LauncherAM which submits the MR job
super.kill(context, action);
// We have to check whether the MapReduce execution has started or not. If it has started, then we have to get
// the YARN ApplicationID based on the tag and kill it as well
-
- // TODO: this must be tested in TestMapReduceActionExecutor
+ YarnClient yarnClient = null;
try {
- String tag = ActionExecutor.getActionYarnTag(new Configuration(), context.getWorkflow(), action);
+ String tag = LauncherMapperHelper.getTag(ActionExecutor.getActionYarnTag(new Configuration(), context.getWorkflow(), action));
GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
gar.setScope(ApplicationsRequestScope.ALL);
gar.setApplicationTags(Collections.singleton(tag));
@@ -420,16 +424,34 @@ public class MapReduceActionExecutor extends JavaActionExecutor {
GetApplicationsResponse apps = proxy.getApplications(gar);
List<ApplicationReport> appsList = apps.getApplicationList();
- YarnClient yarnClient = YarnClient.createYarnClient();
- yarnClient.init(actionConf);
- yarnClient.start();
+ if (appsList.size() > 1) {
+ String applications = Joiner.on(",").join(Iterables.transform(appsList, new Function<ApplicationReport, String>() {
+ @Override
+ public String apply(ApplicationReport input) {
+ return input.toString();
+ }
+ }));
+
+ LOG.error("Too many applications were returned: {0}", applications);
+ throw new IllegalArgumentException("Too many applications were returned");
+ } else if (appsList.size() == 1) {
- for (ApplicationReport app : appsList) {
- LOG.info("Killing MapReduce job {0}", app.getApplicationId().toString());
+ yarnClient = YarnClient.createYarnClient();
+ yarnClient.init(actionConf);
+ yarnClient.start();
+
+ ApplicationReport app = appsList.get(0);
+ LOG.info("Killing MapReduce job {0}, YARN Id: {1}", action.getExternalChildIDs(), app.getApplicationId().toString());
yarnClient.killApplication(app.getApplicationId());
+ } else {
+ LOG.info("No MapReduce job to kill");
}
} catch (Exception e) {
throw convertException(e);
+ } finally {
+ if (yarnClient != null) {
+ Closeables.closeQuietly(yarnClient);
+ }
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/8d60f7f2/core/src/test/java/org/apache/oozie/action/hadoop/BlockingMapper.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/BlockingMapper.java b/core/src/test/java/org/apache/oozie/action/hadoop/BlockingMapper.java
new file mode 100644
index 0000000..0f4dcd6
--- /dev/null
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/BlockingMapper.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.action.hadoop;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+
+// A mapper task that blocks forever
+public class BlockingMapper implements Mapper<Object, Object, Object, Object> {
+
+ @Override
+ public void configure(JobConf job) {
+ // nop
+ }
+
+ @Override
+ public void close() throws IOException {
+ // nop
+ }
+
+ @Override
+ public void map(Object key, Object value, OutputCollector<Object, Object> output, Reporter reporter)
+ throws IOException {
+ try {
+ synchronized (this) {
+ wait();
+ }
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/8d60f7f2/core/src/test/java/org/apache/oozie/action/hadoop/MapperReducerForTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/MapperReducerForTest.java b/core/src/test/java/org/apache/oozie/action/hadoop/MapperReducerForTest.java
index 8f08ddd..75ac716 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/MapperReducerForTest.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/MapperReducerForTest.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.mapred.Reducer;
import java.io.IOException;
import java.util.Iterator;
-public class MapperReducerForTest implements Mapper, Reducer {
+public class MapperReducerForTest implements Mapper<Object, Object, Object, Object>, Reducer<Object, Object, Object, Object> {
public static final String GROUP = "g";
public static final String NAME = "c";
/**
@@ -66,14 +66,14 @@ public class MapperReducerForTest implements Mapper, Reducer {
public void close() throws IOException {
}
- @SuppressWarnings("unchecked")
- public void map(Object key, Object value, OutputCollector collector, Reporter reporter) throws IOException {
+ @Override
+ public void map(Object key, Object value, OutputCollector<Object, Object> collector, Reporter reporter) throws IOException {
collector.collect(key, value);
reporter.incrCounter(GROUP, NAME, 5l);
}
- @SuppressWarnings("unchecked")
- public void reduce(Object key, Iterator values, OutputCollector collector, Reporter reporter)
+ @Override
+ public void reduce(Object key, Iterator<Object> values, OutputCollector<Object, Object> collector, Reporter reporter)
throws IOException {
while (values.hasNext()) {
collector.collect(key, values.next());
http://git-wip-us.apache.org/repos/asf/oozie/blob/8d60f7f2/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
index a21b7c7..78936c4 100644
--- a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
+++ b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
@@ -52,6 +52,7 @@ import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.streaming.StreamJob;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
@@ -494,6 +495,12 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
assertTrue(MapperReducerCredentialsForTest.hasCredentials(mrJob));
}
+ protected XConfiguration getSleepMapReduceConfig(String inputDir, String outputDir) {
+ XConfiguration conf = getMapReduceConfig(inputDir, outputDir);
+ conf.set("mapred.mapper.class", BlockingMapper.class.getName());
+ return conf;
+ }
+
protected XConfiguration getMapReduceConfig(String inputDir, String outputDir) {
XConfiguration conf = new XConfiguration();
conf.set("mapred.mapper.class", MapperReducerForTest.class.getName());
@@ -654,6 +661,44 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
assertTrue(errorProps.getProperty("exception.stacktrace").startsWith(OozieActionConfiguratorException.class.getName()));
}
+ public void testMapReduceActionKill() throws Exception {
+ FileSystem fs = getFileSystem();
+
+ Path inputDir = new Path(getFsTestCaseDir(), "input");
+ Path outputDir = new Path(getFsTestCaseDir(), "output");
+
+ Writer w = new OutputStreamWriter(fs.create(new Path(inputDir, "data.txt")));
+ w.write("dummy\n");
+ w.write("dummy\n");
+ w.close();
+
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ + getNameNodeUri() + "</name-node>"
+ + getSleepMapReduceConfig(inputDir.toString(), outputDir.toString()).toXmlString(false) + "</map-reduce>";
+
+ Context context = createContext(MAP_REDUCE, actionXml);
+ final String launcherId = submitAction(context);
+ // wait until LauncherAM terminates - the MR job keeps running in the background
+ waitUntilYarnAppDoneAndAssertSuccess(launcherId);
+
+ MapReduceActionExecutor mae = new MapReduceActionExecutor();
+ mae.check(context, context.getAction()); // must be called so that externalChildIDs are read from HDFS
+ JobConf conf = mae.createBaseHadoopConf(context, XmlUtils.parseXml(actionXml));
+ String user = conf.get("user.name");
+ JobClient jobClient = Services.get().get(HadoopAccessorService.class).createJobClient(user, conf);
+ final RunningJob mrJob = jobClient.getJob(JobID.forName(context.getAction().getExternalChildIDs()));
+
+ mae.kill(context, context.getAction());
+
+ waitFor(10_000, new Predicate() {
+ @Override
+ public boolean evaluate() throws Exception {
+ return mrJob.isComplete();
+ }
+ });
+ assertEquals(JobStatus.State.KILLED, mrJob.getJobStatus().getState());
+ }
+
public void testMapReduceWithCredentials() throws Exception {
FileSystem fs = getFileSystem();
[25/48] oozie git commit: test server resources
Posted by pb...@apache.org.
test server resources
Change-Id: I08674a52897aa5b10abb128b4458aec3b7f65e40
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/095c584b
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/095c584b
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/095c584b
Branch: refs/heads/oya
Commit: 095c584b5168ebea32fca46683fb05c072a204bc
Parents: 5c4a79b
Author: Peter Cseh <ge...@cloudera.com>
Authored: Tue Nov 22 17:59:52 2016 +0100
Committer: Peter Cseh <ge...@cloudera.com>
Committed: Tue Nov 22 18:00:04 2016 +0100
----------------------------------------------------------------------
.../test/java/org/apache/oozie/test/TestConfigurations.java | 7 ++++---
core/src/test/java/org/apache/oozie/test/XTestCase.java | 5 +++--
2 files changed, 7 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/095c584b/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestConfigurations.java b/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
index c6c1d23..6942aad 100644
--- a/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
+++ b/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
@@ -36,7 +36,8 @@ class TestConfigurations {
yarnConfig.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
yarnConfig.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:0");
-
+ yarnConfig.set(YarnConfiguration.NM_VCORES, "16");
+ yarnConfig.set(YarnConfiguration.NM_PMEM_MB, "16000");
return yarnConfig;
}
@@ -56,10 +57,10 @@ class TestConfigurations {
return jobConf;
}
- JobConf createPristineJobConf(final String jobTrackerUri, final String nameNodeUri) {
+ JobConf createPristineJobConf(final String resouceManagerUri, final String nameNodeUri) {
final JobConf jobConf = new JobConf();
- jobConf.set("mapred.job.tracker", jobTrackerUri);
+ jobConf.set("yarn.resourcemanager.address", resouceManagerUri);
jobConf.set("fs.default.name", nameNodeUri);
return jobConf;
http://git-wip-us.apache.org/repos/asf/oozie/blob/095c584b/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index 9820167..b29556c 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -34,6 +34,7 @@ import junit.framework.TestCase;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -509,7 +510,7 @@ public abstract class XTestCase extends TestCase {
private static MiniDFSCluster dfsCluster = null;
private static MiniDFSCluster dfsCluster2 = null;
- private static MiniYARNCluster yarnCluster = null;
+ private static MiniMRYarnCluster yarnCluster = null;
private static MiniHCatServer hcatServer = null;
private static MiniHS2 hiveserver2 = null;
private static HiveConf hs2Config = null;
@@ -560,7 +561,7 @@ public abstract class XTestCase extends TestCase {
private void setupYarnCluster(final JobConf dfsConfig) {
final Configuration yarnConfig = testConfigurations.createYarnConfig(dfsConfig);
- yarnCluster = new MiniYARNCluster(this.getClass().getName(), 1, 1, 1, 1);
+ yarnCluster = new MiniMRYarnCluster(this.getClass().getName());
yarnCluster.init(yarnConfig);
yarnCluster.start();
}
[40/48] oozie git commit: Fix build problem in
TestMapReduceActionExecutor
Posted by pb...@apache.org.
Fix build problem in TestMapReduceActionExecutor
Change-Id: I482fb0a4780c4816a1c52feb331c3be55738f0d0
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/6cd07c24
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/6cd07c24
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/6cd07c24
Branch: refs/heads/oya
Commit: 6cd07c24f00103aff38ac5b5ee6013d901cd27b4
Parents: 67dca9c
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Mon Nov 28 14:41:05 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Mon Nov 28 14:41:05 2016 +0100
----------------------------------------------------------------------
.../apache/oozie/action/hadoop/TestMapReduceActionExecutor.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/6cd07c24/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
index fc75968..88f9a7c 100644
--- a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
+++ b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
@@ -672,7 +672,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ getSleepMapReduceConfig(inputDir.toString(), outputDir.toString()).toXmlString(false) + "</map-reduce>";
[26/48] oozie git commit: OOZIE-2720 Test failure -
TestCoordMaterializeTriggerService#testMaxMatThrottleNotPicked (gezapeti via
rohini)
Posted by pb...@apache.org.
OOZIE-2720 Test failure - TestCoordMaterializeTriggerService#testMaxMatThrottleNotPicked (gezapeti via rohini)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/d586b7a7
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/d586b7a7
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/d586b7a7
Branch: refs/heads/oya
Commit: d586b7a79e2e9bc52f32aa93e46fb847053a5b93
Parents: 53554e8
Author: Rohini Palaniswamy <ro...@apache.org>
Authored: Tue Nov 22 10:40:22 2016 -0800
Committer: Rohini Palaniswamy <ro...@apache.org>
Committed: Tue Nov 22 10:40:22 2016 -0800
----------------------------------------------------------------------
.../TestCoordMaterializeTriggerService.java | 55 ++++++++++++++------
release-log.txt | 3 +-
2 files changed, 41 insertions(+), 17 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/d586b7a7/core/src/test/java/org/apache/oozie/service/TestCoordMaterializeTriggerService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestCoordMaterializeTriggerService.java b/core/src/test/java/org/apache/oozie/service/TestCoordMaterializeTriggerService.java
index 427c9b3..aadf3fb 100644
--- a/core/src/test/java/org/apache/oozie/service/TestCoordMaterializeTriggerService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestCoordMaterializeTriggerService.java
@@ -30,6 +30,7 @@ import org.apache.oozie.client.CoordinatorAction;
import org.apache.oozie.client.CoordinatorJob;
import org.apache.oozie.client.CoordinatorJob.Execution;
import org.apache.oozie.client.CoordinatorJob.Timeunit;
+import org.apache.oozie.client.Job;
import org.apache.oozie.executor.jpa.CoordActionQueryExecutor;
import org.apache.oozie.executor.jpa.CoordJobGetActionsJPAExecutor;
import org.apache.oozie.executor.jpa.CoordJobGetJPAExecutor;
@@ -40,6 +41,7 @@ import org.apache.oozie.executor.jpa.CoordJobQueryExecutor.CoordJobQuery;
import org.apache.oozie.service.CoordMaterializeTriggerService.CoordMaterializeTriggerRunnable;
import org.apache.oozie.service.UUIDService.ApplicationType;
import org.apache.oozie.test.XDataTestCase;
+import org.apache.oozie.test.XTestCase;
import org.apache.oozie.util.DateUtils;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XLog;
@@ -47,12 +49,13 @@ import org.apache.oozie.util.XmlUtils;
public class TestCoordMaterializeTriggerService extends XDataTestCase {
private Services services;
-
+ JPAService jpaService;
@Override
protected void setUp() throws Exception {
super.setUp();
services = new Services();
services.init();
+ jpaService = Services.get().get(JPAService.class);
}
@Override
@@ -70,17 +73,15 @@ public class TestCoordMaterializeTriggerService extends XDataTestCase {
* @throws Exception
*/
public void testCoordMaterializeTriggerService1() throws Exception {
-
Date start = DateUtils.parseDateOozieTZ("2009-02-01T01:00Z");
Date end = DateUtils.parseDateOozieTZ("2009-02-20T23:59Z");
final CoordinatorJobBean job = addRecordToCoordJobTable(CoordinatorJob.Status.PREP, start, end, false, false, 0);
- sleep(3000);
+ waitForStatus(30000, job, CoordinatorJob.Status.PREP);
Runnable runnable = new CoordMaterializeTriggerRunnable(3600, 300);
runnable.run();
- sleep(1000);
+ waitForStatus(10000, job, CoordinatorJob.Status.RUNNING);
- JPAService jpaService = Services.get().get(JPAService.class);
CoordJobGetJPAExecutor coordGetCmd = new CoordJobGetJPAExecutor(job.getId());
CoordinatorJobBean coordJob = jpaService.execute(coordGetCmd);
assertEquals(CoordinatorJob.Status.RUNNING, coordJob.getStatus());
@@ -89,6 +90,17 @@ public class TestCoordMaterializeTriggerService extends XDataTestCase {
assert (numWaitingActions <= coordJob.getMatThrottling());
}
+ private void waitForStatus(int timeout, final CoordinatorJobBean job, final CoordinatorJob.Status status) {
+ waitFor(timeout, new Predicate() {
+ @Override
+ public boolean evaluate() throws Exception {
+ CoordJobGetJPAExecutor coordGetCmd = new CoordJobGetJPAExecutor(job.getId());
+ CoordinatorJobBean coordJob = jpaService.execute(coordGetCmd);
+ return status == coordJob.getStatus();
+ }
+ });
+ }
+
/**
* Test current mode. The job should be picked up for materialization.
*
@@ -99,12 +111,11 @@ public class TestCoordMaterializeTriggerService extends XDataTestCase {
Date end = new Date(start.getTime() + 3600 * 48 * 1000);
final CoordinatorJobBean job = addRecordToCoordJobTable(CoordinatorJob.Status.PREP, start, end, false, false, 0);
- sleep(3000);
+ waitForStatus(30000, job, CoordinatorJob.Status.PREP);
Runnable runnable = new CoordMaterializeTriggerRunnable(3600, 300);
runnable.run();
- sleep(1000);
+ waitForStatus(10000, job, CoordinatorJob.Status.RUNNING);
- JPAService jpaService = Services.get().get(JPAService.class);
CoordJobGetJPAExecutor coordGetCmd = new CoordJobGetJPAExecutor(job.getId());
CoordinatorJobBean coordJob = jpaService.execute(coordGetCmd);
assertEquals(CoordinatorJob.Status.RUNNING, coordJob.getStatus());
@@ -115,6 +126,7 @@ public class TestCoordMaterializeTriggerService extends XDataTestCase {
setSystemProperty(CoordMaterializeTriggerService.CONF_MATERIALIZATION_SYSTEM_LIMIT, "1");
services = new Services();
services.init();
+ jpaService = services.get(JPAService.class);
Date start = new Date();
Date end = new Date(start.getTime() + 3600 * 5 * 1000);
@@ -129,9 +141,8 @@ public class TestCoordMaterializeTriggerService extends XDataTestCase {
Runnable runnable = new CoordMaterializeTriggerRunnable(3600, 300);
runnable.run();
- sleep(1000);
+ waitForStatus(10000, job2, CoordinatorJob.Status.RUNNING);
- JPAService jpaService = Services.get().get(JPAService.class);
// second job is beyond limit but still should be picked up
job2 = jpaService.execute(new CoordJobGetJPAExecutor(job2.getId()));
assertEquals(CoordinatorJob.Status.RUNNING, job2.getStatus());
@@ -145,7 +156,7 @@ public class TestCoordMaterializeTriggerService extends XDataTestCase {
setSystemProperty(CoordMaterializeTriggerService.CONF_MATERIALIZATION_SYSTEM_LIMIT, "10");
services = new Services();
services.init();
-
+ jpaService = services.get(JPAService.class);
Date start = new Date();
Date end = new Date(start.getTime() + 3600 * 5 * 1000);
CoordinatorJobBean job = addRecordToCoordJobTable(CoordinatorJob.Status.RUNNING, start, end, false, false, 1);
@@ -153,12 +164,11 @@ public class TestCoordMaterializeTriggerService extends XDataTestCase {
addRecordToCoordActionTable(job.getId(), 2, CoordinatorAction.Status.WAITING, "coord-action-get.xml", 0);
job.setMatThrottling(3);
CoordJobQueryExecutor.getInstance().executeUpdate(CoordJobQuery.UPDATE_COORD_JOB, job);
- JPAService jpaService = Services.get().get(JPAService.class);
job = jpaService.execute(new CoordJobGetJPAExecutor(job.getId()));
Date lastModifiedDate = job.getLastModifiedTime();
Runnable runnable = new CoordMaterializeTriggerRunnable(3600, 300);
runnable.run();
- sleep(1000);
+ waitForModification(job.getId(), lastModifiedDate);
job = jpaService.execute(new CoordJobGetJPAExecutor(job.getId()));
assertNotSame(lastModifiedDate, job.getLastModifiedTime());
@@ -167,16 +177,28 @@ public class TestCoordMaterializeTriggerService extends XDataTestCase {
job = jpaService.execute(new CoordJobGetJPAExecutor(job.getId()));
lastModifiedDate = job.getLastModifiedTime();
runnable.run();
- sleep(1000);
+
job = jpaService.execute(new CoordJobGetJPAExecutor(job.getId()));
assertEquals(lastModifiedDate, job.getLastModifiedTime());
}
+ private void waitForModification(final String id, final Date lastModifiedDate) {
+ waitFor(10000, new Predicate() {
+ @Override
+ public boolean evaluate() throws Exception {
+ CoordJobGetJPAExecutor coordGetCmd = new CoordJobGetJPAExecutor(id);
+ CoordinatorJobBean coordJob = jpaService.execute(coordGetCmd);
+ return !coordJob.getLastModifiedTime().equals(lastModifiedDate);
+ }
+ });
+ }
+
public void testMaxMatThrottleNotPickedMultipleJobs() throws Exception {
Services.get().destroy();
setSystemProperty(CoordMaterializeTriggerService.CONF_MATERIALIZATION_SYSTEM_LIMIT, "3");
services = new Services();
services.init();
+ jpaService = services.get(JPAService.class);
Date start = new Date();
Date end = new Date(start.getTime() + 3600 * 5 * 1000);
CoordinatorJobBean job1 = addRecordToCoordJobTable(CoordinatorJob.Status.RUNNING, start, end, false, false, 1);
@@ -197,7 +219,6 @@ public class TestCoordMaterializeTriggerService extends XDataTestCase {
job3.setMatThrottling(2);
CoordJobQueryExecutor.getInstance().executeUpdate(CoordJobQuery.UPDATE_COORD_JOB, job3);
- JPAService jpaService = Services.get().get(JPAService.class);
job1 = jpaService.execute(new CoordJobGetJPAExecutor(job1.getId()));
Date lastModifiedDate1 = job1.getLastModifiedTime();
job2 = jpaService.execute(new CoordJobGetJPAExecutor(job2.getId()));
@@ -208,7 +229,9 @@ public class TestCoordMaterializeTriggerService extends XDataTestCase {
Runnable runnable = new CoordMaterializeTriggerRunnable(3600, 300);
runnable.run();
- sleep(1000);
+ waitForModification(job1.getId(), lastModifiedDate1);
+ waitForModification(job2.getId(), lastModifiedDate2);
+ waitForModification(job3.getId(), lastModifiedDate3);
job1 = jpaService.execute(new CoordJobGetJPAExecutor(job1.getId()));
assertNotSame(lastModifiedDate1, job1.getLastModifiedTime());
http://git-wip-us.apache.org/repos/asf/oozie/blob/d586b7a7/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index e85778e..52640dd 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -5,13 +5,14 @@ OOZIE-2737 testConfigDefaultPropsToAction is flaky (satishsaley via rohini)
OOZIE-2666 Support embedding Jetty into Oozie (asasvari via rkanter)
OOZIE-1459 Remove the version in the child poms for maven-antrun-plugin (Jan Hentschel via rkanter)
OOZIE-2225 Add wild card filter for gathering jobs (sai-krish,pbacsko via rkanter,rohini)
-OOZIE-2536 Hadoop's cleanup of local directory in uber mode causing failures (satishsaley via rohini)
OOZIE-1986 Add FindBugs report to pre-commit build (andras.piros via rkanter)
OOZIE-2634 Queue dump command message is confusing when the queue is empty (andras.piros via rkanter)
-- Oozie 4.3.0 release
+OOZIE-2720 Test failure - TestCoordMaterializeTriggerService#testMaxMatThrottleNotPicked (gezapeti via rohini)
+OOZIE-2536 Hadoop's cleanup of local directory in uber mode causing failures (satishsaley via rohini)
OOZIE-2723 JSON.org license is now CatX (rkanter, abhishekbafna via shwethags)
OOZIE-2725 Upgrade Tomcat to 6.0.47 for the latest security fixes (rkanter via shwethags)
OOZIE-2724 coord:current resolves monthly/yearly dependencies incorrectly (satishsaley via shwethags)
[16/48] oozie git commit: OOZIE-2729 OYA: refactor XTestCase.
Posted by pb...@apache.org.
OOZIE-2729 OYA: refactor XTestCase.
Change-Id: If6b97427dea26eeaecf60bbd0ba43d6eda0effeb
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/ba68347b
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/ba68347b
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/ba68347b
Branch: refs/heads/oya
Commit: ba68347b56d6dd7eec792eb463cc36e9728ddbd5
Parents: d5dcc5c
Author: Andras Piros <an...@cloudera.com>
Authored: Thu Nov 17 20:50:19 2016 +0100
Committer: Andras Piros <an...@cloudera.com>
Committed: Thu Nov 17 20:50:19 2016 +0100
----------------------------------------------------------------------
.../action/hadoop/TestFsActionExecutor.java | 4 +-
.../oozie/action/hadoop/TestShellMain.java | 5 +-
.../command/coord/TestCoordRerunXCommand.java | 4 +-
.../oozie/dependency/TestHCatURIHandler.java | 4 +-
.../oozie/jms/TestDefaultConnectionContext.java | 2 +-
.../oozie/jms/TestHCatMessageHandler.java | 2 +-
.../oozie/jms/TestJMSJobEventListener.java | 9 +-
.../oozie/jms/TestJMSSLAEventListener.java | 4 +-
.../oozie/service/TestJMSAccessorService.java | 7 +-
.../apache/oozie/test/MiniOozieTestCase.java | 3 +-
.../apache/oozie/test/TestCaseDirectories.java | 139 ++++
.../apache/oozie/test/TestConfigurations.java | 111 +++
.../org/apache/oozie/test/TestConstants.java | 96 +++
.../org/apache/oozie/test/TestDbCleaner.java | 126 ++++
.../apache/oozie/test/TestPropertiesLoader.java | 102 +++
.../apache/oozie/test/TestSystemProperties.java | 105 +++
.../org/apache/oozie/test/TestXTestCase.java | 8 +-
.../java/org/apache/oozie/test/XFsTestCase.java | 6 +-
.../java/org/apache/oozie/test/XTestCase.java | 740 ++++---------------
19 files changed, 851 insertions(+), 626 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
index 5618cf8..e7fd9c5 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
@@ -35,7 +35,7 @@ import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.URIHandlerService;
import org.apache.oozie.service.WorkflowAppService;
-import org.apache.oozie.test.XTestCase;
+import org.apache.oozie.test.TestConstants;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XmlUtils;
import org.jdom.Element;
@@ -305,7 +305,7 @@ public class TestFsActionExecutor extends ActionExecutorTestCase {
public void testDeleteHcat() throws Exception {
//setting current user as test user because directory structure created by HCat have current user permissions (755).
- setSystemProperty(XTestCase.TEST_USER1_PROP, System.getProperty("user.name"));
+ setSystemProperty(TestConstants.TEST_USER1_PROP, System.getProperty("user.name"));
createTestTable();
addPartition(db, table, "year=2012;month=12;dt=02;country=us");
URI hcatURI = getHCatURI(db, table, "country=us;year=2012;month=12;dt=02");
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
index a7d6c18..d184a5a 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
@@ -25,6 +25,7 @@ import java.io.FileWriter;
import java.io.Writer;
import java.util.Properties;
+import org.apache.oozie.test.TestConstants;
import org.apache.oozie.util.XConfiguration;
//Test cases are mainly implemented in the Base class
@@ -50,8 +51,8 @@ public class TestShellMain extends ShellTestCase {
jobConf.set("fs.default.name", getNameNodeUri());
- jobConf.set(ShellMain.CONF_OOZIE_SHELL_EXEC, SHELL_COMMAND_NAME);
- String[] args = new String[] { SHELL_COMMAND_SCRIPTFILE_OPTION, script.toString(), "A", "B" };
+ jobConf.set(ShellMain.CONF_OOZIE_SHELL_EXEC, TestConstants.SHELL_COMMAND_NAME);
+ String[] args = new String[] { TestConstants.SHELL_COMMAND_SCRIPTFILE_OPTION, script.toString(), "A", "B" };
ActionUtils.setStrings(jobConf, ShellMain.CONF_OOZIE_SHELL_ARGS, args);
ActionUtils.setStrings(jobConf, ShellMain.CONF_OOZIE_SHELL_ENVS,
new String[] { "var1=value1", "var2=value2" });
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java b/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
index 9a05a85..6ae0ae2 100644
--- a/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
@@ -59,8 +59,8 @@ import org.apache.oozie.service.Services;
import org.apache.oozie.service.StatusTransitService;
import org.apache.oozie.service.URIHandlerService;
import org.apache.oozie.store.StoreException;
+import org.apache.oozie.test.TestConstants;
import org.apache.oozie.test.XDataTestCase;
-import org.apache.oozie.test.XTestCase;
import org.apache.oozie.util.DateUtils;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XLog;
@@ -571,7 +571,7 @@ public class TestCoordRerunXCommand extends XDataTestCase {
*/
public void testCoordRerunCleanupForHCat() throws Exception {
//setting current user as test user because directory structure created by HCat have current user permissions (755).
- setSystemProperty(XTestCase.TEST_USER1_PROP, System.getProperty("user.name"));
+ setSystemProperty(TestConstants.TEST_USER1_PROP, System.getProperty("user.name"));
super.setupHCatalogServer();
services = super.setupServicesForHCatalog();
services.init();
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/dependency/TestHCatURIHandler.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/dependency/TestHCatURIHandler.java b/core/src/test/java/org/apache/oozie/dependency/TestHCatURIHandler.java
index 615f5e1..5f62c11 100644
--- a/core/src/test/java/org/apache/oozie/dependency/TestHCatURIHandler.java
+++ b/core/src/test/java/org/apache/oozie/dependency/TestHCatURIHandler.java
@@ -24,8 +24,8 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.oozie.service.HCatAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.URIHandlerService;
+import org.apache.oozie.test.TestConstants;
import org.apache.oozie.test.XHCatTestCase;
-import org.apache.oozie.test.XTestCase;
import org.junit.Test;
public class TestHCatURIHandler extends XHCatTestCase {
@@ -69,7 +69,7 @@ public class TestHCatURIHandler extends XHCatTestCase {
@Test
public void testExists() throws Exception {
//setting current user as test user because directory structure created by HCat have current user permissions (755).
- setSystemProperty(XTestCase.TEST_USER1_PROP, System.getProperty("user.name"));
+ setSystemProperty(TestConstants.TEST_USER1_PROP, System.getProperty("user.name"));
createTestTable();
addPartition(db, table, "year=2012;month=12;dt=02;country=us");
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/jms/TestDefaultConnectionContext.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/jms/TestDefaultConnectionContext.java b/core/src/test/java/org/apache/oozie/jms/TestDefaultConnectionContext.java
index e941a8e..5346a91 100644
--- a/core/src/test/java/org/apache/oozie/jms/TestDefaultConnectionContext.java
+++ b/core/src/test/java/org/apache/oozie/jms/TestDefaultConnectionContext.java
@@ -44,7 +44,7 @@ public class TestDefaultConnectionContext extends XTestCase {
conf.set(Services.CONF_SERVICE_EXT_CLASSES,
JMSAccessorService.class.getName() + "," + JMSTopicService.class.getName());
conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#"
- + ActiveMQConnFactory + ";" + "java.naming.provider.url#" + localActiveMQBroker
+ + ACTIVE_MQ_CONN_FACTORY + ";" + "java.naming.provider.url#" + LOCAL_ACTIVE_MQ_BROKER
+ ";connectionFactoryNames#" + "ConnectionFactory");
services.init();
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/jms/TestHCatMessageHandler.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/jms/TestHCatMessageHandler.java b/core/src/test/java/org/apache/oozie/jms/TestHCatMessageHandler.java
index 1a1a44e..2ce5431 100644
--- a/core/src/test/java/org/apache/oozie/jms/TestHCatMessageHandler.java
+++ b/core/src/test/java/org/apache/oozie/jms/TestHCatMessageHandler.java
@@ -66,7 +66,7 @@ public class TestHCatMessageHandler extends XDataTestCase {
super.setUp();
services = super.setupServicesForHCatalog();
services.init();
- connFac = new ActiveMQConnectionFactory(localActiveMQBroker);
+ connFac = new ActiveMQConnectionFactory(LOCAL_ACTIVE_MQ_BROKER);
conn = connFac.createConnection();
conn.start();
session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/jms/TestJMSJobEventListener.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/jms/TestJMSJobEventListener.java b/core/src/test/java/org/apache/oozie/jms/TestJMSJobEventListener.java
index 728916e..913112f 100644
--- a/core/src/test/java/org/apache/oozie/jms/TestJMSJobEventListener.java
+++ b/core/src/test/java/org/apache/oozie/jms/TestJMSJobEventListener.java
@@ -38,9 +38,6 @@ import org.apache.oozie.client.event.jms.JMSHeaderConstants;
import org.apache.oozie.client.event.message.CoordinatorActionMessage;
import org.apache.oozie.client.event.message.WorkflowJobMessage;
import org.apache.oozie.event.*;
-import org.apache.oozie.jms.ConnectionContext;
-import org.apache.oozie.jms.JMSConnectionInfo;
-import org.apache.oozie.jms.JMSJobEventListener;
import org.apache.oozie.service.JMSAccessorService;
import org.apache.oozie.service.JMSTopicService;
import org.apache.oozie.service.Services;
@@ -63,8 +60,8 @@ public class TestJMSJobEventListener extends XTestCase {
conf = services.getConf();
conf.set(Services.CONF_SERVICE_EXT_CLASSES,
JMSAccessorService.class.getName() + "," + JMSTopicService.class.getName());
- conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#" + ActiveMQConnFactory
- + ";" + "java.naming.provider.url#" + localActiveMQBroker + ";connectionFactoryNames#"
+ conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#" + ACTIVE_MQ_CONN_FACTORY
+ + ";" + "java.naming.provider.url#" + LOCAL_ACTIVE_MQ_BROKER + ";connectionFactoryNames#"
+ "ConnectionFactory");
services.init();
}
@@ -329,7 +326,7 @@ public class TestJMSJobEventListener extends XTestCase {
int randomPort = 30000 + random.nextInt(10000);
String brokerURl = "tcp://localhost:" + randomPort;
conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#"
- + ActiveMQConnFactory + ";" + "java.naming.provider.url#" + brokerURl + ";connectionFactoryNames#"
+ + ACTIVE_MQ_CONN_FACTORY + ";" + "java.naming.provider.url#" + brokerURl + ";connectionFactoryNames#"
+ "ConnectionFactory");
services.init();
JMSJobEventListener wfEventListener = new JMSJobEventListener();
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/jms/TestJMSSLAEventListener.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/jms/TestJMSSLAEventListener.java b/core/src/test/java/org/apache/oozie/jms/TestJMSSLAEventListener.java
index 30fd151..7b9e64b 100644
--- a/core/src/test/java/org/apache/oozie/jms/TestJMSSLAEventListener.java
+++ b/core/src/test/java/org/apache/oozie/jms/TestJMSSLAEventListener.java
@@ -56,8 +56,8 @@ public class TestJMSSLAEventListener extends XTestCase {
conf = services.getConf();
conf.set(Services.CONF_SERVICE_EXT_CLASSES,
JMSAccessorService.class.getName() + "," + JMSTopicService.class.getName());
- conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#" + ActiveMQConnFactory
- + ";" + "java.naming.provider.url#" + localActiveMQBroker + ";" + "connectionFactoryNames#"
+ conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#" + ACTIVE_MQ_CONN_FACTORY
+ + ";" + "java.naming.provider.url#" + LOCAL_ACTIVE_MQ_BROKER + ";" + "connectionFactoryNames#"
+ "ConnectionFactory");
services.init();
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/service/TestJMSAccessorService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestJMSAccessorService.java b/core/src/test/java/org/apache/oozie/service/TestJMSAccessorService.java
index 505049d..96803db 100644
--- a/core/src/test/java/org/apache/oozie/service/TestJMSAccessorService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestJMSAccessorService.java
@@ -21,7 +21,6 @@ package org.apache.oozie.service;
import java.net.URI;
import java.util.Random;
-import javax.jms.JMSException;
import javax.jms.Session;
import org.apache.activemq.broker.BrokerService;
@@ -161,7 +160,7 @@ public class TestJMSAccessorService extends XTestCase {
servicesConf.set(JMSAccessorService.CONF_RETRY_INITIAL_DELAY, "1");
servicesConf.set(JMSAccessorService.CONF_RETRY_MAX_ATTEMPTS, "3");
servicesConf.set(HCatAccessorService.JMS_CONNECTIONS_PROPERTIES, "default=java.naming.factory.initial#"
- + ActiveMQConnFactory + ";" + "java.naming.provider.url#" + brokerURl + ";" + "connectionFactoryNames#"
+ + ACTIVE_MQ_CONN_FACTORY + ";" + "java.naming.provider.url#" + brokerURl + ";" + "connectionFactoryNames#"
+ "ConnectionFactory");
services.init();
HCatAccessorService hcatService = Services.get().get(HCatAccessorService.class);
@@ -193,7 +192,7 @@ public class TestJMSAccessorService extends XTestCase {
services = super.setupServicesForHCatalog();
int randomPort = 30000 + random.nextInt(10000);
String brokerURL = "tcp://localhost:" + randomPort;
- String jndiPropertiesString = "java.naming.factory.initial#" + ActiveMQConnFactory + ";"
+ String jndiPropertiesString = "java.naming.factory.initial#" + ACTIVE_MQ_CONN_FACTORY + ";"
+ "java.naming.provider.url#" + brokerURL + ";" + "connectionFactoryNames#" + "ConnectionFactory";
Configuration servicesConf = services.getConf();
servicesConf.set(JMSAccessorService.CONF_RETRY_INITIAL_DELAY, "1");
@@ -242,7 +241,7 @@ public class TestJMSAccessorService extends XTestCase {
public void testConnectionRetryMaxAttempt() throws Exception {
services.destroy();
services = super.setupServicesForHCatalog();
- String jndiPropertiesString = "java.naming.factory.initial#" + ActiveMQConnFactory + ";"
+ String jndiPropertiesString = "java.naming.factory.initial#" + ACTIVE_MQ_CONN_FACTORY + ";"
+ "java.naming.provider.url#" + "tcp://localhost:12345;connectionFactoryNames#ConnectionFactory";
Configuration servicesConf = services.getConf();
servicesConf.set(JMSAccessorService.CONF_RETRY_INITIAL_DELAY, "1");
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/test/MiniOozieTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/MiniOozieTestCase.java b/core/src/test/java/org/apache/oozie/test/MiniOozieTestCase.java
index 12ede02..93b9cd0 100644
--- a/core/src/test/java/org/apache/oozie/test/MiniOozieTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/MiniOozieTestCase.java
@@ -18,7 +18,6 @@
package org.apache.oozie.test;
-import org.apache.oozie.test.XFsTestCase;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.local.LocalOozie;
@@ -39,7 +38,7 @@ import org.apache.oozie.local.LocalOozie;
* 'hdfs://localhost:9000'.
* <p/>
* The test working directory is created in the specified FS URI, under the current user name home directory, under the
- * subdirectory name specified wit the system property {@link XTestCase#OOZIE_TEST_DIR}. The default value is '/tmp'.
+ * subdirectory name specified with the system property {@link TestConstants#OOZIE_TEST_DIR}. The default value is '/tmp'.
* <p/> The path of the test working directory is: '$FS_URI/user/$USER/$OOZIE_TEST_DIR/oozietest/$TEST_CASE_CLASS/$TEST_CASE_METHOD/'
* <p/> For example: 'hdfs://localhost:9000/user/tucu/tmp/oozietest/org.apache.oozie.service.TestELService/testEL/'
* <p/>
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/test/TestCaseDirectories.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestCaseDirectories.java b/core/src/test/java/org/apache/oozie/test/TestCaseDirectories.java
new file mode 100644
index 0000000..36be4da
--- /dev/null
+++ b/core/src/test/java/org/apache/oozie/test/TestCaseDirectories.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.test;
+
+import junit.framework.TestCase;
+import org.apache.oozie.util.ParamChecker;
+import org.apache.oozie.util.XLog;
+
+import java.io.File;
+import java.io.IOException;
+
+class TestCaseDirectories {
+
+ /**
+ * Create the test working directory.
+ *
+ * @param testCase testcase instance to obtain the working directory.
+ * @param cleanup indicates if the directory should be cleaned up if it exists.
+ * @return return the path of the test working directory, it is always an absolute path.
+ * @throws Exception if the test working directory could not be created or cleaned up.
+ */
+ String createTestCaseDir(final TestCase testCase, final boolean cleanup) throws Exception {
+ final String testCaseDir = getTestCaseDirInternal(testCase);
+ System.out.println();
+ System.out.println(XLog.format("Setting testcase work dir[{0}]", testCaseDir));
+ if (cleanup) {
+ delete(new File(testCaseDir));
+ }
+ final File dir = new File(testCaseDir);
+ if (!dir.mkdirs()) {
+ throw new RuntimeException(XLog.format("Could not create testcase dir[{0}]", testCaseDir));
+ }
+ return testCaseDir;
+ }
+
+ /**
+ * Return the test working directory.
+ * <p/>
+ * It returns <code>${oozie.test.dir}/oozietests/TESTCLASSNAME/TESTMETHODNAME</code>.
+ *
+ * @param testCase testcase instance to obtain the working directory.
+ * @return the test working directory.
+ */
+ private String getTestCaseDirInternal(final TestCase testCase) {
+ ParamChecker.notNull(testCase, "testCase");
+
+ File dir = new File(System.getProperty(TestConstants.OOZIE_TEST_DIR, "target/test-data"));
+
+ dir = new File(dir, "oozietests").getAbsoluteFile();
+ dir = new File(dir, testCase.getClass().getName());
+ dir = new File(dir, testCase.getName());
+
+ return dir.getAbsolutePath();
+ }
+
+ protected void delete(final File file) throws IOException {
+ ParamChecker.notNull(file, "file");
+ if (file.getAbsolutePath().length() < 5) {
+ throw new RuntimeException(XLog.format("path [{0}] is too short, not deleting", file.getAbsolutePath()));
+ }
+ if (file.exists()) {
+ if (file.isDirectory()) {
+ final File[] children = file.listFiles();
+ if (children != null) {
+ for (final File child : children) {
+ delete(child);
+ }
+ }
+ }
+ if (!file.delete()) {
+ throw new RuntimeException(XLog.format("could not delete path [{0}]", file.getAbsolutePath()));
+ }
+ }
+ else {
+ // With a dangling symlink, exists() doesn't return true so try to delete it anyway; we fail silently in case the file
+ // truly doesn't exist
+ file.delete();
+ }
+ }
+
+ String createTestCaseSubdir(String testCaseDir, String[] subDirNames) {
+ ParamChecker.notNull(subDirNames, "subDirName");
+ if (subDirNames.length == 0) {
+ throw new RuntimeException(XLog.format("Could not create testcase subdir ''; it already exists"));
+ }
+
+ File dir = new File(testCaseDir);
+ for (int i = 0; i < subDirNames.length; i++) {
+ ParamChecker.notNull(subDirNames[i], "subDirName[" + i + "]");
+ dir = new File(dir, subDirNames[i]);
+ }
+
+ if (!dir.mkdirs()) {
+ throw new RuntimeException(XLog.format("Could not create testcase subdir[{0}]", dir));
+ }
+ return dir.getAbsolutePath();
+ }
+
+ void createTestDirOrError() {
+ final String baseDir = System.getProperty(TestConstants.OOZIE_TEST_DIR, new File("target/test-data").getAbsolutePath());
+ String msg = null;
+ final File testDir = new File(baseDir);
+ if (!testDir.isAbsolute()) {
+ msg = XLog.format("System property [{0}]=[{1}] must be set to an absolute path", TestConstants.OOZIE_TEST_DIR, baseDir);
+ }
+ else {
+ if (baseDir.length() < 4) {
+ msg = XLog.format("System property [{0}]=[{1}] path must be at least 4 chars", TestConstants.OOZIE_TEST_DIR, baseDir);
+ }
+ }
+ if (msg != null) {
+ System.err.println();
+ System.err.println(msg);
+ System.exit(-1);
+ }
+ testDir.mkdirs();
+ if (!testDir.exists() || !testDir.isDirectory()) {
+ System.err.println();
+ System.err.println(XLog.format("Could not create test dir [{0}]", baseDir));
+ System.exit(-1);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestConfigurations.java b/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
new file mode 100644
index 0000000..c6c1d23
--- /dev/null
+++ b/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.test;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.oozie.dependency.FSURIHandler;
+import org.apache.oozie.dependency.HCatURIHandler;
+import org.apache.oozie.service.*;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Map;
+
+class TestConfigurations {
+
+ Configuration createYarnConfig(final Configuration parentConfig) {
+ final Configuration yarnConfig = new YarnConfiguration(parentConfig);
+
+ yarnConfig.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
+ yarnConfig.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:0");
+
+ return yarnConfig;
+ }
+
+ @SuppressWarnings("deprecation")
+ JobConf createJobConfFromYarnCluster(final Configuration yarnConfiguration) {
+ final JobConf jobConf = new JobConf();
+ final JobConf jobConfYarn = new JobConf(yarnConfiguration);
+
+ for (final Map.Entry<String, String> entry : jobConfYarn) {
+ // MiniMRClientClusterFactory sets the job jar in Hadoop 2.0 causing tests to fail
+ // TODO call conf.unset after moving completely to Hadoop 2.x
+ if (!(entry.getKey().equals("mapreduce.job.jar") || entry.getKey().equals("mapred.jar"))) {
+ jobConf.set(entry.getKey(), entry.getValue());
+ }
+ }
+
+ return jobConf;
+ }
+
+ JobConf createPristineJobConf(final String jobTrackerUri, final String nameNodeUri) {
+ final JobConf jobConf = new JobConf();
+
+ jobConf.set("mapred.job.tracker", jobTrackerUri);
+ jobConf.set("fs.default.name", nameNodeUri);
+
+ return jobConf;
+ }
+
+ JobConf createDFSConfig(String oozieUser, String testGroup) throws UnknownHostException {
+ final JobConf conf = new JobConf();
+ conf.set("dfs.block.access.token.enable", "false");
+ conf.set("dfs.permissions", "true");
+ conf.set("hadoop.security.authentication", "simple");
+
+ //Doing this because Hadoop 1.x does not support '*' if the value is '*,127.0.0.1'
+ final StringBuilder sb = new StringBuilder();
+ sb.append("127.0.0.1,localhost");
+ for (final InetAddress i : InetAddress.getAllByName(InetAddress.getLocalHost().getHostName())) {
+ sb.append(",").append(i.getCanonicalHostName());
+ }
+ conf.set("hadoop.proxyuser." + oozieUser + ".hosts", sb.toString());
+
+ conf.set("hadoop.proxyuser." + oozieUser + ".groups", testGroup);
+ conf.set("mapred.tasktracker.map.tasks.maximum", "4");
+ conf.set("mapred.tasktracker.reduce.tasks.maximum", "4");
+
+ conf.set("hadoop.tmp.dir", "target/test-data" + "/minicluster");
+
+ // Scheduler properties required for YARN CapacityScheduler to work
+ conf.set("yarn.scheduler.capacity.root.queues", "default");
+ conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+ // Required to prevent deadlocks with YARN CapacityScheduler
+ conf.set("yarn.scheduler.capacity.maximum-am-resource-percent", "0.5");
+
+ return conf;
+ }
+
+ void setConfigurationForHCatalog(final Services services) {
+ final Configuration conf = services.getConf();
+
+ conf.set(Services.CONF_SERVICE_EXT_CLASSES,
+ JMSAccessorService.class.getName() + "," +
+ PartitionDependencyManagerService.class.getName() + "," +
+ HCatAccessorService.class.getName());
+ conf.set(HCatAccessorService.JMS_CONNECTIONS_PROPERTIES,
+ "default=java.naming.factory.initial#" + XTestCase.ACTIVE_MQ_CONN_FACTORY + ";" +
+ "java.naming.provider.url#" + XTestCase.LOCAL_ACTIVE_MQ_BROKER +
+ "connectionFactoryNames#" + "ConnectionFactory");
+ conf.set(URIHandlerService.URI_HANDLERS,
+ FSURIHandler.class.getName() + "," + HCatURIHandler.class.getName());
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/test/TestConstants.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestConstants.java b/core/src/test/java/org/apache/oozie/test/TestConstants.java
new file mode 100644
index 0000000..f7d4580
--- /dev/null
+++ b/core/src/test/java/org/apache/oozie/test/TestConstants.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.test;
+
+import org.apache.hadoop.util.Shell;
+
+public class TestConstants {
+ /**
+ * System property that specifies the default test user name used by
+ * the tests. The default value of this property is <tt>test</tt>.
+ */
+ public static final String TEST_USER1_PROP = "oozie.test.user.test";
+ /**
+ * System property that specifies the test group used by the tests.
+ * The default value of this property is <tt>testg</tt>.
+ */
+ public static final String TEST_GROUP_PROP2 = "oozie.test.group2";
+ /**
+ * Name of the shell command
+ */
+ public static final String SHELL_COMMAND_NAME = (Shell.WINDOWS) ? "cmd" : "bash";
+ /**
+ * Extension for shell script files
+ */
+ protected static final String SHELL_COMMAND_SCRIPTFILE_EXTENSION = (Shell.WINDOWS) ? "cmd" : "sh";
+ /**
+ * Option for shell command to pass script files
+ */
+ public static final String SHELL_COMMAND_SCRIPTFILE_OPTION = (Shell.WINDOWS) ? "/c" : "-c";
+ /**
+ * System property to specify the parent directory for the 'oozietests' directory to be used as base for all test
+ * working directories. </p> If this property is not set, the assumed value is '/tmp'.
+ */
+ static final String OOZIE_TEST_DIR = "oozie.test.dir";
+ /**
+ * System property to specify the Hadoop Job Tracker to use for testing. </p> If this property is not set, the
+ * assumed value is 'localhost:9001'.
+ */
+ static final String OOZIE_TEST_JOB_TRACKER = "oozie.test.job.tracker";
+ /**
+ * System property to specify the Hadoop Name Node to use for testing. </p> If this property is not set, the assumed
+ * value is 'localhost:9000'.
+ */
+ static final String OOZIE_TEST_NAME_NODE = "oozie.test.name.node";
+ /**
+ * System property to specify the second Hadoop Name Node to use for testing. </p> If this property is not set, the assumed
+ * value is 'localhost:9100'.
+ */
+ static final String OOZIE_TEST_NAME_NODE2 = "oozie.test.name.node2";
+ /**
+ * System property to specify the Hadoop Version to use for testing. </p> If this property is not set, the assumed
+ * value is "0.20.0"
+ */
+ static final String HADOOP_VERSION = "hadoop.version";
+ /**
+ * System property that specifies the user that test oozie instance runs as.
+ * The value of this property defaults to the value of the "user.name" system property.
+ */
+ static final String TEST_OOZIE_USER_PROP = "oozie.test.user.oozie";
+ /**
+ * System property that specifies an auxiliary test user name used by the
+ * tests. The default value of this property is <tt>test2</tt>.
+ */
+ static final String TEST_USER2_PROP = "oozie.test.user.test2";
+ /**
+ * System property that specifies another auxiliary test user name used by
+ * the tests. The default value of this property is <tt>test3</tt>.
+ */
+ static final String TEST_USER3_PROP = "oozie.test.user.test3";
+ /**
+ * System property that specifies the test group used by the tests.
+ * The default value of this property is <tt>testg</tt>.
+ */
+ static final String TEST_GROUP_PROP = "oozie.test.group";
+ /**
+ * System property that specifies the wait time, in seconds, between testcases before
+ * triggering a shutdown. The default value is 10 sec.
+ */
+ static final String TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT = "oozie.test.minicluster.monitor.shutdown.wait";
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/test/TestDbCleaner.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestDbCleaner.java b/core/src/test/java/org/apache/oozie/test/TestDbCleaner.java
new file mode 100644
index 0000000..318274c
--- /dev/null
+++ b/core/src/test/java/org/apache/oozie/test/TestDbCleaner.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.test;
+
+import org.apache.commons.logging.LogFactory;
+import org.apache.oozie.*;
+import org.apache.oozie.service.JPAService;
+import org.apache.oozie.service.ServiceException;
+import org.apache.oozie.service.Services;
+import org.apache.oozie.service.StoreService;
+import org.apache.oozie.sla.SLARegistrationBean;
+import org.apache.oozie.sla.SLASummaryBean;
+import org.apache.oozie.store.StoreException;
+import org.apache.oozie.util.XLog;
+
+import javax.persistence.EntityManager;
+import javax.persistence.FlushModeType;
+import javax.persistence.TypedQuery;
+import java.util.List;
+
+public class TestDbCleaner {
+    private static final XLog log = new XLog(LogFactory.getLog(TestDbCleaner.class));
+
+    /**
+     * Minimal set of required Services for cleaning up the database ({@link JPAService} and {@link StoreService})
+     */
+    private static final String MINIMAL_SERVICES_FOR_DB_CLEANUP = JPAService.class.getName() + "," + StoreService.class.getName();
+
+    // Lazily created via ensureEntityManager(); reset to null after performCleanDbTables() closes it
+    private EntityManager entityManager;
+
+    /**
+     * Removes all workflow, coordinator, bundle and SLA entities from the test database.
+     * <p>
+     * If the {@link Services} singleton is already loaded, only the cleanup is performed. Otherwise the minimal
+     * set of Services needed for database access is started for the duration of the cleanup and destroyed when done.
+     *
+     * @throws StoreException if a JPA operation fails
+     * @throws ServiceException if the minimal Services cannot be initialized
+     */
+    void cleanDbTables() throws StoreException, ServiceException {
+        // If the Services are already loaded, then a test is likely calling this for something specific and we shouldn't mess with
+        // the Services; so just cleanup the database
+        if (Services.get() != null) {
+            performCleanDbTables();
+        }
+        else {
+            // Otherwise, this is probably being called during setup() and we should just load the minimal set of required Services
+            // needed to cleanup the database and shut them down when done; the test will likely start its own Services later and
+            // we don't want to interfere
+            try {
+                final Services services = new Services();
+                services.getConf().set(Services.CONF_SERVICE_CLASSES, MINIMAL_SERVICES_FOR_DB_CLEANUP);
+                services.init();
+                performCleanDbTables();
+            } finally {
+                if (Services.get() != null) {
+                    Services.get().destroy();
+                }
+            }
+        }
+    }
+
+    private void performCleanDbTables() throws StoreException {
+        ensureEntityManager().setFlushMode(FlushModeType.COMMIT);
+        ensureEntityManager().getTransaction().begin();
+
+        final int wfjSize = removeAllByQueryName("GET_WORKFLOWS", WorkflowJobBean.class);
+        final int wfaSize = removeAllByQueryName("GET_ACTIONS", WorkflowActionBean.class);
+        final int cojSize = removeAllByQueryName("GET_COORD_JOBS", CoordinatorJobBean.class);
+        final int coaSize = removeAllByQueryName("GET_COORD_ACTIONS", CoordinatorActionBean.class);
+        final int bjSize = removeAllByQueryName("GET_BUNDLE_JOBS", BundleJobBean.class);
+        final int baSize = removeAllByQueryName("GET_BUNDLE_ACTIONS", BundleActionBean.class);
+        final int slaSize = removeAllByQueryName("GET_SLA_EVENTS", SLAEventBean.class);
+        // NOTE: a second GET_SLA_EVENTS run ("ssSize") was removed here: it always deleted 0 rows (the first run
+        // already emptied the table) and was logged under the misleading label SLA_SUMMARY. SLA_SUMMARY is cleaned
+        // below via slaSumSize.
+        final int slaRegSize = removeAllByHql("select OBJECT(w) from SLARegistrationBean w", SLARegistrationBean.class);
+        final int slaSumSize = removeAllByHql("select OBJECT(w) from SLASummaryBean w", SLASummaryBean.class);
+
+        ensureEntityManager().getTransaction().commit();
+        ensureEntityManager().close();
+        // Drop the cached reference so a later cleanup gets a fresh EntityManager instead of reusing the closed one
+        entityManager = null;
+
+        log.info(wfjSize + " entries in WF_JOBS removed from DB!");
+        log.info(wfaSize + " entries in WF_ACTIONS removed from DB!");
+        log.info(cojSize + " entries in COORD_JOBS removed from DB!");
+        log.info(coaSize + " entries in COORD_ACTIONS removed from DB!");
+        log.info(bjSize + " entries in BUNDLE_JOBS removed from DB!");
+        log.info(baSize + " entries in BUNDLE_ACTIONS removed from DB!");
+        log.info(slaSize + " entries in SLA_EVENTS removed from DB!");
+        log.info(slaRegSize + " entries in SLA_REGISTRATION removed from DB!");
+        log.info(slaSumSize + " entries in SLA_SUMMARY removed from DB!");
+    }
+
+    // Deletes every entity returned by the given named query; returns the number of removed entities
+    private <E> int removeAllByQueryName(final String queryName, final Class<E> entityClass) {
+        return removeAll(ensureEntityManager().createNamedQuery(queryName, entityClass));
+    }
+
+    // Deletes every entity returned by the given JPQL query; returns the number of removed entities
+    private <E> int removeAllByHql(final String hql, final Class<E> entityClass) {
+        return removeAll(ensureEntityManager().createQuery(hql, entityClass));
+    }
+
+    private <E> int removeAll(final TypedQuery<E> query) {
+        final List<E> entitiesToRemove = query.getResultList();
+        final int removedEntityCount = entitiesToRemove.size();
+
+        for (final E entityToRemove : entitiesToRemove) {
+            ensureEntityManager().remove(entityToRemove);
+        }
+
+        return removedEntityCount;
+    }
+
+    // Lazily obtains an EntityManager from the JPAService; requires Services to be initialized
+    private EntityManager ensureEntityManager() {
+        if (entityManager == null) {
+            entityManager = Services.get().get(JPAService.class).getEntityManager();
+        }
+
+        return entityManager;
+    }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/test/TestPropertiesLoader.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestPropertiesLoader.java b/core/src/test/java/org/apache/oozie/test/TestPropertiesLoader.java
new file mode 100644
index 0000000..97057f8
--- /dev/null
+++ b/core/src/test/java/org/apache/oozie/test/TestPropertiesLoader.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.test;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.Map;
+import java.util.Properties;
+
+public class TestPropertiesLoader {
+
+    /**
+     * Locates the Oozie source directory and loads the test properties file into the JVM system properties.
+     * <p>
+     * The properties file is taken from the {@code oozie.test.properties} system property (default
+     * {@code test.properties}); a relative path is resolved against the Oozie source directory. Properties
+     * already present on the command line are not overwritten. If an explicitly specified file does not
+     * exist, the JVM exits with an error.
+     *
+     * @return the detected Oozie source directory
+     * @throws RuntimeException wrapping any {@link IOException} raised while reading the file
+     */
+    public File loadTestPropertiesOrThrow() {
+        try {
+            final File oozieSrcDir = findOozieSrcDir();
+
+            final String testPropsFile = System.getProperty(XTestCase.OOZIE_TEST_PROPERTIES, "test.properties");
+            final File file = new File(testPropsFile).isAbsolute()
+                    ? new File(testPropsFile) : new File(oozieSrcDir, testPropsFile);
+            if (file.exists()) {
+                loadTestProperties(file);
+            }
+            else {
+                checkTestPropertiesAndError();
+            }
+
+            return oozieSrcDir;
+        } catch (final IOException ex) {
+            throw new RuntimeException(ex);
+        }
+    }
+
+    // Walks up from the current directory looking for the "core" module to locate the Oozie source tree
+    private File findOozieSrcDir() {
+        File oozieSrcDir = new File("core").getAbsoluteFile();
+
+        if (!oozieSrcDir.exists()) {
+            oozieSrcDir = oozieSrcDir.getParentFile().getParentFile();
+            oozieSrcDir = new File(oozieSrcDir, "core");
+        }
+        if (!oozieSrcDir.exists()) {
+            oozieSrcDir = oozieSrcDir.getParentFile().getParentFile();
+            oozieSrcDir = new File(oozieSrcDir, "core");
+        }
+        if (!oozieSrcDir.exists()) {
+            // We're probably being run from outside of Oozie (e.g. MiniOozie), so just use a dummy location here.
+            // Anything that uses this location should have a fallback anyway.
+            oozieSrcDir = new File(".");
+        }
+        else {
+            oozieSrcDir = oozieSrcDir.getParentFile();
+        }
+
+        return oozieSrcDir;
+    }
+
+    // Loads properties from the given file into System properties; command-line values take precedence
+    private void loadTestProperties(final File file) throws IOException {
+        System.out.println();
+        System.out.println("*********************************************************************************");
+        System.out.println("Loading test system properties from: " + file.getAbsolutePath());
+        System.out.println();
+        final Properties props = new Properties();
+        // try-with-resources: the original leaked the FileReader (it was never closed)
+        try (final FileReader reader = new FileReader(file)) {
+            props.load(reader);
+        }
+        for (final Map.Entry<Object, Object> entry : props.entrySet()) {
+            if (!System.getProperties().containsKey(entry.getKey())) {
+                System.setProperty((String) entry.getKey(), (String) entry.getValue());
+                System.out.println(entry.getKey() + " = " + entry.getValue());
+            }
+            else {
+                System.out.println(entry.getKey() + " IGNORED, using command line value = " +
+                        System.getProperty((String) entry.getKey()));
+            }
+        }
+        System.out.println("*********************************************************************************");
+        System.out.println();
+    }
+
+    // Exits the JVM with an error if a test properties file was explicitly requested but does not exist
+    private void checkTestPropertiesAndError() {
+        if (System.getProperty(XTestCase.OOZIE_TEST_PROPERTIES) != null) {
+            System.err.println();
+            System.err.println("ERROR: Specified test file does not exist: " +
+                    System.getProperty(XTestCase.OOZIE_TEST_PROPERTIES));
+            System.err.println();
+            System.exit(-1);
+        }
+    }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/test/TestSystemProperties.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestSystemProperties.java b/core/src/test/java/org/apache/oozie/test/TestSystemProperties.java
new file mode 100644
index 0000000..dcc770a
--- /dev/null
+++ b/core/src/test/java/org/apache/oozie/test/TestSystemProperties.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.test;
+
+import org.apache.oozie.service.ConfigurationService;
+import org.apache.oozie.service.HadoopAccessorService;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class TestSystemProperties {
+ private Map<String, String> sysProps;
+
+ private boolean embeddedHadoop = false;
+
+ private boolean embeddedHadoop2 = false;
+
+ void setupSystemProperties(final String testCaseDir) throws Exception {
+ if (System.getProperty("oozielocal.log") == null) {
+ setSystemProperty("oozielocal.log", "/tmp/oozielocal.log");
+ }
+ if (System.getProperty("oozie.test.hadoop.security", "simple").equals("kerberos")) {
+ System.setProperty("oozie.service.HadoopAccessorService.kerberos.enabled", "true");
+ }
+ if (System.getProperty("oozie.test.hadoop.minicluster", "true").equals("true")) {
+ embeddedHadoop = true;
+ // Second cluster is not necessary without the first one
+ if (System.getProperty("oozie.test.hadoop.minicluster2", "false").equals("true")) {
+ embeddedHadoop2 = true;
+ }
+ }
+
+ if (System.getProperty("oozie.test.db.host") == null) {
+ System.setProperty("oozie.test.db.host", "localhost");
+ }
+ setSystemProperty(ConfigurationService.OOZIE_DATA_DIR, testCaseDir);
+
+ setSystemProperty(HadoopAccessorService.SUPPORTED_FILESYSTEMS, "*");
+ }
+
+ /**
+ * Reset changed system properties to their original values. <p/> Called from {@link XTestCase#tearDown}.
+ */
+ void resetSystemProperties() {
+ if (sysProps != null) {
+ for (final Map.Entry<String, String> entry : sysProps.entrySet()) {
+ if (entry.getValue() != null) {
+ System.setProperty(entry.getKey(), entry.getValue());
+ }
+ else {
+ System.getProperties().remove(entry.getKey());
+ }
+ }
+ sysProps.clear();
+ }
+ }
+
+ /**
+ * Set a system property for the duration of the method test case.
+ * <p/>
+ * After the test method ends the original value is restored.
+ *
+ * @param name system property name.
+ * @param value value to set.
+ */
+ protected void setSystemProperty(final String name, final String value) {
+ if (sysProps == null) {
+ sysProps = new HashMap<String, String>();
+ }
+ if (!sysProps.containsKey(name)) {
+ final String currentValue = System.getProperty(name);
+ sysProps.put(name, currentValue);
+ }
+ if (value != null) {
+ System.setProperty(name, value);
+ }
+ else {
+ System.getProperties().remove(name);
+ }
+ }
+
+ boolean isEmbeddedHadoop() {
+ return embeddedHadoop;
+ }
+
+ boolean isEmbeddedHadoop2() {
+ return embeddedHadoop2;
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestXTestCase.java b/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
index 7a2d70d..735f80a 100644
--- a/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
@@ -37,7 +37,7 @@ public class TestXTestCase extends TestCase {
}
public void testBaseDir() throws Exception {
- testBaseDir = System.getProperty(XTestCase.OOZIE_TEST_DIR);
+ testBaseDir = System.getProperty(TestConstants.OOZIE_TEST_DIR);
try {
MyXTestCase testcase = new MyXTestCase();
testcase.setName(getName());
@@ -47,7 +47,7 @@ public class TestXTestCase extends TestCase {
}
finally {
if (testBaseDir != null) {
- System.getProperties().setProperty(XTestCase.OOZIE_TEST_DIR, testBaseDir);
+ System.getProperties().setProperty(TestConstants.OOZIE_TEST_DIR, testBaseDir);
}
}
}
@@ -190,8 +190,8 @@ public class TestXTestCase extends TestCase {
public void testHadoopSysProps() {
if (TESTING) {
- setSystemProperty(XTestCase.OOZIE_TEST_NAME_NODE, "hdfs://xyz:9000");
- setSystemProperty(XTestCase.OOZIE_TEST_JOB_TRACKER, "xyz:9001");
+ setSystemProperty(TestConstants.OOZIE_TEST_NAME_NODE, "hdfs://xyz:9000");
+ setSystemProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, "xyz:9001");
assertEquals("hdfs://xyz:9000", getNameNodeUri());
assertEquals("xyz:9001", getJobTrackerUri());
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XFsTestCase.java b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
index 20529e8..7a419d1 100644
--- a/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XLog;
-import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.command.wf.ActionXCommand.ActionExecutorContext;
import org.apache.oozie.service.HadoopAccessorException;
@@ -37,16 +36,15 @@ import org.apache.oozie.service.Services;
import java.io.IOException;
import java.net.URI;
-import java.net.URISyntaxException;
/**
* Base JUnit <code>TestCase</code> subclass used by all Oozie testcases that need Hadoop FS access. <p/> As part of its
* setup, this testcase class creates a unique test working directory per test method in the FS. <p/> The URI of the FS
- * namenode must be specified via the {@link XTestCase#OOZIE_TEST_NAME_NODE} system property. The default value is
+ * namenode must be specified via the {@link TestConstants#OOZIE_TEST_NAME_NODE} system property. The default value is
* 'hdfs://localhost:9000'.
*
* The test working directory is created in the specified FS URI, under the current user name home directory, under the
- * subdirectory name specified wit the system property {@link XTestCase#OOZIE_TEST_DIR}. The default value is '/tmp'.
+ * subdirectory name specified with the system property {@link TestConstants#OOZIE_TEST_DIR}. The default value is '/tmp'.
* <p/> The path of the test working directory is: '$FS_URI/user/$USER/$OOZIE_TEST_DIR/oozietest/$TEST_CASE_CLASS/$TEST_CASE_METHOD/'
* <p/> For example: 'hdfs://localhost:9000/user/tucu/tmp/oozietest/org.apache.oozie.service.TestELService/testEL/'
*/
[07/48] oozie git commit: OOZIE-2594 correctly implement
MapReduceActionExecutor.kill()
Posted by pb...@apache.org.
OOZIE-2594 correctly implement MapReduceActionExecutor.kill()
Change-Id: Ia091bd3943f4abf1b4e9c505a01fbb926fceac91
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/782837fc
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/782837fc
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/782837fc
Branch: refs/heads/oya
Commit: 782837fcef594ae73a46a620923fb69a8248d1de
Parents: 61f3a9f
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Fri Nov 11 14:42:00 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Fri Nov 11 17:36:56 2016 +0100
----------------------------------------------------------------------
.../org/apache/oozie/action/ActionExecutor.java | 2 +-
.../action/hadoop/MapReduceActionExecutor.java | 41 ++++++++++++++++++++
.../wf/TestWorkflowActionKillXCommand.java | 14 -------
.../org/apache/oozie/test/XDataTestCase.java | 2 +-
4 files changed, 43 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/782837fc/core/src/main/java/org/apache/oozie/action/ActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/ActionExecutor.java b/core/src/main/java/org/apache/oozie/action/ActionExecutor.java
index 1d6456b..919509d 100644
--- a/core/src/main/java/org/apache/oozie/action/ActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/ActionExecutor.java
@@ -596,7 +596,7 @@ public abstract class ActionExecutor {
* @param action the action
* @return the action yarn tag
*/
- public String getActionYarnTag(Configuration conf, WorkflowJob wfJob, WorkflowAction action) {
+ public static String getActionYarnTag(Configuration conf, WorkflowJob wfJob, WorkflowAction action) {
if (conf.get(OOZIE_ACTION_YARN_TAG) != null) {
return conf.get(OOZIE_ACTION_YARN_TAG) + "@" + action.getName();
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/782837fc/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
index 1b975ab..e97de7e 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/MapReduceActionExecutor.java
@@ -33,6 +33,14 @@ import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
+import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.client.ClientRMProxy;
+import org.apache.hadoop.yarn.client.api.YarnClient;
+import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.action.ActionExecutorException;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.service.ConfigurationService;
@@ -393,6 +401,39 @@ public class MapReduceActionExecutor extends JavaActionExecutor {
}
@Override
+ public void kill(Context context, WorkflowAction action) throws ActionExecutorException {
+ // Kill the LauncherAM which submits the MR job
+ super.kill(context, action);
+
+ // We have to check whether the MapReduce execution has started or not. If it has started, then we have to get
+ // the YARN ApplicationID based on the tag and kill it as well
+
+ // TODO: this must be tested in TestMapReduceActionExecutor
+ try {
+ String tag = ActionExecutor.getActionYarnTag(new Configuration(), context.getWorkflow(), action);
+ GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
+ gar.setScope(ApplicationsRequestScope.ALL);
+ gar.setApplicationTags(Collections.singleton(tag));
+ Element actionXml = XmlUtils.parseXml(action.getConf());
+ Configuration actionConf = loadHadoopDefaultResources(context, actionXml);
+ ApplicationClientProtocol proxy = ClientRMProxy.createRMProxy(actionConf, ApplicationClientProtocol.class);
+ GetApplicationsResponse apps = proxy.getApplications(gar);
+ List<ApplicationReport> appsList = apps.getApplicationList();
+
+ YarnClient yarnClient = YarnClient.createYarnClient();
+ yarnClient.init(actionConf);
+ yarnClient.start();
+
+ for (ApplicationReport app : appsList) {
+ LOG.info("Killing MapReduce job {0}", app.getApplicationId().toString());
+ yarnClient.killApplication(app.getApplicationId());
+ }
+ } catch (Exception e) {
+ throw convertException(e);
+ }
+ }
+
+ @Override
void injectActionCallback(Context context, Configuration actionConf) {
injectCallback(context, actionConf);
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/782837fc/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
index 71b46d1..ef75f14 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
@@ -115,20 +115,6 @@ public class TestWorkflowActionKillXCommand extends XDataTestCase {
assertEquals(action.getExternalStatus(), "RUNNING");
}
- // FIXME - fix JAE.kill()
- public void testWfActionKillChildJob() throws Exception {
- String externalJobID = launchSleepJob(1000);
- String childId = launchSleepJob(1000000);
-
- WorkflowJobBean job = this.addRecordToWfJobTable(WorkflowJob.Status.KILLED, WorkflowInstance.Status.KILLED);
- WorkflowActionBean action = this.addRecordToWfActionTable(job.getId(), externalJobID, "1",
- WorkflowAction.Status.KILLED, childId);
-
- new ActionKillXCommand(action.getId()).call();
-
- waitUntilYarnAppKilledAndAssertSuccess(childId);
- }
-
protected WorkflowActionBean addRecordToWfActionTable(String wfId, String externalJobID, String actionName,
WorkflowAction.Status status, String childID) throws Exception {
WorkflowActionBean action = new WorkflowActionBean();
http://git-wip-us.apache.org/repos/asf/oozie/blob/782837fc/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XDataTestCase.java b/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
index ea778bd..2105e2f 100644
--- a/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
@@ -1452,7 +1452,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
action.setUserRetryMax(2);
action.setUserRetryInterval(1);
action.setErrorInfo("dummyErrorCode", "dummyErrorMessage");
- action.setExternalId("dummy external id");
+ action.setExternalId("application_1234567890123_0001");
action.setExternalStatus("RUNNING");
return action;
[45/48] oozie git commit: OOZIE-2741 Remove Tomcat (asasvari via
rkanter)
Posted by pb...@apache.org.
OOZIE-2741 Remove Tomcat (asasvari via rkanter)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/5f53676b
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/5f53676b
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/5f53676b
Branch: refs/heads/oya
Commit: 5f53676bb544e98dacba112acbdbde8caca3e8bc
Parents: 62c060c
Author: Robert Kanter <rk...@cloudera.com>
Authored: Tue Dec 6 14:01:29 2016 -0800
Committer: Robert Kanter <rk...@cloudera.com>
Committed: Tue Dec 6 14:01:29 2016 -0800
----------------------------------------------------------------------
core/src/main/conf/oozie-env.sh | 5 +-
core/src/main/resources/oozie-default.xml | 2 +-
distro/pom.xml | 57 +---
distro/src/main/bin/oozie-setup.sh | 147 +--------
distro/src/main/bin/oozie-sys.sh | 27 --
distro/src/main/bin/oozie-tomcat-server.sh | 89 ------
distro/src/main/bin/oozied.sh | 21 +-
distro/src/main/tomcat/ROOT/WEB-INF/web.xml | 25 --
distro/src/main/tomcat/ROOT/favicon.ico | Bin 1150 -> 0 bytes
distro/src/main/tomcat/ROOT/index.html | 25 --
distro/src/main/tomcat/logging.properties | 64 ----
distro/src/main/tomcat/server.xml | 150 ----------
distro/src/main/tomcat/ssl-server.xml | 152 ----------
distro/src/main/tomcat/ssl-web.xml | 295 -------------------
pom.xml | 8 -
release-log.txt | 1 +
.../org/apache/oozie/server/JspHandler.java | 3 -
src/main/assemblies/distro-jetty.xml | 155 ----------
src/main/assemblies/distro-tomcat.xml | 153 ----------
src/main/assemblies/distro.xml | 155 ++++++++++
20 files changed, 164 insertions(+), 1370 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/core/src/main/conf/oozie-env.sh
----------------------------------------------------------------------
diff --git a/core/src/main/conf/oozie-env.sh b/core/src/main/conf/oozie-env.sh
index bc8c601..033c87b 100644
--- a/core/src/main/conf/oozie-env.sh
+++ b/core/src/main/conf/oozie-env.sh
@@ -19,12 +19,9 @@
# Set Oozie specific environment variables here.
-# Settings for the Embedded Tomcat that runs Oozie
+# Settings for the Embedded Jetty that runs Oozie
# Java System properties for Oozie should be specified in this variable
#
-if [ "${OOZIE_USE_TOMCAT}" = "1" ]; then
- export CATALINA_OPTS="$CATALINA_OPTS -Xmx1024m"
-fi
# Oozie configuration file to load from Oozie configuration directory
#
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/core/src/main/resources/oozie-default.xml
----------------------------------------------------------------------
diff --git a/core/src/main/resources/oozie-default.xml b/core/src/main/resources/oozie-default.xml
index 8565643..2d7650a 100644
--- a/core/src/main/resources/oozie-default.xml
+++ b/core/src/main/resources/oozie-default.xml
@@ -2604,7 +2604,7 @@ will be the requeue interval for the actions which are waiting for a long time w
<name>oozie.server.threadpool.max.threads</name>
<value>150</value>
<description>
- Controls the threadpool size for the Oozie Server (both Jetty and Tomcat)
+ Controls the threadpool size for the Oozie Server (if using embedded Jetty)
</description>
</property>
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/pom.xml
----------------------------------------------------------------------
diff --git a/distro/pom.xml b/distro/pom.xml
index ce5319f..740aff0 100644
--- a/distro/pom.xml
+++ b/distro/pom.xml
@@ -31,10 +31,6 @@
<name>Apache Oozie Distro</name>
<packaging>jar</packaging>
- <properties>
- <distro.descriptor>../src/main/assemblies/distro-jetty.xml</distro.descriptor>
- </properties>
-
<dependencies>
<dependency>
<groupId>org.apache.oozie</groupId>
@@ -77,7 +73,7 @@
<configuration>
<finalName>oozie-${project.version}</finalName>
<descriptors>
- <descriptor>${distro.descriptor}</descriptor>
+ <descriptor>../src/main/assemblies/distro.xml</descriptor>
</descriptors>
</configuration>
</plugin>
@@ -108,56 +104,5 @@
</dependency>
</dependencies>
</profile>
-
- <profile>
- <id>tomcat</id>
- <properties>
- <distro.descriptor>../src/main/assemblies/distro-tomcat.xml</distro.descriptor>
- </properties>
- <build>
- <plugins>
- <!-- Downloading Tomcat TAR.GZ, using downloads/ dir to avoid downloading over an over -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
- <version>1.6</version>
- <executions>
- <execution>
- <configuration>
- <target>
- <mkdir dir="downloads"/>
- <get src="http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz"
- dest="downloads/tomcat-${tomcat.version}.tar.gz" verbose="true" skipexisting="true"/>
- <delete dir="target/tomcat"/>
- <mkdir dir="target/tomcat"/>
- <gunzip src="downloads/tomcat-${tomcat.version}.tar.gz"
- dest="target/tomcat/tomcat-${tomcat.version}.tar"/>
- <untar src="target/tomcat/tomcat-${tomcat.version}.tar" dest="target/tomcat"/>
- <move file="target/tomcat/apache-tomcat-${tomcat.version}" tofile="target/tomcat/oozie-server"/>
- <delete dir="target/tomcat/oozie-server/webapps"/>
- <mkdir dir="target/tomcat/oozie-server/webapps"/>
- <delete file="target/tomcat/oozie-server/conf/server.xml"/>
- <copy file="src/main/tomcat/server.xml" toDir="target/tomcat/oozie-server/conf"/>
- <copy file="src/main/tomcat/logging.properties"
- toDir="target/tomcat/oozie-server/conf"/>
- <mkdir dir="target/tomcat/oozie-server/conf/ssl"/>
- <copy file="src/main/tomcat/server.xml" toDir="target/tomcat/oozie-server/conf/ssl"/>
- <copy file="src/main/tomcat/ssl-server.xml" toDir="target/tomcat/oozie-server/conf/ssl"/>
- <copy file="src/main/tomcat/ssl-web.xml" toDir="target/tomcat/oozie-server/conf/ssl"/>
- <copy todir="target/tomcat/oozie-server/webapps/ROOT">
- <fileset dir="src/main/tomcat/ROOT"/>
- </copy>
- </target>
- </configuration>
- <goals>
- <goal>run</goal>
- </goals>
- <phase>package</phase>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- </profile>
</profiles>
</project>
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/src/main/bin/oozie-setup.sh
----------------------------------------------------------------------
diff --git a/distro/src/main/bin/oozie-setup.sh b/distro/src/main/bin/oozie-setup.sh
index 9d6a2d0..b8d8016 100644
--- a/distro/src/main/bin/oozie-setup.sh
+++ b/distro/src/main/bin/oozie-setup.sh
@@ -125,11 +125,7 @@ additionalDir=""
extjsHome=""
jarsPath=""
prepareWar=""
-inputWar="${OOZIE_HOME}/oozie.war"
-outputWar="${CATALINA_BASE}/webapps/oozie.war"
-outputWarExpanded="${CATALINA_BASE}/webapps/oozie"
secure=""
-secureConfigsDir="${CATALINA_BASE}/conf/ssl"
while [ $# -gt 0 ]
do
@@ -143,16 +139,7 @@ do
#Create lib directory from war if lib doesn't exist
if [ ! -d "${BASEDIR}/lib" ]; then
mkdir ${BASEDIR}/lib
-
- if [ "${OOZIE_USE_TOMCAT}" = "1" ]; then
- unzip ${BASEDIR}/oozie.war WEB-INF/lib/*.jar -d ${BASEDIR}/lib > /dev/null
- mv ${BASEDIR}/lib/WEB-INF/lib/*.jar ${BASEDIR}/lib/
- rmdir ${BASEDIR}/lib/WEB-INF/lib
- rmdir ${BASEDIR}/lib/WEB-INF
- else
- cp ${JETTY_LIB_DIR}/* ${BASEDIR}/lib
- fi
-
+ cp ${JETTY_LIB_DIR}/* ${BASEDIR}/lib
fi
OOZIECPPATH=""
@@ -191,13 +178,6 @@ do
shift
done
-if [ -e "${CATALINA_PID}" -a "${OOZIE_USE_TOMCAT}" = "1" ]; then
- echo
- echo "ERROR: Stop Oozie first"
- echo
- exit -1
-fi
-
echo
@@ -274,130 +254,7 @@ prepare_jetty() {
fi
}
-prepare_tomcat() {
- if [ "${prepareWar}" == "" ]; then
- echo "no arguments given"
- printUsage
- exit -1
- else
- if [ -e "${outputWar}" ]; then
- chmod -f u+w ${outputWar}
- rm -rf ${outputWar}
- fi
- rm -rf ${outputWarExpanded}
-
- check_adding_extensions
-
- prepare
-
- checkFileExists ${inputWar}
- checkFileDoesNotExist ${outputWar}
-
- check_extjs
-
- if [ "${addJars}" = "true" ]; then
- for jarPath in ${jarsPath//:/$'\n'}
- do
- checkFileExists ${jarPath}
- done
- fi
-
- #Unpacking original war
- unzip ${inputWar} -d ${tmpWarDir} > /dev/null
- checkExec "unzipping Oozie input WAR"
-
- components=""
-
- if [ "${OOZIE_USE_TOMCAT}" == "1" ]; then
- if [ "${secure}" != "" ]; then
- #Use the SSL version of server.xml in oozie-server
- checkFileExists ${secureConfigsDir}/ssl-server.xml
- cp ${secureConfigsDir}/ssl-server.xml ${CATALINA_BASE}/conf/server.xml
- #Inject the SSL version of web.xml in oozie war
- checkFileExists ${secureConfigsDir}/ssl-web.xml
- cp ${secureConfigsDir}/ssl-web.xml ${tmpWarDir}/WEB-INF/web.xml
- echo "INFO: Using secure server.xml and secure web.xml"
- else
- #Use the regular version of server.xml in oozie-server
- checkFileExists ${secureConfigsDir}/server.xml
- cp ${secureConfigsDir}/server.xml ${CATALINA_BASE}/conf/server.xml
- #No need to restore web.xml because its already in the original WAR file
- fi
- fi
-
- if [ "${addExtjs}" = "true" ]; then
- if [ ! "${components}" = "" ];then
- components="${components}, "
- fi
- components="${components}ExtJS library"
- if [ -e ${tmpWarDir}/ext-2.2 ]; then
- echo
- echo "Specified Oozie WAR '${inputWar}' already contains ExtJS library files"
- cleanup_and_exit
- fi
- #If the extjs path given is a ZIP, expand it and use it from there
- if [ -f ${extjsHome} ]; then
- unzip ${extjsHome} -d ${tmpDir} > /dev/null
- extjsHome=${tmpDir}/ext-2.2
- fi
- #Inject the library in oozie war
- cp -r ${extjsHome} ${tmpWarDir}/ext-2.2
- checkExec "copying ExtJS files into staging"
- fi
-
- if [ "${addJars}" = "true" ]; then
- if [ ! "${components}" = "" ];then
- components="${components}, "
- fi
- components="${components}JARs"
-
- for jarPath in ${jarsPath//:/$'\n'}
- do
- found=`ls ${tmpWarDir}/WEB-INF/lib/${jarPath} 2> /dev/null | wc -l`
- checkExec "looking for JAR ${jarPath} in input WAR"
- if [ ! $found = 0 ]; then
- echo
- echo "Specified Oozie WAR '${inputWar}' already contains JAR ${jarPath}"
- cleanup_and_exit
- fi
- cp ${jarPath} ${tmpWarDir}/WEB-INF/lib/
- checkExec "copying jar ${jarPath} to staging"
- done
- fi
-
- #Creating new Oozie WAR
- currentDir=`pwd`
- cd ${tmpWarDir}
- zip -r oozie.war * > /dev/null
- checkExec "creating new Oozie WAR"
- cd ${currentDir}
-
- #copying new Oozie WAR to asked location
- if [ "${OOZIE_USE_TOMCAT}" == "1" ]; then
- cp ${tmpWarDir}/oozie.war ${outputWar}
- checkExec "copying new Oozie WAR"
-
- echo
- echo "New Oozie WAR file with added '${components}' at ${outputWar}"
- echo
- fi
-
- cleanUp
-
- if [ "$?" -ne "0" ]; then
- exit -1
- fi
-
- log_ready_to_start
-
- fi
-}
-
-if [ "${OOZIE_USE_TOMCAT}" = "1" ]; then
- prepare_tomcat
-else
- prepare_jetty
-fi
+prepare_jetty
log_ready_to_start
exit 0
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/src/main/bin/oozie-sys.sh
----------------------------------------------------------------------
diff --git a/distro/src/main/bin/oozie-sys.sh b/distro/src/main/bin/oozie-sys.sh
index 688aeb2..90bffba 100755
--- a/distro/src/main/bin/oozie-sys.sh
+++ b/distro/src/main/bin/oozie-sys.sh
@@ -195,13 +195,6 @@ else
print "Using OOZIE_BASE_URL: ${OOZIE_BASE_URL}"
fi
-if [ "${OOZIE_USE_TOMCAT}" = "1" -a "${CATALINA_BASE}" = "" ]; then
- export CATALINA_BASE=${OOZIE_HOME}/oozie-server
- print "Setting CATALINA_BASE: ${CATALINA_BASE}"
-else
- print "Using CATALINA_BASE: ${CATALINA_BASE}"
-fi
-
if [ "${OOZIE_HTTPS_KEYSTORE_FILE}" = "" ]; then
export OOZIE_HTTPS_KEYSTORE_FILE=${HOME}/.keystore
print "Setting OOZIE_HTTPS_KEYSTORE_FILE: ${OOZIE_HTTPS_KEYSTORE_FILE}"
@@ -223,26 +216,6 @@ else
print "Using OOZIE_INSTANCE_ID: ${OOZIE_INSTANCE_ID}"
fi
-if [ "${OOZIE_USE_TOMCAT}" = "1" ]; then
- if [ "${CATALINA_OUT}" = "" ]; then
- export CATALINA_OUT=${OOZIE_LOG}/catalina.out
- print "Setting CATALINA_OUT: ${CATALINA_OUT}"
- else
- print "Using CATALINA_OUT: ${CATALINA_OUT}"
- fi
-fi
-
-if [ "${OOZIE_USE_TOMCAT}" = "1" -a "${CATALINA_PID}" = "" ]; then
- export CATALINA_PID=${OOZIE_HOME}/oozie-server/temp/oozie.pid
- print "Setting CATALINA_PID: ${CATALINA_PID}"
-else
- print "Using CATALINA_PID: ${CATALINA_PID}"
-fi
-
-if [ "${OOZIE_USE_TOMCAT}" = "1" ]; then
- export CATALINA_OPTS="${CATALINA_OPTS} -Dderby.stream.error.file=${OOZIE_LOG}/derby.log"
-fi
-
print
setup_ooziedb() {
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/src/main/bin/oozie-tomcat-server.sh
----------------------------------------------------------------------
diff --git a/distro/src/main/bin/oozie-tomcat-server.sh b/distro/src/main/bin/oozie-tomcat-server.sh
deleted file mode 100644
index 18dd0f6..0000000
--- a/distro/src/main/bin/oozie-tomcat-server.sh
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-CATALINA=${OOZIE_CATALINA_HOME:-${BASEDIR}/oozie-server}/bin/catalina.sh
-
-setup_catalina_opts() {
- # The Java System properties 'oozie.http.port' and 'oozie.https.port' are not
- # used by Oozie, they are used in Tomcat's server.xml configuration file
- #
- echo "Using CATALINA_OPTS: ${CATALINA_OPTS}"
-
- catalina_opts="-Doozie.home.dir=${OOZIE_HOME}";
- catalina_opts="${catalina_opts} -Doozie.config.dir=${OOZIE_CONFIG}";
- catalina_opts="${catalina_opts} -Doozie.log.dir=${OOZIE_LOG}";
- catalina_opts="${catalina_opts} -Doozie.data.dir=${OOZIE_DATA}";
- catalina_opts="${catalina_opts} -Doozie.instance.id=${OOZIE_INSTANCE_ID}"
-
- catalina_opts="${catalina_opts} -Doozie.config.file=${OOZIE_CONFIG_FILE}";
-
- catalina_opts="${catalina_opts} -Doozie.log4j.file=${OOZIE_LOG4J_FILE}";
- catalina_opts="${catalina_opts} -Doozie.log4j.reload=${OOZIE_LOG4J_RELOAD}";
-
- catalina_opts="${catalina_opts} -Doozie.http.hostname=${OOZIE_HTTP_HOSTNAME}";
- catalina_opts="${catalina_opts} -Doozie.admin.port=${OOZIE_ADMIN_PORT}";
- catalina_opts="${catalina_opts} -Doozie.http.port=${OOZIE_HTTP_PORT}";
- catalina_opts="${catalina_opts} -Doozie.https.port=${OOZIE_HTTPS_PORT}";
- catalina_opts="${catalina_opts} -Doozie.base.url=${OOZIE_BASE_URL}";
- catalina_opts="${catalina_opts} -Doozie.https.keystore.file=${OOZIE_HTTPS_KEYSTORE_FILE}";
- catalina_opts="${catalina_opts} -Doozie.https.keystore.pass=${OOZIE_HTTPS_KEYSTORE_PASS}";
-
- # add required native libraries such as compression codecs
- catalina_opts="${catalina_opts} -Djava.library.path=${JAVA_LIBRARY_PATH}";
-
- echo "Adding to CATALINA_OPTS: ${catalina_opts}"
-
- export CATALINA_OPTS="${CATALINA_OPTS} ${catalina_opts}"
-}
-
-setup_oozie() {
- if [ ! -e "${CATALINA_BASE}/webapps/oozie.war" ]; then
- echo "WARN: Oozie WAR has not been set up at '${CATALINA_BASE}/webapps', doing default set up"
- ${BASEDIR}/bin/oozie-setup.sh prepare-war
- if [ "$?" -ne "0" ]; then
- exit -1
- fi
- fi
- echo
-}
-
-tomcat_main() {
- source ${BASEDIR}/bin/oozie-sys.sh
-
- #Create webapp directory from war if lib doesn't exist
- if [ ! -d "${BASEDIR}/embedded-oozie-server/webapp" ]; then
- unzip "${BASEDIR}/oozie.war" -d "${BASEDIR}/embedded-oozie-server/webapp" > /dev/null
- fi
-
- actionCmd=$1
- case $actionCmd in
- (start|run)
- setup_catalina_opts
- setup_oozie
- setup_ooziedb
- #TODO setup default oozie sharelib
- ;;
- (stop)
- setup_catalina_opts
-
- # A bug in catalina.sh script does not use CATALINA_OPTS for stopping the server
- export JAVA_OPTS=${CATALINA_OPTS}
- ;;
- esac
- exec $CATALINA $actionCmd "$@"
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/src/main/bin/oozied.sh
----------------------------------------------------------------------
diff --git a/distro/src/main/bin/oozied.sh b/distro/src/main/bin/oozied.sh
index 462ba76..0b1b987 100644
--- a/distro/src/main/bin/oozied.sh
+++ b/distro/src/main/bin/oozied.sh
@@ -33,28 +33,13 @@ done
BASEDIR=`dirname ${PRG}`
BASEDIR=`cd ${BASEDIR}/..;pwd`
-if [ -e "${BASEDIR}/oozie-server" ]; then
- export OOZIE_USE_TOMCAT=1
-else
- export OOZIE_USE_TOMCAT=0
-fi
-
if [ $# -le 0 ]; then
- if [ "${OOZIE_USE_TOMCAT}" -eq "1" ]; then
- echo "Usage: oozied.sh (start|stop|run) [<catalina-args...>]"
- else
- echo "Usage: oozied.sh (start|stop|run)"
- fi
+ echo "Usage: oozied.sh (start|stop|run)"
exit 1
fi
actionCmd=$1
shift
-if [ "${OOZIE_USE_TOMCAT}" == "1" ]; then
- source ${BASEDIR}/bin/oozie-tomcat-server.sh
- tomcat_main $actionCmd
-else
- source ${BASEDIR}/bin/oozie-jetty-server.sh
- jetty_main $actionCmd
-fi
+source ${BASEDIR}/bin/oozie-jetty-server.sh
+jetty_main $actionCmd
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/src/main/tomcat/ROOT/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/distro/src/main/tomcat/ROOT/WEB-INF/web.xml b/distro/src/main/tomcat/ROOT/WEB-INF/web.xml
deleted file mode 100644
index ad01e89..0000000
--- a/distro/src/main/tomcat/ROOT/WEB-INF/web.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<!DOCTYPE web-app
- PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.2//EN"
- "http://java.sun.com/dtd/web-app_2_3.dtd">
-
-<web-app>
- <display-name>ROOT</display-name>
-</web-app>
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/src/main/tomcat/ROOT/favicon.ico
----------------------------------------------------------------------
diff --git a/distro/src/main/tomcat/ROOT/favicon.ico b/distro/src/main/tomcat/ROOT/favicon.ico
deleted file mode 100644
index 5d95710..0000000
Binary files a/distro/src/main/tomcat/ROOT/favicon.ico and /dev/null differ
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/src/main/tomcat/ROOT/index.html
----------------------------------------------------------------------
diff --git a/distro/src/main/tomcat/ROOT/index.html b/distro/src/main/tomcat/ROOT/index.html
deleted file mode 100644
index 8cc891c..0000000
--- a/distro/src/main/tomcat/ROOT/index.html
+++ /dev/null
@@ -1,25 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<html>
- <head>
- <meta http-equiv="refresh" content="0;url=./oozie">
- </head>
- <body>
- <a href="/oozie">Oozie Console</a>
- </body>
-</html>
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/src/main/tomcat/logging.properties
----------------------------------------------------------------------
diff --git a/distro/src/main/tomcat/logging.properties b/distro/src/main/tomcat/logging.properties
deleted file mode 100644
index 305195b..0000000
--- a/distro/src/main/tomcat/logging.properties
+++ /dev/null
@@ -1,64 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-handlers = 1catalina.org.apache.juli.FileHandler, 2localhost.org.apache.juli.FileHandler, 3manager.org.apache.juli.FileHandler, 4host-manager.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
-
-.handlers = 1catalina.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
-
-############################################################
-# Handler specific properties.
-# Describes specific configuration info for Handlers.
-############################################################
-
-1catalina.org.apache.juli.FileHandler.level = FINE
-1catalina.org.apache.juli.FileHandler.directory = ${oozie.log.dir}
-1catalina.org.apache.juli.FileHandler.prefix = catalina.
-
-2localhost.org.apache.juli.FileHandler.level = FINE
-2localhost.org.apache.juli.FileHandler.directory = ${oozie.log.dir}
-2localhost.org.apache.juli.FileHandler.prefix = localhost.
-
-3manager.org.apache.juli.FileHandler.level = FINE
-3manager.org.apache.juli.FileHandler.directory = ${oozie.log.dir}
-3manager.org.apache.juli.FileHandler.prefix = manager.
-
-4host-manager.org.apache.juli.FileHandler.level = FINE
-4host-manager.org.apache.juli.FileHandler.directory = ${oozie.log.dir}
-4host-manager.org.apache.juli.FileHandler.prefix = host-manager.
-
-java.util.logging.ConsoleHandler.level = FINE
-java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
-
-
-############################################################
-# Facility specific properties.
-# Provides extra control for each logger.
-############################################################
-
-org.apache.catalina.core.ContainerBase.[Catalina].[localhost].level = INFO
-org.apache.catalina.core.ContainerBase.[Catalina].[localhost].handlers = 2localhost.org.apache.juli.FileHandler
-
-org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/manager].level = INFO
-org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/manager].handlers = 3manager.org.apache.juli.FileHandler
-
-org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/host-manager].level = INFO
-org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/host-manager].handlers = 4host-manager.org.apache.juli.FileHandler
-
-# For example, set the com.xyz.foo logger to only log SEVERE
-# messages:
-#org.apache.catalina.startup.ContextConfig.level = FINE
-#org.apache.catalina.startup.HostConfig.level = FINE
-#org.apache.catalina.session.ManagerBase.level = FINE
-#org.apache.catalina.core.AprLifecycleListener.level=FINE
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/src/main/tomcat/server.xml
----------------------------------------------------------------------
diff --git a/distro/src/main/tomcat/server.xml b/distro/src/main/tomcat/server.xml
deleted file mode 100644
index e4fcfcc..0000000
--- a/distro/src/main/tomcat/server.xml
+++ /dev/null
@@ -1,150 +0,0 @@
-<?xml version='1.0' encoding='utf-8'?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<!-- Note: A "Server" is not itself a "Container", so you may not
- define subcomponents such as "Valves" at this level.
- Documentation at /docs/config/server.html
- -->
-<Server port="${oozie.admin.port}" shutdown="SHUTDOWN">
-
- <!--APR library loader. Documentation at /docs/apr.html -->
- <Listener className="org.apache.catalina.core.AprLifecycleListener" SSLEngine="on" />
- <!--Initialize Jasper prior to webapps are loaded. Documentation at /docs/jasper-howto.html -->
- <Listener className="org.apache.catalina.core.JasperListener" />
- <!-- Prevent memory leaks due to use of particular java/javax APIs-->
- <Listener className="org.apache.catalina.core.JreMemoryLeakPreventionListener" />
- <!-- JMX Support for the Tomcat server. Documentation at /docs/non-existent.html -->
- <Listener className="org.apache.catalina.mbeans.ServerLifecycleListener" />
- <Listener className="org.apache.catalina.mbeans.GlobalResourcesLifecycleListener" />
-
- <!-- Global JNDI resources
- Documentation at /docs/jndi-resources-howto.html
- -->
- <GlobalNamingResources>
- <!-- Editable user database that can also be used by
- UserDatabaseRealm to authenticate users
- -->
- <Resource name="UserDatabase" auth="Container"
- type="org.apache.catalina.UserDatabase"
- description="User database that can be updated and saved"
- factory="org.apache.catalina.users.MemoryUserDatabaseFactory"
- pathname="conf/tomcat-users.xml" />
- </GlobalNamingResources>
-
- <!-- A "Service" is a collection of one or more "Connectors" that share
- a single "Container" Note: A "Service" is not itself a "Container",
- so you may not define subcomponents such as "Valves" at this level.
- Documentation at /docs/config/service.html
- -->
- <Service name="Catalina">
-
- <!--The connectors can use a shared executor, you can define one or more named thread pools-->
- <!--
- <Executor name="tomcatThreadPool" namePrefix="catalina-exec-"
- maxThreads="150" minSpareThreads="4"/>
- -->
-
-
- <!-- A "Connector" represents an endpoint by which requests are received
- and responses are returned. Documentation at :
- Java HTTP Connector: /docs/config/http.html (blocking & non-blocking)
- Java AJP Connector: /docs/config/ajp.html
- APR (HTTP/AJP) Connector: /docs/apr.html
- Define a non-SSL HTTP/1.1 Connector on port ${oozie.http.port}
- -->
- <Connector port="${oozie.http.port}" protocol="HTTP/1.1"
- connectionTimeout="20000"
- maxHttpHeaderSize="65536"
- redirectPort="8443" />
- <!-- A "Connector" using the shared thread pool-->
- <!--
- <Connector executor="tomcatThreadPool"
- port="${oozie.http.port}" protocol="HTTP/1.1"
- connectionTimeout="20000"
- redirectPort="8443" />
- -->
- <!-- Define a SSL HTTP/1.1 Connector on port 8443
- This connector uses the JSSE configuration, when using APR, the
- connector should be using the OpenSSL style configuration
- described in the APR documentation -->
- <!--
- <Connector port="8443" protocol="HTTP/1.1" SSLEnabled="true"
- maxThreads="150" scheme="https" secure="true"
- clientAuth="false" sslProtocol="TLS" />
- -->
-
- <!-- Define an AJP 1.3 Connector on port 8009 -->
-
-
-
- <!-- An Engine represents the entry point (within Catalina) that processes
- every request. The Engine implementation for Tomcat stand alone
- analyzes the HTTP headers included with the request, and passes them
- on to the appropriate Host (virtual host).
- Documentation at /docs/config/engine.html -->
-
- <!-- You should set jvmRoute to support load-balancing via AJP ie :
- <Engine name="Catalina" defaultHost="localhost" jvmRoute="jvm1">
- -->
- <Engine name="Catalina" defaultHost="localhost">
-
- <!--For clustering, please take a look at documentation at:
- /docs/cluster-howto.html (simple how to)
- /docs/config/cluster.html (reference documentation) -->
- <!--
- <Cluster className="org.apache.catalina.ha.tcp.SimpleTcpCluster"/>
- -->
-
- <!-- The request dumper valve dumps useful debugging information about
- the request and response data received and sent by Tomcat.
- Documentation at: /docs/config/valve.html -->
- <!--
- <Valve className="org.apache.catalina.valves.RequestDumperValve"/>
- -->
-
- <!-- This Realm uses the UserDatabase configured in the global JNDI
- resources under the key "UserDatabase". Any edits
- that are performed against this UserDatabase are immediately
- available for use by the Realm. -->
- <Realm className="org.apache.catalina.realm.UserDatabaseRealm"
- resourceName="UserDatabase"/>
-
- <!-- Define the default virtual host
- Note: XML Schema validation will not work with Xerces 2.2.
- -->
- <Host name="localhost" appBase="webapps"
- unpackWARs="true" autoDeploy="true"
- xmlValidation="false" xmlNamespaceAware="false">
-
- <!-- SingleSignOn valve, share authentication between web applications
- Documentation at: /docs/config/valve.html -->
- <!--
- <Valve className="org.apache.catalina.authenticator.SingleSignOn" />
- -->
-
- <!-- Access log processes all example.
- Documentation at: /docs/config/valve.html -->
- <!--
- <Valve className="org.apache.catalina.valves.AccessLogValve" directory="logs"
- prefix="localhost_access_log." suffix=".txt" pattern="common" resolveHosts="false"/>
- -->
-
- </Host>
- </Engine>
- </Service>
-</Server>
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/src/main/tomcat/ssl-server.xml
----------------------------------------------------------------------
diff --git a/distro/src/main/tomcat/ssl-server.xml b/distro/src/main/tomcat/ssl-server.xml
deleted file mode 100644
index 9a44560..0000000
--- a/distro/src/main/tomcat/ssl-server.xml
+++ /dev/null
@@ -1,152 +0,0 @@
-<?xml version='1.0' encoding='utf-8'?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<!-- Note: A "Server" is not itself a "Container", so you may not
- define subcomponents such as "Valves" at this level.
- Documentation at /docs/config/server.html
- -->
-<Server port="${oozie.admin.port}" shutdown="SHUTDOWN">
-
- <!--APR library loader. Documentation at /docs/apr.html -->
- <Listener className="org.apache.catalina.core.AprLifecycleListener" SSLEngine="on" />
- <!--Initialize Jasper prior to webapps are loaded. Documentation at /docs/jasper-howto.html -->
- <Listener className="org.apache.catalina.core.JasperListener" />
- <!-- Prevent memory leaks due to use of particular java/javax APIs-->
- <Listener className="org.apache.catalina.core.JreMemoryLeakPreventionListener" />
- <!-- JMX Support for the Tomcat server. Documentation at /docs/non-existent.html -->
- <Listener className="org.apache.catalina.mbeans.ServerLifecycleListener" />
- <Listener className="org.apache.catalina.mbeans.GlobalResourcesLifecycleListener" />
-
- <!-- Global JNDI resources
- Documentation at /docs/jndi-resources-howto.html
- -->
- <GlobalNamingResources>
- <!-- Editable user database that can also be used by
- UserDatabaseRealm to authenticate users
- -->
- <Resource name="UserDatabase" auth="Container"
- type="org.apache.catalina.UserDatabase"
- description="User database that can be updated and saved"
- factory="org.apache.catalina.users.MemoryUserDatabaseFactory"
- pathname="conf/tomcat-users.xml" />
- </GlobalNamingResources>
-
- <!-- A "Service" is a collection of one or more "Connectors" that share
- a single "Container" Note: A "Service" is not itself a "Container",
- so you may not define subcomponents such as "Valves" at this level.
- Documentation at /docs/config/service.html
- -->
- <Service name="Catalina">
-
- <!--The connectors can use a shared executor, you can define one or more named thread pools-->
- <!--
- <Executor name="tomcatThreadPool" namePrefix="catalina-exec-"
- maxThreads="150" minSpareThreads="4"/>
- -->
-
-
- <!-- A "Connector" represents an endpoint by which requests are received
- and responses are returned. Documentation at :
- Java HTTP Connector: /docs/config/http.html (blocking & non-blocking)
- Java AJP Connector: /docs/config/ajp.html
- APR (HTTP/AJP) Connector: /docs/apr.html
- Define a non-SSL HTTP/1.1 Connector on port ${oozie.http.port}
- -->
- <Connector port="${oozie.http.port}" protocol="HTTP/1.1"
- connectionTimeout="20000"
- maxHttpHeaderSize="65536"
- redirectPort="${oozie.https.port}" />
- <!-- A "Connector" using the shared thread pool-->
- <!--
- <Connector executor="tomcatThreadPool"
- port="${oozie.http.port}" protocol="HTTP/1.1"
- connectionTimeout="20000"
- redirectPort="8443" />
- -->
- <!-- Define a SSL HTTP/1.1 Connector on port 8443
- This connector uses the JSSE configuration, when using APR, the
- connector should be using the OpenSSL style configuration
- described in the APR documentation -->
-
- <Connector port="${oozie.https.port}" protocol="HTTP/1.1" SSLEnabled="true"
- maxThreads="150" scheme="https" secure="true"
- maxHttpHeaderSize="65536"
- clientAuth="false" sslEnabledProtocols="TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2"
- keystoreFile="${oozie.https.keystore.file}"
- keystorePass="${oozie.https.keystore.pass}" />
-
- <!-- Define an AJP 1.3 Connector on port 8009 -->
-
-
-
- <!-- An Engine represents the entry point (within Catalina) that processes
- every request. The Engine implementation for Tomcat stand alone
- analyzes the HTTP headers included with the request, and passes them
- on to the appropriate Host (virtual host).
- Documentation at /docs/config/engine.html -->
-
- <!-- You should set jvmRoute to support load-balancing via AJP ie :
- <Engine name="Catalina" defaultHost="localhost" jvmRoute="jvm1">
- -->
- <Engine name="Catalina" defaultHost="localhost">
-
- <!--For clustering, please take a look at documentation at:
- /docs/cluster-howto.html (simple how to)
- /docs/config/cluster.html (reference documentation) -->
- <!--
- <Cluster className="org.apache.catalina.ha.tcp.SimpleTcpCluster"/>
- -->
-
- <!-- The request dumper valve dumps useful debugging information about
- the request and response data received and sent by Tomcat.
- Documentation at: /docs/config/valve.html -->
- <!--
- <Valve className="org.apache.catalina.valves.RequestDumperValve"/>
- -->
-
- <!-- This Realm uses the UserDatabase configured in the global JNDI
- resources under the key "UserDatabase". Any edits
- that are performed against this UserDatabase are immediately
- available for use by the Realm. -->
- <Realm className="org.apache.catalina.realm.UserDatabaseRealm"
- resourceName="UserDatabase"/>
-
- <!-- Define the default virtual host
- Note: XML Schema validation will not work with Xerces 2.2.
- -->
- <Host name="localhost" appBase="webapps"
- unpackWARs="true" autoDeploy="true"
- xmlValidation="false" xmlNamespaceAware="false">
-
- <!-- SingleSignOn valve, share authentication between web applications
- Documentation at: /docs/config/valve.html -->
- <!--
- <Valve className="org.apache.catalina.authenticator.SingleSignOn" />
- -->
-
- <!-- Access log processes all example.
- Documentation at: /docs/config/valve.html -->
- <!--
- <Valve className="org.apache.catalina.valves.AccessLogValve" directory="logs"
- prefix="localhost_access_log." suffix=".txt" pattern="common" resolveHosts="false"/>
- -->
-
- </Host>
- </Engine>
- </Service>
-</Server>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/distro/src/main/tomcat/ssl-web.xml
----------------------------------------------------------------------
diff --git a/distro/src/main/tomcat/ssl-web.xml b/distro/src/main/tomcat/ssl-web.xml
deleted file mode 100644
index cf0d621..0000000
--- a/distro/src/main/tomcat/ssl-web.xml
+++ /dev/null
@@ -1,295 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" "http://java.sun.com/dtd/web-app_2_3.dtd" [
- <!ENTITY web-common SYSTEM "web-common.xml">
-]>
-
-<web-app>
-
- <!--
- ========================================================================
- IMPORTANT: ANY CHANGES TO THE SERVLETS, SERVLET MAPPINGS, LISTENERS, ETC
- MUST BE REFLECTED IN webapp/src/main/webapp/WEB-INF/web.xml
- AS WELL.
- ========================================================================
- -->
-
- <display-name>OOZIE</display-name>
-
- <!-- Listeners -->
- <listener>
- <listener-class>org.apache.oozie.servlet.ServicesLoader</listener-class>
- </listener>
-
- <!-- Servlets -->
- <servlet>
- <servlet-name>versions</servlet-name>
- <display-name>WS API for Workflow Instances</display-name>
- <servlet-class>org.apache.oozie.servlet.VersionServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>v0admin</servlet-name>
- <display-name>Oozie admin</display-name>
- <servlet-class>org.apache.oozie.servlet.V0AdminServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>v1admin</servlet-name>
- <display-name>Oozie admin</display-name>
- <servlet-class>org.apache.oozie.servlet.V1AdminServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>v2admin</servlet-name>
- <display-name>Oozie admin</display-name>
- <servlet-class>org.apache.oozie.servlet.V2AdminServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>callback</servlet-name>
- <display-name>Callback Notification</display-name>
- <servlet-class>org.apache.oozie.servlet.CallbackServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>v0jobs</servlet-name>
- <display-name>WS API for Workflow Jobs</display-name>
- <servlet-class>org.apache.oozie.servlet.V0JobsServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>v1jobs</servlet-name>
- <display-name>WS API for Workflow Jobs</display-name>
- <servlet-class>org.apache.oozie.servlet.V1JobsServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>v0job</servlet-name>
- <display-name>WS API for a specific Workflow Job</display-name>
- <servlet-class>org.apache.oozie.servlet.V0JobServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>v1job</servlet-name>
- <display-name>WS API for a specific Workflow Job</display-name>
- <servlet-class>org.apache.oozie.servlet.V1JobServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>v2job</servlet-name>
- <display-name>WS API for a specific Workflow Job</display-name>
- <servlet-class>org.apache.oozie.servlet.V2JobServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>sla-event</servlet-name>
- <display-name>WS API for specific SLA Events</display-name>
- <servlet-class>org.apache.oozie.servlet.SLAServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>v2sla</servlet-name>
- <display-name>WS API for specific SLA Events</display-name>
- <servlet-class>org.apache.oozie.servlet.V2SLAServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <servlet>
- <servlet-name>validate</servlet-name>
- <display-name>WS API for Workflow Applications</display-name>
- <servlet-class>org.apache.oozie.servlet.V2ValidateServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
- <!-- servlet-mapping -->
- <servlet-mapping>
- <servlet-name>versions</servlet-name>
- <url-pattern>/versions</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>v0admin</servlet-name>
- <url-pattern>/v0/admin/*</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>v1admin</servlet-name>
- <url-pattern>/v1/admin/*</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>v2admin</servlet-name>
- <url-pattern>/v2/admin/*</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>callback</servlet-name>
- <url-pattern>/callback/*</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>v0jobs</servlet-name>
- <url-pattern>/v0/jobs</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>v1jobs</servlet-name>
- <url-pattern>/v1/jobs</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>v1jobs</servlet-name>
- <url-pattern>/v2/jobs</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>v0job</servlet-name>
- <url-pattern>/v0/job/*</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>v1job</servlet-name>
- <url-pattern>/v1/job/*</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>v2job</servlet-name>
- <url-pattern>/v2/job/*</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>sla-event</servlet-name>
- <url-pattern>/v1/sla/*</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>v2sla</servlet-name>
- <url-pattern>/v2/sla/*</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>validate</servlet-name>
- <url-pattern>/v2/validate</url-pattern>
- </servlet-mapping>
-
- <!-- welcome-file -->
- <welcome-file-list>
- <welcome-file>index.jsp</welcome-file>
- </welcome-file-list>
-
- <filter>
- <filter-name>hostnameFilter</filter-name>
- <filter-class>org.apache.oozie.servlet.HostnameFilter</filter-class>
- </filter>
-
- <filter>
- <filter-name>authenticationfilter</filter-name>
- <filter-class>org.apache.oozie.servlet.AuthFilter</filter-class>
- </filter>
-
- <filter-mapping>
- <filter-name>hostnameFilter</filter-name>
- <url-pattern>/*</url-pattern>
- </filter-mapping>
-
- <filter-mapping>
- <filter-name>authenticationfilter</filter-name>
- <url-pattern>/versions/*</url-pattern>
- </filter-mapping>
-
- <filter-mapping>
- <filter-name>authenticationfilter</filter-name>
- <url-pattern>/v0/*</url-pattern>
- </filter-mapping>
-
- <filter-mapping>
- <filter-name>authenticationfilter</filter-name>
- <url-pattern>/v1/*</url-pattern>
- </filter-mapping>
-
- <filter-mapping>
- <filter-name>authenticationfilter</filter-name>
- <url-pattern>/v2/*</url-pattern>
- </filter-mapping>
-
- <filter-mapping>
- <filter-name>authenticationfilter</filter-name>
- <url-pattern>/index.jsp</url-pattern>
- </filter-mapping>
-
- <filter-mapping>
- <filter-name>authenticationfilter</filter-name>
- <url-pattern>/admin/*</url-pattern>
- </filter-mapping>
-
- <filter-mapping>
- <filter-name>authenticationfilter</filter-name>
- <url-pattern>*.js</url-pattern>
- </filter-mapping>
-
- <filter-mapping>
- <filter-name>authenticationfilter</filter-name>
- <url-pattern>/ext-2.2/*</url-pattern>
- </filter-mapping>
-
- <filter-mapping>
- <filter-name>authenticationfilter</filter-name>
- <url-pattern>/docs/*</url-pattern>
- </filter-mapping>
-
- <!-- Require SSL (HTTPS) for everything except callbacks -->
- <security-constraint>
- <web-resource-collection>
- <web-resource-name>Callback</web-resource-name>
- <url-pattern>/callback/*</url-pattern>
- </web-resource-collection>
- <user-data-constraint>
- <transport-guarantee>NONE</transport-guarantee>
- </user-data-constraint>
- </security-constraint>
- <security-constraint>
- <web-resource-collection>
- <web-resource-name>Oozie Resources</web-resource-name>
- <url-pattern>/*</url-pattern>
- </web-resource-collection>
- <user-data-constraint>
- <transport-guarantee>CONFIDENTIAL</transport-guarantee>
- </user-data-constraint>
- </security-constraint>
-
- <!-- Property used by Oozie to determine that SSL (HTTPS) has been enabled -->
- <!-- Do not remove or change this -->
- <context-param>
- <param-name>ssl.enabled</param-name>
- <param-value>true</param-value>
- </context-param>
-
-</web-app>
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 7e67b77..d5db296 100644
--- a/pom.xml
+++ b/pom.xml
@@ -107,8 +107,6 @@
<jetty.version>9.2.19.v20160908</jetty.version>
- <!-- Tomcat version -->
- <tomcat.version>6.0.47</tomcat.version>
<jline.version>0.9.94</jline.version>
<openjpa.version>2.4.1</openjpa.version>
<xerces.version>2.10.0</xerces.version>
@@ -2014,11 +2012,5 @@
<spark.bagel.version>1.6.2</spark.bagel.version>
</properties>
</profile>
- <profile>
- <id>tomcat</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- </profile>
</profiles>
</project>
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 2fd869f..f7b74a2 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
-- Oozie 4.4.0 release (trunk - unreleased)
+OOZIE-2741 Remove Tomcat (asasvari via rkanter)
OOZIE-2745 test-patch should also list the failed tests (gezapeti via rkanter)
OOZIE-2740 oozie help misspelled coordinator (coordiantor) and retrieved (retreived) (gsohn via rkanter)
OOZIE-2690 OOZIE NPE while executing kill() (abhishekbafna via jaydeepvishwakarma)
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/server/src/main/java/org/apache/oozie/server/JspHandler.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/oozie/server/JspHandler.java b/server/src/main/java/org/apache/oozie/server/JspHandler.java
index 9658fd6..453998a 100644
--- a/server/src/main/java/org/apache/oozie/server/JspHandler.java
+++ b/server/src/main/java/org/apache/oozie/server/JspHandler.java
@@ -19,8 +19,6 @@
package org.apache.oozie.server;
import com.google.common.base.Preconditions;
-import org.apache.tomcat.InstanceManager;
-import org.apache.tomcat.SimpleInstanceManager;
import org.eclipse.jetty.annotations.ServletContainerInitializersStarter;
import org.eclipse.jetty.apache.jsp.JettyJasperInitializer;
import org.eclipse.jetty.jsp.JettyJspServlet;
@@ -87,7 +85,6 @@ public class JspHandler {
URI baseUri = webRootResourceLocator.getWebRootResourceUri();
servletContextHandler.setResourceBase(baseUri.toASCIIString());
servletContextHandler.setAttribute("org.eclipse.jetty.containerInitializers", jspInitializers());
- servletContextHandler.setAttribute(InstanceManager.class.getName(), new SimpleInstanceManager());
servletContextHandler.addBean(new ServletContainerInitializersStarter(servletContextHandler), true);
servletContextHandler.setClassLoader(getUrlClassLoader());
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/src/main/assemblies/distro-jetty.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distro-jetty.xml b/src/main/assemblies/distro-jetty.xml
deleted file mode 100644
index 6e3db57..0000000
--- a/src/main/assemblies/distro-jetty.xml
+++ /dev/null
@@ -1,155 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<assembly>
- <id>distro</id>
- <formats>
- <format>dir</format>
- <format>tar.gz</format>
- </formats>
- <includeBaseDirectory>true</includeBaseDirectory>
- <baseDirectory>oozie-${project.version}</baseDirectory>
- <fileSets>
- <!-- Oozie configuration files -->
- <fileSet>
- <directory>${basedir}/../core/src/main/conf/</directory>
- <outputDirectory>/conf</outputDirectory>
- <includes>
- <include>**</include>
- </includes>
- </fileSet>
- <!-- Distro files, readme, licenses, etc -->
- <fileSet>
- <directory>${basedir}/../</directory>
- <outputDirectory>/</outputDirectory>
- <includes>
- <include>LICENSE.txt</include>
- <include>NOTICE.txt</include>
- <include>README.txt</include>
- <include>release-log.txt</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>${basedir}/src/main/bin</directory>
- <outputDirectory>/bin</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0755</fileMode>
- </fileSet>
- <!-- Client -->
- <fileSet>
- <directory>${basedir}/../client/target/oozie-client-${project.version}-client/oozie-client-${project.version}/bin</directory>
- <outputDirectory>/bin</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0755</fileMode>
- </fileSet>
- <!-- Tools -->
- <fileSet>
- <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/bin</directory>
- <outputDirectory>/bin</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0755</fileMode>
- </fileSet>
- <fileSet>
- <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/libtools</directory>
- <outputDirectory>/libtools</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- </fileSet>
- <!-- Oozie Login Server Example war and jar -->
- <fileSet>
- <directory>${basedir}/../login/target</directory>
- <outputDirectory>/</outputDirectory>
- <includes>
- <include>oozie-login.war</include>
- <include>oozie-login.jar</include>
- </includes>
- <fileMode>0555</fileMode>
- </fileSet>
- <!-- Oozie Server - embedded jetty -->
- <fileSet>
- <directory>${basedir}/../server/target/</directory>
- <outputDirectory>/embedded-oozie-server</outputDirectory>
- <includes>
- <include>oozie-server*.jar</include>
- <include>**/jetty*.jar</include>
- <include>**/*jsp*.jar</include>
- <include>**/mail*.jar</include>
- <include>**/apache*.jar</include>
- <include>**/commons-el*.jar</include>
- <include>**/javax.servlet-api-3.1.0.jar</include>
- <include>**/jasper*jar</include>
- <include>**/taglibs-*jar</include>
- <include>**/org.eclipse.jdt.core-*jar</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>${basedir}/../webapp/target/oozie-webapp-${project.version}</directory>
- <outputDirectory>/embedded-oozie-server/webapp</outputDirectory>
- <excludes>
- <exclude>**/web.xml</exclude>
- </excludes>
- </fileSet>
- </fileSets>
- <files>
- <!-- Oozie configuration files -->
- <file>
- <source>${basedir}/../core/src/main/resources/oozie-default.xml</source>
- <outputDirectory>/conf</outputDirectory>
- </file>
- <!-- Oozie core jar -->
- <file>
- <source>${basedir}/../core/target/oozie-core-${project.version}.jar</source>
- <outputDirectory>/oozie-core</outputDirectory>
- </file>
- <!-- Oozie core test jar -->
- <file>
- <source>${basedir}/../core/target/oozie-core-${project.version}-tests.jar</source>
- <outputDirectory>/oozie-core</outputDirectory>
- </file>
- <!-- Oozie Documentation -->
- <file>
- <source>${basedir}/../docs/target/oozie-docs-${project.version}-docs.zip</source>
- <outputDirectory>/</outputDirectory>
- <destName>docs.zip</destName>
- </file>
- <!-- Oozie Client TAR.GZ -->
- <file>
- <source>${basedir}/../client/target/oozie-client-${project.version}-client.tar.gz</source>
- <outputDirectory>/</outputDirectory>
- <destName>oozie-client-${project.version}.tar.gz</destName>
- </file>
- <!-- Oozie examples TAR.GZ -->
- <file>
- <source>${basedir}/../examples/target/oozie-examples-${project.version}-examples.tar.gz</source>
- <outputDirectory>/</outputDirectory>
- <destName>oozie-examples.tar.gz</destName>
- </file>
- <!-- Oozie sharelib TAR.GZ -->
- <file>
- <source>${basedir}/../sharelib/target/oozie-sharelib-${project.version}.tar.gz</source>
- <outputDirectory>/</outputDirectory>
- <fileMode>0444</fileMode>
- </file>
- </files>
-</assembly>
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/src/main/assemblies/distro-tomcat.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distro-tomcat.xml b/src/main/assemblies/distro-tomcat.xml
deleted file mode 100644
index 82ba8f9..0000000
--- a/src/main/assemblies/distro-tomcat.xml
+++ /dev/null
@@ -1,153 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<assembly>
- <id>distro</id>
- <formats>
- <format>dir</format>
- <format>tar.gz</format>
- </formats>
- <includeBaseDirectory>true</includeBaseDirectory>
- <baseDirectory>oozie-${project.version}</baseDirectory>
- <fileSets>
- <!-- Oozie configuration files -->
- <fileSet>
- <directory>${basedir}/../core/src/main/conf/</directory>
- <outputDirectory>/conf</outputDirectory>
- <includes>
- <include>**</include>
- </includes>
- </fileSet>
- <!-- Distro files, readme, licenses, etc -->
- <fileSet>
- <directory>${basedir}/../</directory>
- <outputDirectory>/</outputDirectory>
- <includes>
- <include>LICENSE.txt</include>
- <include>NOTICE.txt</include>
- <include>README.txt</include>
- <include>release-log.txt</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>${basedir}/src/main/bin</directory>
- <outputDirectory>/bin</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0755</fileMode>
- </fileSet>
- <!-- Client -->
- <fileSet>
- <directory>${basedir}/../client/target/oozie-client-${project.version}-client/oozie-client-${project.version}/bin</directory>
- <outputDirectory>/bin</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0755</fileMode>
- </fileSet>
- <!-- Tools -->
- <fileSet>
- <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/bin</directory>
- <outputDirectory>/bin</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0755</fileMode>
- </fileSet>
- <fileSet>
- <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/libtools</directory>
- <outputDirectory>/libtools</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- </fileSet>
- <!-- Embedded Tomcat -->
- <fileSet>
- <directory>${basedir}/target/tomcat/oozie-server</directory>
- <outputDirectory>/oozie-server</outputDirectory>
- <excludes>
- <exclude>bin/*.sh</exclude>
- </excludes>
- </fileSet>
- <fileSet>
- <directory>${basedir}/target/tomcat/oozie-server/bin</directory>
- <outputDirectory>/oozie-server/bin</outputDirectory>
- <includes>
- <include>*.sh</include>
- </includes>
- <fileMode>0555</fileMode>
- </fileSet>
- <!-- Oozie Login Server Example war and jar -->
- <fileSet>
- <directory>${basedir}/../login/target</directory>
- <outputDirectory>/</outputDirectory>
- <includes>
- <include>oozie-login.war</include>
- <include>oozie-login.jar</include>
- </includes>
- <fileMode>0555</fileMode>
- </fileSet>
- </fileSets>
- <files>
- <!-- Oozie configuration files -->
- <file>
- <source>${basedir}/../core/src/main/resources/oozie-default.xml</source>
- <outputDirectory>/conf</outputDirectory>
- </file>
- <!-- Oozie core jar -->
- <file>
- <source>${basedir}/../core/target/oozie-core-${project.version}.jar</source>
- <outputDirectory>/oozie-core</outputDirectory>
- </file>
- <!-- Oozie core test jar -->
- <file>
- <source>${basedir}/../core/target/oozie-core-${project.version}-tests.jar</source>
- <outputDirectory>/oozie-core</outputDirectory>
- </file>
- <!-- Oozie war -->
- <file>
- <source>${basedir}/../webapp/target/oozie-webapp-${project.version}.war</source>
- <outputDirectory>/</outputDirectory>
- <destName>oozie.war</destName>
- </file>
- <!-- Oozie Documentation -->
- <file>
- <source>${basedir}/../docs/target/oozie-docs-${project.version}-docs.zip</source>
- <outputDirectory>/</outputDirectory>
- <destName>docs.zip</destName>
- </file>
- <!-- Oozie Client TAR.GZ -->
- <file>
- <source>${basedir}/../client/target/oozie-client-${project.version}-client.tar.gz</source>
- <outputDirectory>/</outputDirectory>
- <destName>oozie-client-${project.version}.tar.gz</destName>
- </file>
- <!-- Oozie examples TAR.GZ -->
- <file>
- <source>${basedir}/../examples/target/oozie-examples-${project.version}-examples.tar.gz</source>
- <outputDirectory>/</outputDirectory>
- <destName>oozie-examples.tar.gz</destName>
- </file>
- <!-- Oozie sharelib TAR.GZ -->
- <file>
- <source>${basedir}/../sharelib/target/oozie-sharelib-${project.version}.tar.gz</source>
- <outputDirectory>/</outputDirectory>
- <fileMode>0444</fileMode>
- </file>
- </files>
-</assembly>
http://git-wip-us.apache.org/repos/asf/oozie/blob/5f53676b/src/main/assemblies/distro.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distro.xml b/src/main/assemblies/distro.xml
new file mode 100644
index 0000000..6e3db57
--- /dev/null
+++ b/src/main/assemblies/distro.xml
@@ -0,0 +1,155 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<assembly>
+ <id>distro</id>
+ <formats>
+ <format>dir</format>
+ <format>tar.gz</format>
+ </formats>
+ <includeBaseDirectory>true</includeBaseDirectory>
+ <baseDirectory>oozie-${project.version}</baseDirectory>
+ <fileSets>
+ <!-- Oozie configuration files -->
+ <fileSet>
+ <directory>${basedir}/../core/src/main/conf/</directory>
+ <outputDirectory>/conf</outputDirectory>
+ <includes>
+ <include>**</include>
+ </includes>
+ </fileSet>
+ <!-- Distro files, readme, licenses, etc -->
+ <fileSet>
+ <directory>${basedir}/../</directory>
+ <outputDirectory>/</outputDirectory>
+ <includes>
+ <include>LICENSE.txt</include>
+ <include>NOTICE.txt</include>
+ <include>README.txt</include>
+ <include>release-log.txt</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/src/main/bin</directory>
+ <outputDirectory>/bin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <!-- Client -->
+ <fileSet>
+ <directory>${basedir}/../client/target/oozie-client-${project.version}-client/oozie-client-${project.version}/bin</directory>
+ <outputDirectory>/bin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <!-- Tools -->
+ <fileSet>
+ <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/bin</directory>
+ <outputDirectory>/bin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/../tools/target/oozie-tools-${project.version}-tools/oozie-tools-${project.version}/libtools</directory>
+ <outputDirectory>/libtools</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ </fileSet>
+ <!-- Oozie Login Server Example war and jar -->
+ <fileSet>
+ <directory>${basedir}/../login/target</directory>
+ <outputDirectory>/</outputDirectory>
+ <includes>
+ <include>oozie-login.war</include>
+ <include>oozie-login.jar</include>
+ </includes>
+ <fileMode>0555</fileMode>
+ </fileSet>
+ <!-- Oozie Server - embedded jetty -->
+ <fileSet>
+ <directory>${basedir}/../server/target/</directory>
+ <outputDirectory>/embedded-oozie-server</outputDirectory>
+ <includes>
+ <include>oozie-server*.jar</include>
+ <include>**/jetty*.jar</include>
+ <include>**/*jsp*.jar</include>
+ <include>**/mail*.jar</include>
+ <include>**/apache*.jar</include>
+ <include>**/commons-el*.jar</include>
+ <include>**/javax.servlet-api-3.1.0.jar</include>
+ <include>**/jasper*jar</include>
+ <include>**/taglibs-*jar</include>
+ <include>**/org.eclipse.jdt.core-*jar</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/../webapp/target/oozie-webapp-${project.version}</directory>
+ <outputDirectory>/embedded-oozie-server/webapp</outputDirectory>
+ <excludes>
+ <exclude>**/web.xml</exclude>
+ </excludes>
+ </fileSet>
+ </fileSets>
+ <files>
+ <!-- Oozie configuration files -->
+ <file>
+ <source>${basedir}/../core/src/main/resources/oozie-default.xml</source>
+ <outputDirectory>/conf</outputDirectory>
+ </file>
+ <!-- Oozie core jar -->
+ <file>
+ <source>${basedir}/../core/target/oozie-core-${project.version}.jar</source>
+ <outputDirectory>/oozie-core</outputDirectory>
+ </file>
+ <!-- Oozie core test jar -->
+ <file>
+ <source>${basedir}/../core/target/oozie-core-${project.version}-tests.jar</source>
+ <outputDirectory>/oozie-core</outputDirectory>
+ </file>
+ <!-- Oozie Documentation -->
+ <file>
+ <source>${basedir}/../docs/target/oozie-docs-${project.version}-docs.zip</source>
+ <outputDirectory>/</outputDirectory>
+ <destName>docs.zip</destName>
+ </file>
+ <!-- Oozie Client TAR.GZ -->
+ <file>
+ <source>${basedir}/../client/target/oozie-client-${project.version}-client.tar.gz</source>
+ <outputDirectory>/</outputDirectory>
+ <destName>oozie-client-${project.version}.tar.gz</destName>
+ </file>
+ <!-- Oozie examples TAR.GZ -->
+ <file>
+ <source>${basedir}/../examples/target/oozie-examples-${project.version}-examples.tar.gz</source>
+ <outputDirectory>/</outputDirectory>
+ <destName>oozie-examples.tar.gz</destName>
+ </file>
+ <!-- Oozie sharelib TAR.GZ -->
+ <file>
+ <source>${basedir}/../sharelib/target/oozie-sharelib-${project.version}.tar.gz</source>
+ <outputDirectory>/</outputDirectory>
+ <fileMode>0444</fileMode>
+ </file>
+ </files>
+</assembly>
[15/48] oozie git commit: OOZIE-2729 OYA: refactor XTestCase.
Posted by pb...@apache.org.
http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index ca3f883..4442513 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -21,22 +21,14 @@ package org.apache.oozie.test;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
-import java.io.FileReader;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.IOException;
-import java.net.InetAddress;
import java.net.URL;
import java.util.*;
-import java.net.UnknownHostException;
-import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
-import javax.persistence.EntityManager;
-import javax.persistence.FlushModeType;
-import javax.persistence.Query;
-
import junit.framework.TestCase;
import org.apache.commons.io.FilenameUtils;
@@ -51,39 +43,20 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.hadoop.util.Shell;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.spi.LoggingEvent;
-import org.apache.oozie.BundleActionBean;
-import org.apache.oozie.BundleJobBean;
-import org.apache.oozie.CoordinatorActionBean;
-import org.apache.oozie.CoordinatorJobBean;
-import org.apache.oozie.SLAEventBean;
-import org.apache.oozie.WorkflowActionBean;
-import org.apache.oozie.WorkflowJobBean;
-import org.apache.oozie.dependency.FSURIHandler;
-import org.apache.oozie.dependency.HCatURIHandler;
import org.apache.oozie.service.ConfigurationService;
-import org.apache.oozie.service.HCatAccessorService;
import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
-import org.apache.oozie.service.JMSAccessorService;
import org.apache.oozie.service.JPAService;
-import org.apache.oozie.service.PartitionDependencyManagerService;
import org.apache.oozie.service.ServiceException;
import org.apache.oozie.service.Services;
-import org.apache.oozie.service.StoreService;
-import org.apache.oozie.service.URIHandlerService;
-import org.apache.oozie.sla.SLARegistrationBean;
-import org.apache.oozie.sla.SLASummaryBean;
-import org.apache.oozie.store.StoreException;
import org.apache.oozie.test.MiniHCatServer.RUNMODE;
import org.apache.oozie.test.hive.MiniHS2;
import org.apache.oozie.util.ClasspathUtils;
@@ -109,169 +82,24 @@ import org.apache.oozie.util.XLog;
* From within testcases, system properties must be changed using the {@link #setSystemProperty} method.
*/
public abstract class XTestCase extends TestCase {
- private static EnumSet<YarnApplicationState> YARN_TERMINAL_STATES = EnumSet.of(YarnApplicationState.FAILED, YarnApplicationState.KILLED, YarnApplicationState.FINISHED);
- private Map<String, String> sysProps;
+ private static EnumSet<YarnApplicationState> YARN_TERMINAL_STATES =
+ EnumSet.of(YarnApplicationState.FAILED, YarnApplicationState.KILLED, YarnApplicationState.FINISHED);
+ protected static final File OOZIE_SRC_DIR = new TestPropertiesLoader().loadTestPropertiesOrThrow();
+ private final TestCaseDirectories testCaseDirectories = new TestCaseDirectories();
+ private final TestSystemProperties testSystemProperties = new TestSystemProperties();
+
+ private final TestConfigurations testConfigurations = new TestConfigurations();
private String testCaseDir;
+
private String testCaseConfDir;
private String hadoopVersion;
protected XLog log = new XLog(LogFactory.getLog(getClass()));
- protected static File OOZIE_SRC_DIR = null;
- private static final String OOZIE_TEST_PROPERTIES = "oozie.test.properties";
+ static final String OOZIE_TEST_PROPERTIES = "oozie.test.properties";
protected static final String SYSTEM_LINE_SEPARATOR = System.getProperty("line.separator");
-
- public static float WAITFOR_RATIO = Float.parseFloat(System.getProperty("oozie.test.waitfor.ratio", "1"));
- protected static final String localActiveMQBroker = "vm://localhost?broker.persistent=false";
- protected static final String ActiveMQConnFactory = "org.apache.activemq.jndi.ActiveMQInitialContextFactory";
-
- static {
- try {
- OOZIE_SRC_DIR = new File("core").getAbsoluteFile();
- if (!OOZIE_SRC_DIR.exists()) {
- OOZIE_SRC_DIR = OOZIE_SRC_DIR.getParentFile().getParentFile();
- OOZIE_SRC_DIR = new File(OOZIE_SRC_DIR, "core");
- }
- if (!OOZIE_SRC_DIR.exists()) {
- OOZIE_SRC_DIR = OOZIE_SRC_DIR.getParentFile().getParentFile();
- OOZIE_SRC_DIR = new File(OOZIE_SRC_DIR, "core");
- }
- if (!OOZIE_SRC_DIR.exists()) {
- // We're probably being run from outside of Oozie (e.g. MiniOozie), so just use a dummy location here.
- // Anything that uses this location should have a fallback anyway.
- OOZIE_SRC_DIR = new File(".");
- } else {
- OOZIE_SRC_DIR = OOZIE_SRC_DIR.getParentFile();
- }
-
- final String testPropsFile = System.getProperty(OOZIE_TEST_PROPERTIES, "test.properties");
- final File file = new File(testPropsFile).isAbsolute()
- ? new File(testPropsFile) : new File(OOZIE_SRC_DIR, testPropsFile);
- if (file.exists()) {
- System.out.println();
- System.out.println("*********************************************************************************");
- System.out.println("Loading test system properties from: " + file.getAbsolutePath());
- System.out.println();
- final Properties props = new Properties();
- props.load(new FileReader(file));
- for (final Map.Entry entry : props.entrySet()) {
- if (!System.getProperties().containsKey(entry.getKey())) {
- System.setProperty((String) entry.getKey(), (String) entry.getValue());
- System.out.println(entry.getKey() + " = " + entry.getValue());
- }
- else {
- System.out.println(entry.getKey() + " IGNORED, using command line value = " +
- System.getProperty((String) entry.getKey()));
- }
- }
- System.out.println("*********************************************************************************");
- System.out.println();
- }
- else {
- if (System.getProperty(OOZIE_TEST_PROPERTIES) != null) {
- System.err.println();
- System.err.println("ERROR: Specified test file does not exist: " +
- System.getProperty(OOZIE_TEST_PROPERTIES));
- System.err.println();
- System.exit(-1);
- }
- }
- } catch (final IOException ex) {
- throw new RuntimeException(ex);
- }
-
- }
-
- /**
- * System property to specify the parent directory for the 'oozietests' directory to be used as base for all test
- * working directories. </p> If this property is not set, the assumed value is '/tmp'.
- */
- public static final String OOZIE_TEST_DIR = "oozie.test.dir";
-
- /**
- * System property to specify the Hadoop Job Tracker to use for testing. </p> If this property is not set, the
- * assumed value is 'locahost:9001'.
- */
- public static final String OOZIE_TEST_JOB_TRACKER = "oozie.test.job.tracker";
-
- /**
- * System property to specify the Hadoop Name Node to use for testing. </p> If this property is not set, the assumed
- * value is 'locahost:9000'.
- */
- public static final String OOZIE_TEST_NAME_NODE = "oozie.test.name.node";
-
- /**
- * System property to specify the second Hadoop Name Node to use for testing. </p> If this property is not set, the assumed
- * value is 'locahost:9100'.
- */
- public static final String OOZIE_TEST_NAME_NODE2 = "oozie.test.name.node2";
-
- /**
- * System property to specify the Hadoop Version to use for testing. </p> If this property is not set, the assumed
- * value is "0.20.0"
- */
- public static final String HADOOP_VERSION = "hadoop.version";
-
- /**
- * System property that specifies the user that test oozie instance runs as.
- * The value of this property defaults to the "${user.name} system property.
- */
- public static final String TEST_OOZIE_USER_PROP = "oozie.test.user.oozie";
-
- /**
- * System property that specifies the default test user name used by
- * the tests. The defalt value of this property is <tt>test</tt>.
- */
- public static final String TEST_USER1_PROP = "oozie.test.user.test";
-
- /**
- * System property that specifies an auxilliary test user name used by the
- * tests. The default value of this property is <tt>test2</tt>.
- */
- public static final String TEST_USER2_PROP = "oozie.test.user.test2";
-
- /**
- * System property that specifies another auxilliary test user name used by
- * the tests. The default value of this property is <tt>test3</tt>.
- */
- public static final String TEST_USER3_PROP = "oozie.test.user.test3";
-
- /**
- * System property that specifies the test groiup used by the tests.
- * The default value of this property is <tt>testg</tt>.
- */
- public static final String TEST_GROUP_PROP = "oozie.test.group";
-
- /**
- * System property that specifies the test groiup used by the tests.
- * The default value of this property is <tt>testg</tt>.
- */
- public static final String TEST_GROUP_PROP2 = "oozie.test.group2";
-
- /**
- * System property that specifies the wait time, in seconds, between testcases before
- * triggering a shutdown. The default value is 10 sec.
- */
- public static final String TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT = "oozie.test.minicluster.monitor.shutdown.wait";
-
- /**
- * Name of the shell command
- */
- protected static final String SHELL_COMMAND_NAME = (Shell.WINDOWS) ? "cmd" : "bash";
-
- /**
- * Extension for shell script files
- */
- protected static final String SHELL_COMMAND_SCRIPTFILE_EXTENSION = (Shell.WINDOWS) ? "cmd" : "sh";
-
- /**
- * Option for shell command to pass script files
- */
- protected static final String SHELL_COMMAND_SCRIPTFILE_OPTION = (Shell.WINDOWS) ? "/c" : "-c";
-
- /**
- * Minimal set of require Services for cleaning up the database ({@link JPAService} and {@link StoreService})
- */
- private static final String MINIMAL_SERVICES_FOR_DB_CLEANUP = JPAService.class.getName() + "," + StoreService.class.getName();
+ protected static float WAITFOR_RATIO = Float.parseFloat(System.getProperty("oozie.test.waitfor.ratio", "1"));
+ protected static final String LOCAL_ACTIVE_MQ_BROKER = "vm://localhost?broker.persistent=false";
+ protected static final String ACTIVE_MQ_CONN_FACTORY = "org.apache.activemq.jndi.ActiveMQInitialContextFactory";
/**
* Initialize the test working directory. <p/> If it does not exist it creates it, if it already exists it deletes
@@ -295,37 +123,52 @@ public abstract class XTestCase extends TestCase {
protected void setUp(final boolean cleanUpDBTables) throws Exception {
RUNNING_TESTCASES.incrementAndGet();
super.setUp();
- final String baseDir = System.getProperty(OOZIE_TEST_DIR, new File("target/test-data").getAbsolutePath());
- String msg = null;
- final File f = new File(baseDir);
- if (!f.isAbsolute()) {
- msg = XLog.format("System property [{0}]=[{1}] must be set to an absolute path", OOZIE_TEST_DIR, baseDir);
+
+ testCaseDirectories.createTestDirOrError();
+
+ hadoopVersion = System.getProperty(TestConstants.HADOOP_VERSION, "0.20.0");
+ testCaseDir = testCaseDirectories.createTestCaseDir(this, true);
+
+ setupOozieHome();
+
+ testCaseConfDir = createTestCaseSubDir("conf");
+
+ final InputStream oozieSiteSourceStream = loadTestOozieSiteOrError();
+
+ setupOozieSiteConfiguration(oozieSiteSourceStream);
+
+ final File hadoopConfDir = copyAndGetHadoopConfig();
+
+ testSystemProperties.setupSystemProperties(testCaseDir);
+ if (testSystemProperties.isEmbeddedHadoop()) {
+ setUpEmbeddedHadoop(testCaseDir);
}
- else {
- if (baseDir.length() < 4) {
- msg = XLog.format("System property [{0}]=[{1}] path must be at least 4 chars", OOZIE_TEST_DIR, baseDir);
+ if (testSystemProperties.isEmbeddedHadoop2()) {
+ setUpEmbeddedHadoop2();
+ }
+
+ if (yarnCluster != null) {
+ try (final OutputStream os = new FileOutputStream(new File(hadoopConfDir, "core-site.xml"))) {
+ final Configuration conf = testConfigurations.createJobConfFromYarnCluster(yarnCluster.getConfig());
+ conf.writeXml(os);
}
}
- if (msg != null) {
- System.err.println();
- System.err.println(msg);
- System.exit(-1);
+
+ if (System.getProperty("oozie.test.metastore.server", "false").equals("true")) {
+ setupHCatalogServer();
}
- f.mkdirs();
- if (!f.exists() || !f.isDirectory()) {
- System.err.println();
- System.err.println(XLog.format("Could not create test dir [{0}]", baseDir));
- System.exit(-1);
+
+ if (System.getProperty("oozie.test.hive.server.2", "false").equals("true")) {
+ setupHiveServer2();
}
- hadoopVersion = System.getProperty(HADOOP_VERSION, "0.20.0");
- testCaseDir = createTestCaseDir(this, true);
- //setting up Oozie HOME and Oozie conf directory
- setSystemProperty(Services.OOZIE_HOME_DIR, testCaseDir);
- Services.setOozieHome();
- testCaseConfDir = createTestCaseSubDir("conf");
+ // Cleanup any leftover database data to make sure we start each test with an empty database
+ if (cleanUpDBTables) {
+ cleanUpDBTables();
+ }
+ }
- // load test Oozie site
+ private InputStream loadTestOozieSiteOrError() throws IOException {
final String oozieTestDB = System.getProperty("oozie.test.db", "hsqldb");
final String defaultOozieSize =
new File(OOZIE_SRC_DIR, "core/src/test/resources/" + oozieTestDB + "-oozie-site.xml").getAbsolutePath();
@@ -354,6 +197,15 @@ public abstract class XTestCase extends TestCase {
System.exit(-1);
}
}
+ return oozieSiteSourceStream;
+ }
+
+ private void setupOozieHome() throws ServiceException {
+ setSystemProperty(Services.OOZIE_HOME_DIR, testCaseDir);
+ Services.setOozieHome();
+ }
+
+ private void setupOozieSiteConfiguration(final InputStream oozieSiteSourceStream) throws IOException {
// Copy the specified oozie-site file from oozieSiteSourceStream to the test case dir as oozie-site.xml
final Configuration oozieSiteConf = new Configuration(false);
oozieSiteConf.addResource(oozieSiteSourceStream);
@@ -366,14 +218,16 @@ public abstract class XTestCase extends TestCase {
oozieSiteConf.set(Services.CONF_SERVICE_CLASSES, classes.replaceAll("org.apache.oozie.service.ShareLibService,", ""));
// Make sure to create the Oozie DB during unit tests
oozieSiteConf.set(JPAService.CONF_CREATE_DB_SCHEMA, "true");
- File target = new File(testCaseConfDir, "oozie-site.xml");
+ final File target = new File(testCaseConfDir, "oozie-site.xml");
oozieSiteConf.writeXml(new FileOutputStream(target));
+ }
+ private File copyAndGetHadoopConfig() throws IOException {
final File hadoopConfDir = new File(testCaseConfDir, "hadoop-conf");
hadoopConfDir.mkdir();
final File actionConfDir = new File(testCaseConfDir, "action-conf");
actionConfDir.mkdir();
- source = new File(OOZIE_SRC_DIR, "core/src/test/resources/hadoop-config.xml");
+ final File source = new File(OOZIE_SRC_DIR, "core/src/test/resources/hadoop-config.xml");
InputStream hadoopConfigResourceStream = null;
if (!source.exists()) {
// If we can't find it, try using the class loader (useful if we're using XTestCase from outside core)
@@ -392,49 +246,9 @@ public abstract class XTestCase extends TestCase {
} else {
hadoopConfigResourceStream = new FileInputStream(source);
}
- target = new File(hadoopConfDir, "hadoop-site.xml");
+ final File target = new File(hadoopConfDir, "hadoop-site.xml");
IOUtils.copyStream(hadoopConfigResourceStream, new FileOutputStream(target));
-
- if (System.getProperty("oozielocal.log") == null) {
- setSystemProperty("oozielocal.log", "/tmp/oozielocal.log");
- }
- if (System.getProperty("oozie.test.hadoop.security", "simple").equals("kerberos")) {
- System.setProperty("oozie.service.HadoopAccessorService.kerberos.enabled", "true");
- }
- if (System.getProperty("oozie.test.hadoop.minicluster", "true").equals("true")) {
- setUpEmbeddedHadoop(getTestCaseDir());
- // Second cluster is not necessary without the first one
- if (System.getProperty("oozie.test.hadoop.minicluster2", "false").equals("true")) {
- setUpEmbeddedHadoop2();
- }
- }
-
- if (System.getProperty("oozie.test.db.host") == null) {
- System.setProperty("oozie.test.db.host", "localhost");
- }
- setSystemProperty(ConfigurationService.OOZIE_DATA_DIR, testCaseDir);
-
- setSystemProperty(HadoopAccessorService.SUPPORTED_FILESYSTEMS, "*");
-
- if (yarnCluster != null) {
- try (final OutputStream os = new FileOutputStream(new File(hadoopConfDir, "core-site.xml"))) {
- final Configuration conf = createJobConfFromYarnCluster();
- conf.writeXml(os);
- }
- }
-
- if (System.getProperty("oozie.test.metastore.server", "false").equals("true")) {
- setupHCatalogServer();
- }
-
- if (System.getProperty("oozie.test.hive.server.2", "false").equals("true")) {
- setupHiveServer2();
- }
-
- // Cleanup any leftover database data to make sure we start each test with an empty database
- if (cleanUpDBTables) {
- cleanUpDBTables();
- }
+ return hadoopConfDir;
}
/**
@@ -444,8 +258,7 @@ public abstract class XTestCase extends TestCase {
protected void tearDown() throws Exception {
tearDownHiveServer2();
tearDownHCatalogServer();
- resetSystemProperties();
- sysProps = null;
+ testSystemProperties.resetSystemProperties();
testCaseDir = null;
super.tearDown();
RUNNING_TESTCASES.decrementAndGet();
@@ -499,7 +312,7 @@ public abstract class XTestCase extends TestCase {
* @return Oozie's user Id for running the test cases.
*/
public static String getOozieUser() {
- return System.getProperty(TEST_OOZIE_USER_PROP, System.getProperty("user.name"));
+ return System.getProperty(TestConstants.TEST_OOZIE_USER_PROP, System.getProperty("user.name"));
}
/**
@@ -508,7 +321,7 @@ public abstract class XTestCase extends TestCase {
* @return the user Id.
*/
protected static String getTestUser() {
- return System.getProperty(TEST_USER1_PROP, "test");
+ return System.getProperty(TestConstants.TEST_USER1_PROP, "test");
}
/**
@@ -518,7 +331,7 @@ public abstract class XTestCase extends TestCase {
* @return the user Id.
*/
protected static String getTestUser2() {
- return System.getProperty(TEST_USER2_PROP, "test2");
+ return System.getProperty(TestConstants.TEST_USER2_PROP, "test2");
}
/**
@@ -528,7 +341,7 @@ public abstract class XTestCase extends TestCase {
* @return the user Id.
*/
protected static String getTestUser3() {
- return System.getProperty(TEST_USER3_PROP, "test3");
+ return System.getProperty(TestConstants.TEST_USER3_PROP, "test3");
}
/**
@@ -537,7 +350,7 @@ public abstract class XTestCase extends TestCase {
* @return the test group.
*/
protected static String getTestGroup() {
- return System.getProperty(TEST_GROUP_PROP, "testg");
+ return System.getProperty(TestConstants.TEST_GROUP_PROP, "testg");
}
/**
@@ -546,71 +359,11 @@ public abstract class XTestCase extends TestCase {
* @return the test group.
*/
protected static String getTestGroup2() {
- return System.getProperty(TEST_GROUP_PROP, "testg2");
- }
-
- /**
- * Return the test working directory.
- * <p/>
- * It returns <code>${oozie.test.dir}/oozietests/TESTCLASSNAME/TESTMETHODNAME</code>.
- *
- * @param testCase testcase instance to obtain the working directory.
- * @return the test working directory.
- */
- private String getTestCaseDirInternal(final TestCase testCase) {
- ParamChecker.notNull(testCase, "testCase");
- File dir = new File(System.getProperty(OOZIE_TEST_DIR, "target/test-data"));
- dir = new File(dir, "oozietests").getAbsoluteFile();
- dir = new File(dir, testCase.getClass().getName());
- dir = new File(dir, testCase.getName());
- return dir.getAbsolutePath();
+ return System.getProperty(TestConstants.TEST_GROUP_PROP, "testg2");
}
protected void delete(final File file) throws IOException {
- ParamChecker.notNull(file, "file");
- if (file.getAbsolutePath().length() < 5) {
- throw new RuntimeException(XLog.format("path [{0}] is too short, not deleting", file.getAbsolutePath()));
- }
- if (file.exists()) {
- if (file.isDirectory()) {
- final File[] children = file.listFiles();
- if (children != null) {
- for (final File child : children) {
- delete(child);
- }
- }
- }
- if (!file.delete()) {
- throw new RuntimeException(XLog.format("could not delete path [{0}]", file.getAbsolutePath()));
- }
- }
- else {
- // With a dangling symlink, exists() doesn't return true so try to delete it anyway; we fail silently in case the file
- // truely doesn't exist
- file.delete();
- }
- }
-
- /**
- * Create the test working directory.
- *
- * @param testCase testcase instance to obtain the working directory.
- * @param cleanup indicates if the directory should be cleaned up if it exists.
- * @return return the path of the test working directory, it is always an absolute path.
- * @throws Exception if the test working directory could not be created or cleaned up.
- */
- private String createTestCaseDir(final TestCase testCase, final boolean cleanup) throws Exception {
- final String testCaseDir = getTestCaseDirInternal(testCase);
- System.out.println();
- System.out.println(XLog.format("Setting testcase work dir[{0}]", testCaseDir));
- if (cleanup) {
- delete(new File(testCaseDir));
- }
- final File dir = new File(testCaseDir);
- if (!dir.mkdirs()) {
- throw new RuntimeException(XLog.format("Could not create testcase dir[{0}]", testCaseDir));
- }
- return testCaseDir;
+ testCaseDirectories.delete(file);
}
/**
@@ -620,23 +373,8 @@ public abstract class XTestCase extends TestCase {
* @return the absolute path to the created directory.
*/
protected String createTestCaseSubDir(final String... subDirNames) {
- ParamChecker.notNull(subDirNames, "subDirName");
- if (subDirNames.length == 0) {
- throw new RuntimeException(XLog.format("Could not create testcase subdir ''; it already exists"));
- }
-
- File dir = new File(testCaseDir);
- for (int i = 0; i < subDirNames.length; i++) {
- ParamChecker.notNull(subDirNames[i], "subDirName[" + i + "]");
- dir = new File(dir, subDirNames[i]);
- }
-
- if (!dir.mkdirs()) {
- throw new RuntimeException(XLog.format("Could not create testcase subdir[{0}]", dir));
- }
- return dir.getAbsolutePath();
+ return testCaseDirectories.createTestCaseSubdir(testCaseDir, subDirNames);
}
-
/**
* Set a system property for the duration of the method test case.
* <p/>
@@ -646,42 +384,13 @@ public abstract class XTestCase extends TestCase {
* @param value value to set.
*/
protected void setSystemProperty(final String name, final String value) {
- if (sysProps == null) {
- sysProps = new HashMap<String, String>();
- }
- if (!sysProps.containsKey(name)) {
- final String currentValue = System.getProperty(name);
- sysProps.put(name, currentValue);
- }
- if (value != null) {
- System.setProperty(name, value);
- }
- else {
- System.getProperties().remove(name);
- }
- }
-
- /**
- * Reset changed system properties to their original values. <p/> Called from {@link #tearDown}.
- */
- private void resetSystemProperties() {
- if (sysProps != null) {
- for (final Map.Entry<String, String> entry : sysProps.entrySet()) {
- if (entry.getValue() != null) {
- System.setProperty(entry.getKey(), entry.getValue());
- }
- else {
- System.getProperties().remove(entry.getKey());
- }
- }
- sysProps.clear();
- }
+ testSystemProperties.setSystemProperty(name, value);
}
/**
* A predicate 'closure' used by {@link XTestCase#waitFor} method.
*/
- public static interface Predicate {
+ public interface Predicate {
/**
* Perform a predicate evaluation.
@@ -689,7 +398,7 @@ public abstract class XTestCase extends TestCase {
* @return the boolean result of the evaluation.
* @throws Exception thrown if the predicate evaluation could not evaluate.
*/
- public boolean evaluate() throws Exception;
+ boolean evaluate() throws Exception;
}
/**
@@ -741,45 +450,45 @@ public abstract class XTestCase extends TestCase {
}
/**
- * Return the Hadoop Job Tracker to use for testing. </p> The value is taken from the Java sytem property {@link
- * #OOZIE_TEST_JOB_TRACKER}, if this property is not set, the assumed value is 'locahost:9001'.
+ * Return the Hadoop Job Tracker to use for testing. </p> The value is taken from the Java sytem property
+ * {@link TestConstants#OOZIE_TEST_JOB_TRACKER}, if this property is not set, the assumed value is 'locahost:9001'.
*
* @return the job tracker URI.
*/
protected String getJobTrackerUri() {
- return System.getProperty(OOZIE_TEST_JOB_TRACKER, "localhost:9001");
+ return System.getProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, "localhost:9001");
}
/**
- * Return the Hadoop Name Node to use for testing. </p> The value is taken from the Java sytem property {@link
- * #OOZIE_TEST_NAME_NODE}, if this property is not set, the assumed value is 'locahost:9000'.
+ * Return the Hadoop Name Node to use for testing. </p> The value is taken from the Java sytem property
+ * {@link TestConstants#OOZIE_TEST_NAME_NODE}, if this property is not set, the assumed value is 'locahost:9000'.
*
* @return the name node URI.
*/
protected String getNameNodeUri() {
- return System.getProperty(OOZIE_TEST_NAME_NODE, "hdfs://localhost:9000");
+ return System.getProperty(TestConstants.OOZIE_TEST_NAME_NODE, "hdfs://localhost:9000");
}
/**
- * Return the second Hadoop Name Node to use for testing. </p> The value is taken from the Java sytem property {@link
- * #OOZIE_TEST_NAME_NODE2}, if this property is not set, the assumed value is 'locahost:9100'.
+ * Return the second Hadoop Name Node to use for testing. </p> The value is taken from the Java sytem property
+ * {@link TestConstants#OOZIE_TEST_NAME_NODE2}, if this property is not set, the assumed value is 'locahost:9100'.
*
* @return the second name node URI.
*/
protected String getNameNode2Uri() {
- return System.getProperty(OOZIE_TEST_NAME_NODE2, "hdfs://localhost:9100");
+ return System.getProperty(TestConstants.OOZIE_TEST_NAME_NODE2, "hdfs://localhost:9100");
}
- public String getKeytabFile() {
+ protected String getKeytabFile() {
final String defaultFile = new File(System.getProperty("user.home"), "oozie.keytab").getAbsolutePath();
return System.getProperty("oozie.test.kerberos.keytab.file", defaultFile);
}
- public String getRealm() {
+ String getRealm() {
return System.getProperty("oozie.test.kerberos.realm", "LOCALHOST");
}
- public String getOoziePrincipal() {
+ protected String getOoziePrincipal() {
return System.getProperty("oozie.test.kerberos.oozie.principal",
getOozieUser() + "/localhost") + "@" + getRealm();
}
@@ -795,108 +504,7 @@ public abstract class XTestCase extends TestCase {
* @throws Exception
*/
protected final void cleanUpDBTables() throws Exception {
- // If the Services are already loaded, then a test is likely calling this for something specific and we shouldn't mess with
- // the Services; so just cleanup the database
- if (Services.get() != null) {
- cleanUpDBTablesInternal();
- }
- else {
- // Otherwise, this is probably being called during setup() and we should just load the minimal set of required Services
- // needed to cleanup the database and shut them down when done; the test will likely start its own Services later and
- // we don't want to interfere
- try {
- final Services services = new Services();
- services.getConf().set(Services.CONF_SERVICE_CLASSES, MINIMAL_SERVICES_FOR_DB_CLEANUP);
- services.init();
- cleanUpDBTablesInternal();
- } finally {
- if (Services.get() != null) {
- Services.get().destroy();
- }
- }
- }
- }
-
- private void cleanUpDBTablesInternal() throws StoreException {
- final EntityManager entityManager = Services.get().get(JPAService.class).getEntityManager();
- entityManager.setFlushMode(FlushModeType.COMMIT);
- entityManager.getTransaction().begin();
-
- Query q = entityManager.createNamedQuery("GET_WORKFLOWS");
- final List<WorkflowJobBean> wfjBeans = q.getResultList();
- final int wfjSize = wfjBeans.size();
- for (final WorkflowJobBean w : wfjBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_ACTIONS");
- final List<WorkflowActionBean> wfaBeans = q.getResultList();
- final int wfaSize = wfaBeans.size();
- for (final WorkflowActionBean w : wfaBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_COORD_JOBS");
- final List<CoordinatorJobBean> cojBeans = q.getResultList();
- final int cojSize = cojBeans.size();
- for (final CoordinatorJobBean w : cojBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_COORD_ACTIONS");
- final List<CoordinatorActionBean> coaBeans = q.getResultList();
- final int coaSize = coaBeans.size();
- for (final CoordinatorActionBean w : coaBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_BUNDLE_JOBS");
- final List<BundleJobBean> bjBeans = q.getResultList();
- final int bjSize = bjBeans.size();
- for (final BundleJobBean w : bjBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_BUNDLE_ACTIONS");
- final List<BundleActionBean> baBeans = q.getResultList();
- final int baSize = baBeans.size();
- for (final BundleActionBean w : baBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_SLA_EVENTS");
- final List<SLAEventBean> slaBeans = q.getResultList();
- final int slaSize = slaBeans.size();
- for (final SLAEventBean w : slaBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createQuery("select OBJECT(w) from SLARegistrationBean w");
- final List<SLARegistrationBean> slaRegBeans = q.getResultList();
- final int slaRegSize = slaRegBeans.size();
- for (final SLARegistrationBean w : slaRegBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createQuery("select OBJECT(w) from SLASummaryBean w");
- final List<SLASummaryBean> sdBeans = q.getResultList();
- final int ssSize = sdBeans.size();
- for (final SLASummaryBean w : sdBeans) {
- entityManager.remove(w);
- }
-
- entityManager.getTransaction().commit();
- entityManager.close();
- log.info(wfjSize + " entries in WF_JOBS removed from DB!");
- log.info(wfaSize + " entries in WF_ACTIONS removed from DB!");
- log.info(cojSize + " entries in COORD_JOBS removed from DB!");
- log.info(coaSize + " entries in COORD_ACTIONS removed from DB!");
- log.info(bjSize + " entries in BUNDLE_JOBS removed from DB!");
- log.info(baSize + " entries in BUNDLE_ACTIONS removed from DB!");
- log.info(slaSize + " entries in SLA_EVENTS removed from DB!");
- log.info(slaRegSize + " entries in SLA_REGISTRATION removed from DB!");
- log.info(ssSize + " entries in SLA_SUMMARY removed from DB!");
-
+ new TestDbCleaner().cleanDbTables();
}
private static MiniDFSCluster dfsCluster = null;
@@ -915,33 +523,24 @@ public abstract class XTestCase extends TestCase {
ClasspathUtils.setUsingMiniYarnCluster(true);
final int dataNodes = 2;
final String oozieUser = getOozieUser();
- final JobConf dfsConfig = createDFSConfig();
- final String[] userGroups = new String[]{getTestGroup(), getTestGroup2()};
- UserGroupInformation.createUserForTesting(oozieUser, userGroups);
- UserGroupInformation.createUserForTesting(getTestUser(), userGroups);
- UserGroupInformation.createUserForTesting(getTestUser2(), userGroups);
- UserGroupInformation.createUserForTesting(getTestUser3(), new String[]{"users"});
+ final JobConf dfsConfig = testConfigurations.createDFSConfig(getOozieUser(), getTestGroup());
+
+ setupUgi(oozieUser);
try {
- dfsCluster = new MiniDFSCluster.Builder(dfsConfig)
- .numDataNodes(dataNodes)
- .format(true)
- .racks(null)
- .build();
+ setupDfsCluster(dataNodes, dfsConfig);
createHdfsPathsAndSetupPermissions();
- final Configuration yarnConfig = createYarnConfig(dfsConfig);
- yarnCluster = new MiniYARNCluster(this.getClass().getName(), 1, 1, 1, 1);
- yarnCluster.init(yarnConfig);
- yarnCluster.start();
+ setupYarnCluster(dfsConfig);
+
final JobConf jobConf = new JobConf(yarnCluster.getConfig());
- System.setProperty(OOZIE_TEST_JOB_TRACKER, jobConf.get("mapred.job.tracker"));
+ System.setProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, jobConf.get("mapred.job.tracker"));
final String rmAddress = jobConf.get("yarn.resourcemanager.address");
if (rmAddress != null) {
- System.setProperty(OOZIE_TEST_JOB_TRACKER, rmAddress);
+ System.setProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, rmAddress);
}
- System.setProperty(OOZIE_TEST_NAME_NODE, dfsCluster.getFileSystem().getUri().toString());
+ System.setProperty(TestConstants.OOZIE_TEST_NAME_NODE, dfsCluster.getFileSystem().getUri().toString());
ProxyUsers.refreshSuperUserGroupsConfiguration(dfsConfig);
} catch (final Exception ex) {
shutdownMiniCluster();
@@ -951,6 +550,30 @@ public abstract class XTestCase extends TestCase {
}
}
+ private void setupDfsCluster(final int dataNodes, final JobConf dfsConfig) throws IOException {
+ dfsCluster = new MiniDFSCluster.Builder(dfsConfig)
+ .numDataNodes(dataNodes)
+ .format(true)
+ .racks(null)
+ .build();
+ }
+
+ private void setupYarnCluster(final JobConf dfsConfig) {
+ final Configuration yarnConfig = testConfigurations.createYarnConfig(dfsConfig);
+ yarnCluster = new MiniYARNCluster(this.getClass().getName(), 1, 1, 1, 1);
+ yarnCluster.init(yarnConfig);
+ yarnCluster.start();
+ }
+
+ private void setupUgi(final String oozieUser) {
+ final String[] userGroups = new String[]{getTestGroup(), getTestGroup2()};
+
+ UserGroupInformation.createUserForTesting(oozieUser, userGroups);
+ UserGroupInformation.createUserForTesting(getTestUser(), userGroups);
+ UserGroupInformation.createUserForTesting(getTestUser2(), userGroups);
+ UserGroupInformation.createUserForTesting(getTestUser3(), new String[]{"users"});
+ }
+
private void createHdfsPathsAndSetupPermissions() throws IOException {
final FileSystem fileSystem = dfsCluster.getFileSystem();
@@ -975,7 +598,7 @@ public abstract class XTestCase extends TestCase {
try {
System.setProperty("test.build.data", FilenameUtils.concat(testBuildDataSaved, "2"));
// Only DFS cluster is created based upon current need
- dfsCluster2 = new MiniDFSCluster(createDFSConfig(), 2, true, null);
+ dfsCluster2 = new MiniDFSCluster(testConfigurations.createDFSConfig(getOozieUser(), getTestGroup()), 2, true, null);
final FileSystem fileSystem = dfsCluster2.getFileSystem();
fileSystem.mkdirs(new Path("target/test-data"));
fileSystem.mkdirs(new Path("/user"));
@@ -983,7 +606,7 @@ public abstract class XTestCase extends TestCase {
fileSystem.setPermission(new Path("target/test-data"), FsPermission.valueOf("-rwxrwxrwx"));
fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
- System.setProperty(OOZIE_TEST_NAME_NODE2, fileSystem.getConf().get("fs.default.name"));
+ System.setProperty(TestConstants.OOZIE_TEST_NAME_NODE2, fileSystem.getConf().get("fs.default.name"));
} catch (final Exception ex) {
shutdownMiniCluster2();
throw ex;
@@ -994,44 +617,6 @@ public abstract class XTestCase extends TestCase {
}
}
- private JobConf createDFSConfig() throws UnknownHostException {
- final JobConf conf = new JobConf();
- conf.set("dfs.block.access.token.enable", "false");
- conf.set("dfs.permissions", "true");
- conf.set("hadoop.security.authentication", "simple");
-
- //Doing this because Hadoop 1.x does not support '*' if the value is '*,127.0.0.1'
- final StringBuilder sb = new StringBuilder();
- sb.append("127.0.0.1,localhost");
- for (final InetAddress i : InetAddress.getAllByName(InetAddress.getLocalHost().getHostName())) {
- sb.append(",").append(i.getCanonicalHostName());
- }
- conf.set("hadoop.proxyuser." + getOozieUser() + ".hosts", sb.toString());
-
- conf.set("hadoop.proxyuser." + getOozieUser() + ".groups", getTestGroup());
- conf.set("mapred.tasktracker.map.tasks.maximum", "4");
- conf.set("mapred.tasktracker.reduce.tasks.maximum", "4");
-
- conf.set("hadoop.tmp.dir", "target/test-data" + "/minicluster");
-
- // Scheduler properties required for YARN CapacityScheduler to work
- conf.set("yarn.scheduler.capacity.root.queues", "default");
- conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
- // Required to prevent deadlocks with YARN CapacityScheduler
- conf.set("yarn.scheduler.capacity.maximum-am-resource-percent", "0.5");
-
- return conf;
- }
-
- private Configuration createYarnConfig(final Configuration parentConfig) {
- final Configuration yarnConfig = new YarnConfiguration(parentConfig);
-
- yarnConfig.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
- yarnConfig.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:0");
-
- return yarnConfig;
- }
-
protected void setupHCatalogServer() throws Exception {
if (hcatServer == null) {
hcatServer = new MiniHCatServer(RUNMODE.SERVER, createJobConf());
@@ -1040,13 +625,8 @@ public abstract class XTestCase extends TestCase {
}
}
- protected void tearDownHCatalogServer() throws Exception {
+ private void tearDownHCatalogServer() throws Exception {
// TODO: This doesn't properly shutdown the metastore. For now, just keep the current one running once it's been started
-// if (hcatServer != null) {
-// hcatServer.shutdown();
-// hcatServer = null;
-// log.info("Metastore server shutdown");
-// }
}
protected void setupHiveServer2() throws Exception {
@@ -1067,7 +647,7 @@ public abstract class XTestCase extends TestCase {
}
}
- protected void tearDownHiveServer2() {
+ private void tearDownHiveServer2() {
if (hiveserver2 != null && hiveserver2.isStarted()) {
hiveserver2.stop();
hiveserver2 = null;
@@ -1117,12 +697,12 @@ public abstract class XTestCase extends TestCase {
private static class MiniClusterShutdownMonitor extends Thread {
- public MiniClusterShutdownMonitor() {
+ MiniClusterShutdownMonitor() {
setDaemon(true);
}
public void run() {
- final long shutdownWait = Long.parseLong(System.getProperty(TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT, "10")) * 1000;
+ final long shutdownWait = Long.parseLong(System.getProperty(TestConstants.TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT, "10")) * 1000;
LAST_TESTCASE_FINISHED.set(System.currentTimeMillis());
while (true) {
if (RUNNING_TESTCASES.get() == 0) {
@@ -1141,49 +721,29 @@ public abstract class XTestCase extends TestCase {
}
}
- @SuppressWarnings("deprecation")
- private JobConf createJobConfFromYarnCluster() {
- final JobConf jobConf = new JobConf();
- final JobConf jobConfYarn = new JobConf(yarnCluster.getConfig());
- for (final Entry<String, String> entry : jobConfYarn) {
- // MiniMRClientClusterFactory sets the job jar in Hadoop 2.0 causing tests to fail
- // TODO call conf.unset after moving completely to Hadoop 2.x
- if (!(entry.getKey().equals("mapreduce.job.jar") || entry.getKey().equals("mapred.jar"))) {
- jobConf.set(entry.getKey(), entry.getValue());
- }
- }
- return jobConf;
- }
-
/**
* Returns a jobconf preconfigured to talk with the test cluster/minicluster.
* @return a jobconf preconfigured to talk with the test cluster/minicluster.
*/
protected JobConf createJobConf() throws IOException {
- final JobConf jobConf;
-
if (yarnCluster != null) {
- jobConf = createJobConfFromYarnCluster();
+ return testConfigurations.createJobConfFromYarnCluster(yarnCluster.getConfig());
} else {
- jobConf = new JobConf();
- jobConf.set("mapred.job.tracker", getJobTrackerUri());
- jobConf.set("fs.default.name", getNameNodeUri());
+ return testConfigurations.createPristineJobConf(getJobTrackerUri(), getNameNodeUri());
}
-
- return jobConf;
}
/**
* A 'closure' used by {@link XTestCase#executeWhileJobTrackerIsShutdown} method.
*/
- public static interface ShutdownJobTrackerExecutable {
+ public interface ShutdownJobTrackerExecutable {
/**
* Execute some code
*
* @throws Exception thrown if the executed code throws an exception.
*/
- public void execute() throws Exception;
+ void execute() throws Exception;
}
/**
@@ -1202,32 +762,26 @@ public abstract class XTestCase extends TestCase {
protected Services setupServicesForHCatalog() throws ServiceException {
final Services services = new Services();
- setupServicesForHCataLogImpl(services);
+
+ setConfigurationForHCatalogImpl(services);
+
return services;
}
- private void setupServicesForHCataLogImpl(final Services services) {
- final Configuration conf = services.getConf();
- conf.set(Services.CONF_SERVICE_EXT_CLASSES,
- JMSAccessorService.class.getName() + "," +
- PartitionDependencyManagerService.class.getName() + "," +
- HCatAccessorService.class.getName());
- conf.set(HCatAccessorService.JMS_CONNECTIONS_PROPERTIES,
- "default=java.naming.factory.initial#" + ActiveMQConnFactory + ";" +
- "java.naming.provider.url#" + localActiveMQBroker +
- "connectionFactoryNames#" + "ConnectionFactory");
- conf.set(URIHandlerService.URI_HANDLERS,
- FSURIHandler.class.getName() + "," + HCatURIHandler.class.getName());
+ private void setConfigurationForHCatalogImpl(final Services services) {
+ testConfigurations.setConfigurationForHCatalog(services);
+
setSystemProperty("java.naming.factory.initial", "org.apache.activemq.jndi.ActiveMQInitialContextFactory");
setSystemProperty("java.naming.provider.url", "vm://localhost?broker.persistent=false");
}
protected Services setupServicesForHCatalog(final Services services) throws ServiceException {
- setupServicesForHCataLogImpl(services);
+ setConfigurationForHCatalogImpl(services);
+
return services;
}
- protected YarnApplicationState waitUntilYarnAppState(final String externalId, final EnumSet<YarnApplicationState> acceptedStates)
+ private YarnApplicationState waitUntilYarnAppState(final String externalId, final EnumSet<YarnApplicationState> acceptedStates)
throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
final MutableObject<YarnApplicationState> finalState = new MutableObject<YarnApplicationState>();
@@ -1265,7 +819,7 @@ public abstract class XTestCase extends TestCase {
assertEquals("YARN App state", YarnApplicationState.KILLED, state);
}
- protected YarnApplicationState getYarnApplicationState(String externalId) throws HadoopAccessorException, IOException, YarnException {
+ protected YarnApplicationState getYarnApplicationState(final String externalId) throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
YarnApplicationState state = null;
final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
@@ -1305,9 +859,7 @@ public abstract class XTestCase extends TestCase {
}
}
- public TestLogAppender getTestLogAppender() {
+ protected TestLogAppender getTestLogAppender() {
return new TestLogAppender();
}
-
-}
-
+}
\ No newline at end of file
[32/48] oozie git commit: OOZIE-2729 Kill all actions after each unit
test. This fixes the scenario when multiple actions remained RUNNING during
execution of a test suite and had been consuming resources (memory, vcores).
Posted by pb...@apache.org.
OOZIE-2729 Kill all actions after each unit test. This fixes the scenario when multiple actions remained RUNNING during execution of a test suite and had been consuming resources (memory, vcores).
Change-Id: I02602c90371a02853361ebeaeab2e3529f76b47d
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/a6712816
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/a6712816
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/a6712816
Branch: refs/heads/oya
Commit: a67128165d067c15483acf3ed80531dbe3f6970e
Parents: 739d53a
Author: Andras Piros <an...@cloudera.com>
Authored: Fri Nov 25 10:47:02 2016 +0100
Committer: Andras Piros <an...@cloudera.com>
Committed: Fri Nov 25 11:09:15 2016 +0100
----------------------------------------------------------------------
.../oozie/command/wf/ActionStartXCommand.java | 2 +-
.../wf/TestWorkflowActionKillXCommand.java | 12 +-
.../java/org/apache/oozie/test/XTestCase.java | 15 +-
.../apache/oozie/action/hadoop/DistcpMain.java | 2 +-
.../apache/oozie/action/hadoop/HiveMain.java | 2 +-
.../apache/oozie/action/hadoop/Hive2Main.java | 2 +-
.../apache/oozie/action/hadoop/JavaMain.java | 2 +-
.../oozie/action/hadoop/LauncherMain.java | 76 --------
.../oozie/action/hadoop/MapReduceMain.java | 2 +-
.../oozie/action/hadoop/YarnJobActions.java | 181 +++++++++++++++++++
.../org/apache/oozie/action/hadoop/PigMain.java | 2 +-
.../apache/oozie/action/hadoop/SparkMain.java | 2 +-
.../apache/oozie/action/hadoop/SqoopMain.java | 2 +-
13 files changed, 210 insertions(+), 92 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java b/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
index edfac48..ca155b1 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
@@ -224,7 +224,7 @@ public class ActionStartXCommand extends ActionXCommand<org.apache.oozie.command
wfAction.setErrorInfo(null, null);
incrActionCounter(wfAction.getType(), 1);
- LOG.info("Start action [{0}] with user-retry state : userRetryCount [{1}], userRetryMax [{2}], userRetryInterval [{3}]",
+ LOG.info("Start action [{0}] with user-retry state : userRetryCount [{1}], userRetryMax [{2}], userRetryInterval [{3}]",
wfAction.getId(), wfAction.getUserRetryCount(), wfAction.getUserRetryMax(), wfAction
.getUserRetryInterval());
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
index 4509586..1561ddc 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
@@ -31,6 +31,7 @@ import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.action.hadoop.LauncherMain;
import org.apache.oozie.action.hadoop.MapperReducerForTest;
+import org.apache.oozie.action.hadoop.YarnJobActions;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.executor.jpa.JPAExecutorException;
@@ -43,8 +44,6 @@ import org.apache.oozie.service.UUIDService;
import org.apache.oozie.test.XDataTestCase;
import org.apache.oozie.workflow.WorkflowInstance;
-import com.google.common.collect.Sets;
-
public class TestWorkflowActionKillXCommand extends XDataTestCase {
private Services services;
@@ -172,8 +171,13 @@ public class TestWorkflowActionKillXCommand extends XDataTestCase {
System.setProperty(LauncherMain.OOZIE_JOB_LAUNCH_TIME, String.valueOf(System.currentTimeMillis()));
jobClient.submitJob(jobConf);
- Set<ApplicationId> apps = Sets.newHashSet();
- apps = LauncherMain.getChildYarnJobs(jobConf, ApplicationsRequestScope.ALL);
+
+ final Set<ApplicationId> apps = new YarnJobActions.Builder(jobConf, ApplicationsRequestScope.ALL)
+ .checkApplicationTags(true)
+ .checkStartRange(true)
+ .build()
+ .getYarnJobs();
+
assertEquals("Number of YARN apps", apps.size(), 1);
sleepjob.close();
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index b29556c..53d0a97 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -44,14 +44,15 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.spi.LoggingEvent;
+import org.apache.oozie.action.hadoop.YarnJobActions;
import org.apache.oozie.service.ConfigurationService;
import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
@@ -666,17 +667,25 @@ public abstract class XTestCase extends TestCase {
private static void shutdownMiniCluster() {
try {
if (yarnCluster != null) {
+ final YarnJobActions yarnJobActions =
+ new YarnJobActions.Builder(yarnCluster.getConfig(), ApplicationsRequestScope.ALL)
+ .build();
+ final Set<ApplicationId> allYarnJobs = yarnJobActions.getYarnJobs();
+
+ yarnJobActions.killSelectedYarnJobs(allYarnJobs);
+
yarnCluster.stop();
}
} catch (final Exception ex) {
- System.out.println(ex);
+ System.out.println(ex.getMessage());
}
+
try {
if (dfsCluster != null) {
dfsCluster.shutdown();
}
} catch (final Exception ex) {
- System.out.println(ex);
+ System.out.println(ex.getMessage());
}
// This is tied to the MiniCluster because it inherits configs from there
hs2Config = null;
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
----------------------------------------------------------------------
diff --git a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
index 40c81ba..fc37e3c 100644
--- a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
+++ b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
@@ -51,7 +51,7 @@ public class DistcpMain extends JavaMain {
protected void run(String[] args) throws Exception {
Configuration actionConf = loadActionConf();
- LauncherMain.killChildYarnJobs(actionConf);
+ YarnJobActions.killChildYarnJobs(actionConf);
String logFile = setUpDistcpLog4J(actionConf);
Class<?> klass = actionConf.getClass(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS,
org.apache.hadoop.tools.DistCp.class);
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
----------------------------------------------------------------------
diff --git a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
index 242cd6c..320dbd5 100644
--- a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
+++ b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
@@ -306,7 +306,7 @@ public class HiveMain extends LauncherMain {
}
System.out.println();
- LauncherMain.killChildYarnJobs(hiveConf);
+ YarnJobActions.killChildYarnJobs(hiveConf);
System.out.println("=================================================================");
System.out.println();
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
----------------------------------------------------------------------
diff --git a/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
index b418b89..8bf233f 100644
--- a/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
+++ b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
@@ -229,7 +229,7 @@ public class Hive2Main extends LauncherMain {
}
System.out.println();
- LauncherMain.killChildYarnJobs(actionConf);
+ YarnJobActions.killChildYarnJobs(actionConf);
System.out.println("=================================================================");
System.out.println();
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
index 0815318..e9c5585 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
@@ -44,7 +44,7 @@ public class JavaMain extends LauncherMain {
setApplicationTags(actionConf, TEZ_APPLICATION_TAGS);
setApplicationTags(actionConf, SPARK_YARN_TAGS);
- LauncherMain.killChildYarnJobs(actionConf);
+ YarnJobActions.killChildYarnJobs(actionConf);
Class<?> klass = actionConf.getClass(JAVA_MAIN_CLASS, Object.class);
System.out.println("Java action main class : " + klass.getName());
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
index 31200af..0ce2055 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
@@ -125,82 +125,6 @@ public abstract class LauncherMain {
}
}
- public static Set<ApplicationId> getChildYarnJobs(Configuration actionConf) {
- return getChildYarnJobs(actionConf, ApplicationsRequestScope.OWN);
- }
-
- public static Set<ApplicationId> getChildYarnJobs(Configuration actionConf, ApplicationsRequestScope scope) {
- System.out.println("Fetching child yarn jobs");
- Set<ApplicationId> childYarnJobs = new HashSet<ApplicationId>();
- String tag = actionConf.get(CHILD_MAPREDUCE_JOB_TAGS);
- if (tag == null) {
- System.out.print("Could not find Yarn tags property " + CHILD_MAPREDUCE_JOB_TAGS);
- return childYarnJobs;
- }
- System.out.println("tag id : " + tag);
- long startTime = 0L;
- try {
- startTime = Long.parseLong(System.getProperty(OOZIE_JOB_LAUNCH_TIME));
- } catch(NumberFormatException nfe) {
- throw new RuntimeException("Could not find Oozie job launch time", nfe);
- }
-
- GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
- gar.setScope(scope);
- gar.setApplicationTags(Collections.singleton(tag));
-
- long endTime = System.currentTimeMillis();
- if (startTime > endTime) {
- System.out.println("WARNING: Clock skew between the Oozie server host and this host detected. Please fix this. " +
- "Attempting to work around...");
- // We don't know which one is wrong (relative to the RM), so to be safe, let's assume they're both wrong and add an
- // offset in both directions
- long diff = 2 * (startTime - endTime);
- startTime = startTime - diff;
- endTime = endTime + diff;
- }
- gar.setStartRange(startTime, endTime);
- try {
- ApplicationClientProtocol proxy = ClientRMProxy.createRMProxy(actionConf, ApplicationClientProtocol.class);
- GetApplicationsResponse apps = proxy.getApplications(gar);
- List<ApplicationReport> appsList = apps.getApplicationList();
- for(ApplicationReport appReport : appsList) {
- childYarnJobs.add(appReport.getApplicationId());
- }
- } catch (IOException ioe) {
- throw new RuntimeException("Exception occurred while finding child jobs", ioe);
- } catch (YarnException ye) {
- throw new RuntimeException("Exception occurred while finding child jobs", ye);
- }
-
- System.out.println("Child yarn jobs are found - " + StringUtils.join(childYarnJobs, ","));
- return childYarnJobs;
- }
-
- public static void killChildYarnJobs(Configuration actionConf) {
- try {
- Set<ApplicationId> childYarnJobs = getChildYarnJobs(actionConf);
- if (!childYarnJobs.isEmpty()) {
- System.out.println();
- System.out.println("Found [" + childYarnJobs.size() + "] Map-Reduce jobs from this launcher");
- System.out.println("Killing existing jobs and starting over:");
- YarnClient yarnClient = YarnClient.createYarnClient();
- yarnClient.init(actionConf);
- yarnClient.start();
- for (ApplicationId app : childYarnJobs) {
- System.out.print("Killing job [" + app + "] ... ");
- yarnClient.killApplication(app);
- System.out.println("Done");
- }
- System.out.println();
- }
- } catch (YarnException ye) {
- throw new RuntimeException("Exception occurred while killing child job(s)", ye);
- } catch (IOException ioe) {
- throw new RuntimeException("Exception occurred while killing child job(s)", ioe);
- }
- }
-
protected abstract void run(String[] args) throws Exception;
/**
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
index 16cf4b1..ba7324f 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
@@ -50,7 +50,7 @@ public class MapReduceMain extends LauncherMain {
JobConf jobConf = new JobConf();
addActionConf(jobConf, actionConf);
- LauncherMain.killChildYarnJobs(jobConf);
+ YarnJobActions.killChildYarnJobs(jobConf);
// Run a config class if given to update the job conf
runConfigClass(jobConf);
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java
new file mode 100644
index 0000000..0224362
--- /dev/null
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.action.hadoop;
+
+import com.google.common.collect.Sets;
+import com.google.common.io.Closeables;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
+import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.client.ClientRMProxy;
+import org.apache.hadoop.yarn.client.api.YarnClient;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+public class YarnJobActions {
+ private final Configuration configuration;
+ private final ApplicationsRequestScope scope;
+ private final boolean checkApplicationTags;
+ private final boolean checkStartRange;
+
+ private YarnJobActions(final Configuration configuration,
+ final ApplicationsRequestScope scope,
+ final boolean checkApplicationTags,
+ final boolean checkStartRange) {
+ this.configuration = configuration;
+ this.scope = scope;
+ this.checkApplicationTags = checkApplicationTags;
+ this.checkStartRange = checkStartRange;
+ }
+
+ public Set<ApplicationId> getYarnJobs() {
+ System.out.println(String.format("Fetching yarn jobs. [scope=%s;checkApplicationTags=%s;checkStartRange=%s]",
+ scope, checkApplicationTags, checkStartRange));
+
+ final Set<ApplicationId> childYarnJobs = Sets.newHashSet();
+ final GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
+ gar.setScope(scope);
+
+ if (checkApplicationTags) {
+ final String tag = configuration.get(LauncherMain.CHILD_MAPREDUCE_JOB_TAGS);
+ if (tag == null) {
+ System.out.println("Could not find Yarn tags property " + LauncherMain.CHILD_MAPREDUCE_JOB_TAGS);
+ return childYarnJobs;
+ }
+ System.out.println("tag id : " + tag);
+ gar.setApplicationTags(Collections.singleton(tag));
+ }
+
+ if (checkStartRange) {
+ long startTime;
+ try {
+ startTime = Long.parseLong(System.getProperty(LauncherMain.OOZIE_JOB_LAUNCH_TIME));
+ } catch (final NumberFormatException nfe) {
+ throw new RuntimeException("Could not find Oozie job launch time", nfe);
+ }
+
+
+ long endTime = System.currentTimeMillis();
+ if (startTime > endTime) {
+ System.out.println("WARNING: Clock skew between the Oozie server host and this host detected. Please fix this. " +
+ "Attempting to work around...");
+ // We don't know which one is wrong (relative to the RM), so to be safe, let's assume they're both wrong and add an
+ // offset in both directions
+ final long diff = 2 * (startTime - endTime);
+ startTime = startTime - diff;
+ endTime = endTime + diff;
+ }
+ gar.setStartRange(startTime, endTime);
+ }
+
+ try {
+ final ApplicationClientProtocol proxy = ClientRMProxy.createRMProxy(configuration, ApplicationClientProtocol.class);
+ final GetApplicationsResponse apps = proxy.getApplications(gar);
+ final List<ApplicationReport> appsList = apps.getApplicationList();
+ for (final ApplicationReport appReport : appsList) {
+ childYarnJobs.add(appReport.getApplicationId());
+ }
+ } catch (final IOException | YarnException e) {
+ throw new RuntimeException("Exception occurred while finding child jobs", e);
+ }
+
+ System.out.println("Child yarn jobs are found - " + StringUtils.join(childYarnJobs, ","));
+ return childYarnJobs;
+ }
+
+ static void killChildYarnJobs(final Configuration actionConf) {
+ final YarnJobActions yarnJobActions = new Builder(actionConf, ApplicationsRequestScope.OWN)
+ .build();
+ final Set<ApplicationId> childYarnJobs = yarnJobActions.getYarnJobs();
+
+ yarnJobActions.killSelectedYarnJobs(childYarnJobs);
+ }
+
+ public void killSelectedYarnJobs(final Set<ApplicationId> selectedApplicationIds) {
+ final YarnClient yarnClient = createYarnClient();
+
+ try {
+ if (!selectedApplicationIds.isEmpty()) {
+ System.out.println("");
+ System.out.println("Found [" + selectedApplicationIds.size() + "] Map-Reduce jobs from this launcher");
+ System.out.println("Killing existing jobs and starting over:");
+
+ for (final ApplicationId app : selectedApplicationIds) {
+ System.out.println("Killing job [" + app + "] ... ");
+
+ yarnClient.killApplication(app);
+
+ System.out.println("Done");
+ }
+
+ System.out.println("");
+ }
+ } catch (final YarnException | IOException e) {
+ throw new RuntimeException("Exception occurred while killing child job(s)", e);
+ } finally {
+ Closeables.closeQuietly(yarnClient);
+ }
+ }
+
+ private YarnClient createYarnClient() {
+ final YarnClient yarnClient = YarnClient.createYarnClient();
+
+ yarnClient.init(configuration);
+ yarnClient.start();
+
+ return yarnClient;
+ }
+
+ public static class Builder {
+ private final Configuration configuration;
+ private final ApplicationsRequestScope scope;
+ private boolean checkApplicationTags = false;
+ private boolean checkStartRange = false;
+
+ public Builder(final Configuration configuration, final ApplicationsRequestScope scope) {
+ this.configuration = configuration;
+ this.scope = scope;
+ }
+
+ public Builder checkApplicationTags(final boolean checkApplicationTags) {
+ this.checkApplicationTags = checkApplicationTags;
+
+ return this;
+ }
+
+ public Builder checkStartRange(final boolean checkStartRange) {
+ this.checkStartRange = checkStartRange;
+
+ return this;
+ }
+
+ public YarnJobActions build() {
+ return new YarnJobActions(configuration, scope, checkApplicationTags, checkStartRange);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
----------------------------------------------------------------------
diff --git a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
index 0029dd0..98929c0 100644
--- a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
+++ b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
@@ -217,7 +217,7 @@ public class PigMain extends LauncherMain {
System.out.println(" " + arg);
}
- LauncherMain.killChildYarnJobs(actionConf);
+ YarnJobActions.killChildYarnJobs(actionConf);
System.out.println("=================================================================");
System.out.println();
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
index f278075..7f35cc1 100644
--- a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
+++ b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
@@ -85,7 +85,7 @@ public class SparkMain extends LauncherMain {
prepareHadoopConfig(actionConf);
setYarnTag(actionConf);
- LauncherMain.killChildYarnJobs(actionConf);
+ YarnJobActions.killChildYarnJobs(actionConf);
String logFile = setUpSparkLog4J(actionConf);
List<String> sparkArgs = new ArrayList<String>();
http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
----------------------------------------------------------------------
diff --git a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
index 92c960f..29e2966 100644
--- a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
+++ b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
@@ -171,7 +171,7 @@ public class SqoopMain extends LauncherMain {
System.out.println(" " + arg);
}
- LauncherMain.killChildYarnJobs(sqoopConf);
+ YarnJobActions.killChildYarnJobs(sqoopConf);
System.out.println("=================================================================");
System.out.println();
[05/48] oozie git commit: OOZIE-2591 fix
TestWorkflowActionKillXCommand and refactor TestJavaActionExecutor
Posted by pb...@apache.org.
OOZIE-2591 fix TestWorkflowActionKillXCommand and refactor TestJavaActionExecutor
Change-Id: I556684dee7a04a931e6cf1b33de563b7ba4828b2
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/61f3a9f0
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/61f3a9f0
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/61f3a9f0
Branch: refs/heads/oya
Commit: 61f3a9f083b5085bbc575d7e9d251aec03bfcae4
Parents: 9e2acd0
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Thu Nov 10 12:27:08 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Thu Nov 10 16:06:34 2016 +0100
----------------------------------------------------------------------
.../action/hadoop/TestJavaActionExecutor.java | 33 +++++++---------
.../wf/TestWorkflowActionKillXCommand.java | 37 +++++++++---------
.../java/org/apache/oozie/test/XTestCase.java | 40 +++++++-------------
.../oozie/action/hadoop/LauncherMain.java | 9 ++++-
4 files changed, 54 insertions(+), 65 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/61f3a9f0/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
index 5d8bf34..bfc8ab4 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
@@ -27,7 +27,6 @@ import java.io.OutputStreamWriter;
import java.io.StringReader;
import java.io.Writer;
import java.net.URI;
-import java.security.PrivilegedExceptionAction;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Collections;
@@ -35,10 +34,8 @@ import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
-import java.util.Set;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.examples.SleepJob;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -63,7 +60,6 @@ import org.apache.oozie.service.ShareLibService;
import org.apache.oozie.service.UUIDService;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.service.WorkflowStoreService;
-import org.apache.oozie.service.UserGroupInformationService;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XmlUtils;
@@ -100,7 +96,6 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
}
- @SuppressWarnings("unchecked")
public void testSetupMethods() throws Exception {
JavaActionExecutor ae = new JavaActionExecutor();
assertEquals(Arrays.asList(JavaMain.class), ae.getLauncherClasses());
@@ -365,7 +360,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
"</java>";
Context context = createContext(actionXml, null);
submitAction(context);
- waitUntilYarnAppState(context.getAction().getExternalId(), YarnApplicationState.FINISHED);
+ waitUntilYarnAppDoneAndAssertSuccess(context.getAction().getExternalId());
ActionExecutor ae = new JavaActionExecutor();
ae.check(context, context.getAction());
assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
@@ -385,7 +380,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
"</java>";
Context context = createContext(actionXml, null);
final String runningJob = submitAction(context);
- waitUntilYarnAppState(runningJob, YarnApplicationState.FINISHED);
+ waitUntilYarnAppDoneAndAssertSuccess(runningJob);
ActionExecutor ae = new JavaActionExecutor();
ae.check(context, context.getAction());
assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
@@ -410,7 +405,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
"</java>";
Context context = createContext(actionXml, null);
final String runningJob = submitAction(context);
- waitUntilYarnAppState(runningJob, YarnApplicationState.FINISHED);
+ waitUntilYarnAppDoneAndAssertSuccess(runningJob);
ActionExecutor ae = new JavaActionExecutor();
try {
ae.check(context, context.getAction());
@@ -441,7 +436,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
final String runningJobId = submitAction(context);
ActionExecutor ae = new JavaActionExecutor();
assertFalse(ae.isCompleted(context.getAction().getExternalStatus()));
- waitUntilYarnAppState(runningJobId, YarnApplicationState.FINISHED);
+ waitUntilYarnAppDoneAndAssertSuccess(runningJobId);
ae.check(context, context.getAction());
assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
assertNull(context.getAction().getData());
@@ -460,7 +455,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Context context = createContext(actionXml, null);
final String runningJob = submitAction(context);
- waitUntilYarnAppState(runningJob, YarnApplicationState.FINISHED);
+ waitUntilYarnAppDoneAndAssertSuccess(runningJob);
ActionExecutor ae = new JavaActionExecutor();
ae.check(context, context.getAction());
assertTrue(ae.isCompleted(context.getAction().getExternalStatus()));
@@ -481,7 +476,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Context context = createContext(actionXml, null);
final String runningJob = submitAction(context);
- waitUntilYarnAppState(runningJob, YarnApplicationState.FINISHED);
+ waitUntilYarnAppDoneAndAssertSuccess(runningJob);
//FIXME assertFalse(LauncherMapperHelper.isMainSuccessful(runningJob));
ActionExecutor ae = new JavaActionExecutor();
ae.check(context, context.getAction());
@@ -504,7 +499,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Context context = createContext(actionXml, null);
final String runningJob = submitAction(context);
- waitUntilYarnAppState(runningJob, YarnApplicationState.FINISHED);
+ waitUntilYarnAppDoneAndAssertSuccess(runningJob);
//FIXME assertFalse(LauncherMapperHelper.isMainSuccessful(runningJob));
ActionExecutor ae = new JavaActionExecutor();
ae.check(context, context.getAction());
@@ -526,7 +521,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Context context = createContext(actionXml, null);
final String runningJob = submitAction(context);
- waitUntilYarnAppState(runningJob, YarnApplicationState.FINISHED);
+ waitUntilYarnAppDoneAndAssertSuccess(runningJob);
//FIXME assertFalse(LauncherMapperHelper.isMainSuccessful(runningJob));
ActionExecutor ae = new JavaActionExecutor();
ae.check(context, context.getAction());
@@ -551,7 +546,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertEquals(WorkflowAction.Status.DONE, context.getAction().getStatus());
assertEquals("KILLED", context.getAction().getExternalStatus());
assertTrue(ae.isCompleted(context.getAction().getExternalStatus()));
- waitUntilYarnAppState(runningJob, YarnApplicationState.KILLED);
+ waitUntilYarnAppKilledAndAssertSuccess(runningJob);
}
@@ -827,7 +822,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
"</java>";
Context context = createContext(actionXml, null);
final String runningJob = submitAction(context);
- waitUntilYarnAppState(runningJob, YarnApplicationState.FINISHED);
+ waitUntilYarnAppDoneAndAssertSuccess(runningJob);
ActionExecutor ae = new JavaActionExecutor();
ae.check(context, context.getAction());
assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
@@ -1876,13 +1871,13 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
// Test when server side setting is not enabled
JobConf launcherConf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf);
- assertEquals("true", launcherConf.get(JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED));
+ assertEquals("false", launcherConf.get(JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED)); // disabled by default
ConfigurationService.set("oozie.action.launcher." + JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED, "true");
// Test when server side setting is enabled but tez-site.xml is not in DistributedCache
launcherConf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf);
- assertEquals("true", launcherConf.get(JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED));
+ assertEquals("false", launcherConf.get(JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED));
final Path tezSite = new Path("/tmp/tez-site.xml");
final FSDataOutputStream out = getFileSystem().create(tezSite);
@@ -2202,7 +2197,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
ConfigurationService.set("oozie.action.sharelib.for.java", "java");
final String runningJob = submitAction(context);
- waitUntilYarnAppState(runningJob, YarnApplicationState.FINISHED);
+ waitUntilYarnAppDoneAndAssertSuccess(runningJob);
}
public void testJobSubmissionWithoutYarnKill() throws Exception {
@@ -2236,7 +2231,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
ConfigurationService.setBoolean(JavaActionExecutor.HADOOP_YARN_KILL_CHILD_JOBS_ON_AMRESTART, false);
final String runningJob = submitAction(context, ae);
- waitUntilYarnAppState(runningJob, YarnApplicationState.FINISHED);
+ waitUntilYarnAppDoneAndAssertSuccess(runningJob);
}
public void testDefaultConfigurationInLauncher() throws Exception {
http://git-wip-us.apache.org/repos/asf/oozie/blob/61f3a9f0/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
index 72f0114..71b46d1 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
@@ -18,19 +18,18 @@
package org.apache.oozie.command.wf;
-import java.io.StringReader;
import java.net.URI;
import java.util.Date;
+import java.util.Set;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.examples.SleepJob;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
+import org.apache.oozie.action.hadoop.LauncherMain;
import org.apache.oozie.action.hadoop.MapperReducerForTest;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
@@ -42,11 +41,10 @@ import org.apache.oozie.service.JPAService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.UUIDService;
import org.apache.oozie.test.XDataTestCase;
-import org.apache.oozie.test.XTestCase.Predicate;
-import org.apache.oozie.util.XConfiguration;
-import org.apache.oozie.util.XmlUtils;
import org.apache.oozie.workflow.WorkflowInstance;
+import com.google.common.collect.Sets;
+
public class TestWorkflowActionKillXCommand extends XDataTestCase {
private Services services;
@@ -117,6 +115,7 @@ public class TestWorkflowActionKillXCommand extends XDataTestCase {
assertEquals(action.getExternalStatus(), "RUNNING");
}
+ // FIXME - fix JAE.kill()
public void testWfActionKillChildJob() throws Exception {
String externalJobID = launchSleepJob(1000);
String childId = launchSleepJob(1000000);
@@ -126,15 +125,8 @@ public class TestWorkflowActionKillXCommand extends XDataTestCase {
WorkflowAction.Status.KILLED, childId);
new ActionKillXCommand(action.getId()).call();
- JobClient jobClient = createJobClient();
- final RunningJob mrJob = jobClient.getJob(JobID.forName(childId));
- waitFor(60 * 1000, new Predicate() {
- public boolean evaluate() throws Exception {
- return mrJob.isComplete();
- }
- });
- assertEquals(mrJob.getJobState(), JobStatus.KILLED);
+ waitUntilYarnAppKilledAndAssertSuccess(childId);
}
protected WorkflowActionBean addRecordToWfActionTable(String wfId, String externalJobID, String actionName,
@@ -189,9 +181,18 @@ public class TestWorkflowActionKillXCommand extends XDataTestCase {
SleepJob sleepjob = new SleepJob();
sleepjob.setConf(jobConf);
jobConf = sleepjob.setupJobConf(1, 1, sleep, 1, sleep, 1);
+ jobConf.set(LauncherMain.CHILD_MAPREDUCE_JOB_TAGS, "sleepjob");
+ jobConf.set(LauncherMain.MAPREDUCE_JOB_TAGS, "sleepjob");
+ System.setProperty(LauncherMain.OOZIE_JOB_LAUNCH_TIME, String.valueOf(System.currentTimeMillis()));
+
+ jobClient.submitJob(jobConf);
+ Set<ApplicationId> apps = Sets.newHashSet();
+ apps = LauncherMain.getChildYarnJobs(jobConf, ApplicationsRequestScope.ALL);
+ assertEquals("Number of YARN apps", apps.size(), 1);
+
+ sleepjob.close();
- final RunningJob runningJob = jobClient.submitJob(jobConf);
- return runningJob.getID().toString();
+ return apps.iterator().next().toString();
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/61f3a9f0/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index f0c79b6..fd6d4ad 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -35,7 +35,6 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
-import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
@@ -44,10 +43,8 @@ import javax.persistence.FlushModeType;
import javax.persistence.Query;
import junit.framework.TestCase;
-import net.sf.ehcache.store.compound.ImmutableValueElementCopyStrategy;
import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.lang3.mutable.MutableBoolean;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.UserGroupInformation;
@@ -92,7 +89,6 @@ import org.apache.oozie.sla.SLARegistrationBean;
import org.apache.oozie.sla.SLASummaryBean;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.test.MiniHCatServer.RUNMODE;
-import org.apache.oozie.test.XTestCase.Predicate;
import org.apache.oozie.test.hive.MiniHS2;
import org.apache.oozie.util.ClasspathUtils;
import org.apache.oozie.util.IOUtils;
@@ -100,11 +96,6 @@ import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XLog;
-import com.google.common.base.Enums;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-
/**
* Base JUnit <code>TestCase</code> subclass used by all Oozie testcases.
* <p/>
@@ -122,6 +113,7 @@ import com.google.common.collect.Sets;
* From within testcases, system properties must be changed using the {@link #setSystemProperty} method.
*/
public abstract class XTestCase extends TestCase {
+ private static EnumSet<YarnApplicationState> YARN_TERMINAL_STATES = EnumSet.of(YarnApplicationState.FAILED, YarnApplicationState.KILLED, YarnApplicationState.FINISHED);
private Map<String, String> sysProps;
private String testCaseDir;
private String testCaseConfDir;
@@ -1235,48 +1227,44 @@ public abstract class XTestCase extends TestCase {
return services;
}
- protected YarnApplicationState waitUntilYarnAppState(String externalId, final YarnApplicationState... acceptedStates)
+ protected YarnApplicationState waitUntilYarnAppState(String externalId, final EnumSet<YarnApplicationState> acceptedStates)
throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
- final Set<YarnApplicationState> states = Sets.immutableEnumSet(Lists.newArrayList(acceptedStates));
- final MutableBoolean endStateOK = new MutableBoolean(false);
final MutableObject<YarnApplicationState> finalState = new MutableObject<YarnApplicationState>();
JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
- // This is needed here because we need a mutable final YarnClient
- final MutableObject<YarnClient> yarnClientMO = new MutableObject<YarnClient>(null);
+ final YarnClient yarnClient = Services.get().get(HadoopAccessorService.class).createYarnClient(getTestUser(), jobConf);
+
try {
- yarnClientMO.setValue(Services.get().get(HadoopAccessorService.class).createYarnClient(getTestUser(), jobConf));
waitFor(60 * 1000, new Predicate() {
@Override
public boolean evaluate() throws Exception {
- YarnApplicationState state = yarnClientMO.getValue().getApplicationReport(appId).getYarnApplicationState();
+ YarnApplicationState state = yarnClient.getApplicationReport(appId).getYarnApplicationState();
finalState.setValue(state);
- if (states.contains(state)) {
- endStateOK.setValue(true);
- return true;
- } else {
- return false;
- }
+ return acceptedStates.contains(state);
}
});
} finally {
- if (yarnClientMO.getValue() != null) {
- yarnClientMO.getValue().close();
+ if (yarnClient != null) {
+ yarnClient.close();
}
}
log.info("Final state is: {0}", finalState.getValue());
- assertTrue(endStateOK.isTrue());
return finalState.getValue();
}
protected void waitUntilYarnAppDoneAndAssertSuccess(String externalId) throws HadoopAccessorException, IOException, YarnException {
- YarnApplicationState state = waitUntilYarnAppState(externalId, YarnApplicationState.FAILED, YarnApplicationState.KILLED, YarnApplicationState.FINISHED);
+ YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
assertEquals("YARN App state", YarnApplicationState.FINISHED, state);
}
+ protected void waitUntilYarnAppKilledAndAssertSuccess(String externalId) throws HadoopAccessorException, IOException, YarnException {
+ YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
+ assertEquals("YARN App state", YarnApplicationState.KILLED, state);
+ }
+
protected YarnApplicationState getYarnApplicationState(String externalId) throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
YarnApplicationState state = null;
http://git-wip-us.apache.org/repos/asf/oozie/blob/61f3a9f0/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
index 338bce8..31200af 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
@@ -125,7 +125,11 @@ public abstract class LauncherMain {
}
}
- private static Set<ApplicationId> getChildYarnJobs(Configuration actionConf) {
+ public static Set<ApplicationId> getChildYarnJobs(Configuration actionConf) {
+ return getChildYarnJobs(actionConf, ApplicationsRequestScope.OWN);
+ }
+
+ public static Set<ApplicationId> getChildYarnJobs(Configuration actionConf, ApplicationsRequestScope scope) {
System.out.println("Fetching child yarn jobs");
Set<ApplicationId> childYarnJobs = new HashSet<ApplicationId>();
String tag = actionConf.get(CHILD_MAPREDUCE_JOB_TAGS);
@@ -142,8 +146,9 @@ public abstract class LauncherMain {
}
GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
- gar.setScope(ApplicationsRequestScope.OWN);
+ gar.setScope(scope);
gar.setApplicationTags(Collections.singleton(tag));
+
long endTime = System.currentTimeMillis();
if (startTime > endTime) {
System.out.println("WARNING: Clock skew between the Oozie server host and this host detected. Please fix this. " +
[22/48] oozie git commit: OOZIE-2729 change JT to RM in methods
Posted by pb...@apache.org.
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigActionExecutor.java b/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigActionExecutor.java
index 6ab9fce..5e5e94a 100644
--- a/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigActionExecutor.java
+++ b/sharelib/pig/src/test/java/org/apache/oozie/action/hadoop/TestPigActionExecutor.java
@@ -92,7 +92,7 @@ public class TestPigActionExecutor extends ActionExecutorTestCase {
assertEquals(classes, ae.getLauncherClasses());
Element actionXml = XmlUtils.parseXml("<pig>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<script>SCRIPT</script>" +
"<param>a=A</param>" +
@@ -342,7 +342,7 @@ public class TestPigActionExecutor extends ActionExecutorTestCase {
w.close();
String actionXml = "<pig>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
setPigConfig(writeStats).toXmlString(false) +
"<script>" + script.getName() + "</script>" +
@@ -376,7 +376,7 @@ public class TestPigActionExecutor extends ActionExecutorTestCase {
w.close();
String actionXml = "<pig>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
setPigConfig(true).toXmlString(false) +
"<script>" + script.getName() + "</script>" +
@@ -421,7 +421,7 @@ public class TestPigActionExecutor extends ActionExecutorTestCase {
getFileSystem().create(rootArchive).close();
String actionXml = "<pig>" +
- " <job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ " <job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
" <name-node>" + getNameNodeUri() + "</name-node>" +
" <script>id.pig</script>" +
" <file>" + jar.toString() +
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestPyspark.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestPyspark.java b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestPyspark.java
index 9d8d4aa..a0ff58b 100644
--- a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestPyspark.java
+++ b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestPyspark.java
@@ -24,15 +24,9 @@ import java.util.ArrayList;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.RunningJob;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
-import org.apache.oozie.service.HadoopAccessorService;
-import org.apache.oozie.service.Services;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XConfiguration;
@@ -61,7 +55,7 @@ public class TestPyspark extends ActionExecutorTestCase {
"<jar>" + PI_EXAMPLE + "</jar>" +
"<spark-opts>" +sparkOpts +"</spark-opts>" +
"</spark>";
- return MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri());
+ return MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri());
}
public void testPyspark() throws Exception {
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkActionExecutor.java b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkActionExecutor.java
index d97f1f0..51e412c 100644
--- a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkActionExecutor.java
+++ b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkActionExecutor.java
@@ -21,16 +21,10 @@ package org.apache.oozie.action.hadoop;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.RunningJob;
-import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.service.ConfigurationService;
-import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.SparkConfigurationService;
import org.apache.oozie.service.WorkflowAppService;
@@ -49,7 +43,6 @@ import java.io.Writer;
import java.text.MessageFormat;
import java.util.Arrays;
-import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
@@ -92,7 +85,7 @@ public class TestSparkActionExecutor extends ActionExecutorTestCase {
SparkConfigurationService scs = Services.get().get(SparkConfigurationService.class);
scs.destroy();
ConfigurationService.set("oozie.service.SparkConfigurationService.spark.configurations",
- getJobTrackerUri() + "=" + sparkConfDir.getAbsolutePath());
+ getResourceManagerUri() + "=" + sparkConfDir.getAbsolutePath());
scs.init(Services.get());
_testSetupMethods("local[*]", new HashMap<String, String>(), "client");
@@ -109,7 +102,7 @@ public class TestSparkActionExecutor extends ActionExecutorTestCase {
assertEquals(Arrays.asList(SparkMain.class), ae.getLauncherClasses());
Element actionXml = XmlUtils.parseXml("<spark>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<master>" + master + "</master>" +
(mode != null ? "<mode>" + mode + "</mode>" : "") +
@@ -162,7 +155,7 @@ public class TestSparkActionExecutor extends ActionExecutorTestCase {
"<arg>" + getAppPath() + "/" + OUTPUT + "</arg>" +
"<spark-opts>--conf " +SPARK_TESTING_MEMORY+"</spark-opts>"+
"</spark>";
- return MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri());
+ return MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri());
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java b/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
index 61d295c..691752d 100644
--- a/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
+++ b/sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopActionExecutor.java
@@ -22,21 +22,12 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.RunningJob;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
-import org.apache.oozie.service.HadoopAccessorService;
-import org.apache.oozie.service.Services;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XConfiguration;
-import org.apache.oozie.util.XmlUtils;
-import org.jdom.Element;
-import org.jdom.Namespace;
import java.io.BufferedReader;
import java.io.File;
@@ -44,7 +35,6 @@ import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
-import java.io.StringReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
@@ -52,7 +42,6 @@ import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import java.util.Properties;
public class TestSqoopActionExecutor extends ActionExecutorTestCase {
@@ -147,19 +136,19 @@ public class TestSqoopActionExecutor extends ActionExecutorTestCase {
private String getActionXml() {
String command = MessageFormat.format(SQOOP_COMMAND, getActionJdbcUri(), getSqoopOutputDir());
- return MessageFormat.format(SQOOP_ACTION_COMMAND_XML, getJobTrackerUri(), getNameNodeUri(),
+ return MessageFormat.format(SQOOP_ACTION_COMMAND_XML, getResourceManagerUri(), getNameNodeUri(),
"dummy", "dummyValue", command);
}
private String getActionXmlEval() {
String query = "select TT.I, TT.S from TT";
- return MessageFormat.format(SQOOP_ACTION_EVAL_XML, getJobTrackerUri(), getNameNodeUri(),
+ return MessageFormat.format(SQOOP_ACTION_EVAL_XML, getResourceManagerUri(), getNameNodeUri(),
getActionJdbcUri(), query);
}
private String getActionXmlFreeFromQuery() {
String query = "select TT.I, TT.S from TT where $CONDITIONS";
- return MessageFormat.format(SQOOP_ACTION_ARGS_XML, getJobTrackerUri(), getNameNodeUri(),
+ return MessageFormat.format(SQOOP_ACTION_ARGS_XML, getResourceManagerUri(), getNameNodeUri(),
getActionJdbcUri(), query, getSqoopOutputDir());
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ddbd90f8/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
index 660efd5..a21b7c7 100644
--- a/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
+++ b/sharelib/streaming/src/test/java/org/apache/oozie/action/hadoop/TestMapReduceActionExecutor.java
@@ -85,7 +85,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
}
public Element createUberJarActionXML(String uberJarPath, String additional) throws Exception{
- return XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ return XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + additional + "<configuration>"
+ "<property><name>oozie.mapreduce.uber.jar</name><value>" + uberJarPath + "</value></property>"
+ "</configuration>" + "</map-reduce>");
@@ -124,7 +124,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
Configuration conf = new XConfiguration();
conf.set("nameNode", getNameNodeUri());
- conf.set("jobTracker", getJobTrackerUri());
+ conf.set("jobTracker", getResourceManagerUri());
conf.set(OozieClient.USER_NAME, getTestUser());
conf.set(OozieClient.APP_PATH, new File(getTestCaseDir(), "workflow.xml").toURI().toString());
conf.set(OozieClient.LOG_TOKEN, "t");
@@ -150,7 +150,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
Element eConf = eAction.getChild("name-node", eAction.getNamespace());
assertEquals(getNameNodeUri(), eConf.getText());
eConf = eAction.getChild("job-tracker", eAction.getNamespace());
- assertEquals(getJobTrackerUri(), eConf.getText());
+ assertEquals(getResourceManagerUri(), eConf.getText());
// check other m-r settings
eConf = eAction.getChild("configuration", eAction.getNamespace());
@@ -219,7 +219,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
List<Class<?>> classes = Arrays.<Class<?>>asList(StreamingMain.class);
assertEquals(classes, ae.getLauncherClasses());
- Element actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ Element actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.input.dir</name><value>IN</value></property>"
+ "<property><name>mapred.output.dir</name><value>OUT</value></property>" + "</configuration>"
@@ -281,7 +281,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
launcherJobConf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, conf);
assertNull(launcherJobConf.getJar()); // same for launcher conf (not set)
- actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + "</map-reduce>");
conf = ae.createBaseHadoopConf(context, actionXml);
ae.setupActionConf(conf, context, actionXml, getFsTestCaseDir());
@@ -304,7 +304,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
}
serv.getConf().setBoolean("oozie.action.mapreduce.uber.jar.enable", originalUberJarDisabled);
- actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + "<streaming>" + "<mapper>M</mapper>"
+ "<reducer>R</reducer>" + "<record-reader>RR</record-reader>"
+ "<record-reader-mapping>RRM1=1</record-reader-mapping>"
@@ -322,7 +322,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
assertEquals("2", conf.get("oozie.streaming.record-reader-mapping.size"));
assertEquals("2", conf.get("oozie.streaming.env.size"));
- actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ actionXml = XmlUtils.parseXml("<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + "<pipes>" + "<map>M</map>" + "<reduce>R</reduce>"
+ "<inputformat>IF</inputformat>" + "<partitioner>P</partitioner>" + "<writer>W</writer>"
+ "<program>PP</program>" + "</pipes>" + "<configuration>"
@@ -533,7 +533,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ getMapReduceConfig(inputDir.toString(), outputDir.toString()).toXmlString(false) + "</map-reduce>";
_testSubmit(MAP_REDUCE, actionXml);
@@ -554,7 +554,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
ow.close();
String actionXml = "<map-reduce>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<configuration>" +
"<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName() +
@@ -584,7 +584,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
XConfiguration conf = getMapReduceConfig(inputDir.toString(), outputDir.toString());
conf.set(MapperReducerForTest.JOB_XML_OUTPUT_LOCATION, jobXml.toUri().toString());
conf.set("B", "b");
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ conf.toXmlString(false)
+ "<config-class>" + OozieActionConfiguratorForTest.class.getName() + "</config-class>" + "</map-reduce>";
@@ -608,7 +608,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ getMapReduceConfig(inputDir.toString(), outputDir.toString()).toXmlString(false)
+ "<config-class>org.apache.oozie.does.not.exist</config-class>" + "</map-reduce>";
@@ -638,7 +638,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
XConfiguration conf = getMapReduceConfig(inputDir.toString(), outputDir.toString());
conf.setBoolean("oozie.test.throw.exception", true); // causes OozieActionConfiguratorForTest to throw an exception
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ conf.toXmlString(false)
+ "<config-class>" + OozieActionConfiguratorForTest.class.getName() + "</config-class>" + "</map-reduce>";
@@ -665,7 +665,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ getMapReduceCredentialsConfig(inputDir.toString(), outputDir.toString()).toXmlString(false)
+ "</map-reduce>";
@@ -729,7 +729,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ getMapReduceUberJarConfig(inputDir.toString(), outputDir.toString()).toXmlString(false) + "</map-reduce>";
String jobID = _testSubmit(MAP_REDUCE, actionXml);
@@ -817,7 +817,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + " <streaming>" + " <mapper>cat</mapper>"
+ " <reducer>wc</reducer>" + " </streaming>"
+ streamingConf.toXmlString(false) + "<file>"
@@ -907,7 +907,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + " <pipes>" + " <program>" + programPath
+ "#wordcount-simple" + "</program>" + " </pipes>"
+ getPipesConfig(inputDir.toString(), outputDir.toString()).toXmlString(false) + "<file>"
@@ -938,7 +938,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
// configuration.
String actionXml = "<map-reduce>"
+ "<job-tracker>"
- + getJobTrackerUri()
+ + getResourceManagerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1010,7 +1010,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
// configuration.
String actionXml = "<map-reduce>"
+ "<job-tracker>"
- + getJobTrackerUri()
+ + getResourceManagerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1077,7 +1077,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
// configuration.
String actionXml = "<map-reduce>"
+ "<job-tracker>"
- + getJobTrackerUri()
+ + getResourceManagerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1116,7 +1116,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
actionXml = "<map-reduce>"
+ "<job-tracker>"
- + getJobTrackerUri()
+ + getResourceManagerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1161,7 +1161,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
mrConfig.set("mapred.job.name", mapredJobName);
StringBuilder sb = new StringBuilder("<map-reduce>")
- .append("<job-tracker>").append(getJobTrackerUri())
+ .append("<job-tracker>").append(getResourceManagerUri())
.append("</job-tracker>").append("<name-node>")
.append(getNameNodeUri()).append("</name-node>")
.append(mrConfig.toXmlString(false)).append("</map-reduce>");
@@ -1258,7 +1258,7 @@ public class TestMapReduceActionExecutor extends ActionExecutorTestCase {
getFileSystem().create(rootArchive).close();
String actionXml = "<map-reduce>" +
- " <job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ " <job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
" <name-node>" + getNameNodeUri() + "</name-node>" +
" <main-class>CLASS</main-class>" +
" <file>" + jar.toString() +
[38/48] oozie git commit: Revert "OOZIE-2729 OYA: refactor XTestCase."
Posted by pb...@apache.org.
Revert "OOZIE-2729 OYA: refactor XTestCase."
This reverts commit ba68347b56d6dd7eec792eb463cc36e9728ddbd5.
Change-Id: I55ff167974fbcdd80414eda65eddf36fea1eee27
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/e5070b1b
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/e5070b1b
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/e5070b1b
Branch: refs/heads/oya
Commit: e5070b1b8560994f743393a9a664b3255e8766d5
Parents: 7a67022
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Mon Nov 28 14:03:17 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Mon Nov 28 14:03:17 2016 +0100
----------------------------------------------------------------------
.../action/hadoop/TestFsActionExecutor.java | 4 +-
.../oozie/action/hadoop/TestShellMain.java | 5 +-
.../command/coord/TestCoordRerunXCommand.java | 4 +-
.../oozie/dependency/TestHCatURIHandler.java | 4 +-
.../oozie/jms/TestDefaultConnectionContext.java | 2 +-
.../oozie/jms/TestHCatMessageHandler.java | 2 +-
.../oozie/jms/TestJMSJobEventListener.java | 9 +-
.../oozie/jms/TestJMSSLAEventListener.java | 4 +-
.../oozie/service/TestJMSAccessorService.java | 7 +-
.../apache/oozie/test/MiniOozieTestCase.java | 3 +-
.../apache/oozie/test/TestCaseDirectories.java | 139 ----
.../apache/oozie/test/TestConfigurations.java | 111 ---
.../org/apache/oozie/test/TestConstants.java | 96 ---
.../org/apache/oozie/test/TestDbCleaner.java | 126 ----
.../apache/oozie/test/TestPropertiesLoader.java | 102 ---
.../apache/oozie/test/TestSystemProperties.java | 105 ---
.../org/apache/oozie/test/TestXTestCase.java | 8 +-
.../java/org/apache/oozie/test/XFsTestCase.java | 6 +-
.../java/org/apache/oozie/test/XTestCase.java | 742 +++++++++++++++----
19 files changed, 626 insertions(+), 853 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
index e7fd9c5..5618cf8 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
@@ -35,7 +35,7 @@ import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.URIHandlerService;
import org.apache.oozie.service.WorkflowAppService;
-import org.apache.oozie.test.TestConstants;
+import org.apache.oozie.test.XTestCase;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XmlUtils;
import org.jdom.Element;
@@ -305,7 +305,7 @@ public class TestFsActionExecutor extends ActionExecutorTestCase {
public void testDeleteHcat() throws Exception {
//setting current user as test user because directory structure created by HCat have current user permissions (755).
- setSystemProperty(TestConstants.TEST_USER1_PROP, System.getProperty("user.name"));
+ setSystemProperty(XTestCase.TEST_USER1_PROP, System.getProperty("user.name"));
createTestTable();
addPartition(db, table, "year=2012;month=12;dt=02;country=us");
URI hcatURI = getHCatURI(db, table, "country=us;year=2012;month=12;dt=02");
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
index d184a5a..a7d6c18 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
@@ -25,7 +25,6 @@ import java.io.FileWriter;
import java.io.Writer;
import java.util.Properties;
-import org.apache.oozie.test.TestConstants;
import org.apache.oozie.util.XConfiguration;
//Test cases are mainly implemented in the Base class
@@ -51,8 +50,8 @@ public class TestShellMain extends ShellTestCase {
jobConf.set("fs.default.name", getNameNodeUri());
- jobConf.set(ShellMain.CONF_OOZIE_SHELL_EXEC, TestConstants.SHELL_COMMAND_NAME);
- String[] args = new String[] { TestConstants.SHELL_COMMAND_SCRIPTFILE_OPTION, script.toString(), "A", "B" };
+ jobConf.set(ShellMain.CONF_OOZIE_SHELL_EXEC, SHELL_COMMAND_NAME);
+ String[] args = new String[] { SHELL_COMMAND_SCRIPTFILE_OPTION, script.toString(), "A", "B" };
ActionUtils.setStrings(jobConf, ShellMain.CONF_OOZIE_SHELL_ARGS, args);
ActionUtils.setStrings(jobConf, ShellMain.CONF_OOZIE_SHELL_ENVS,
new String[] { "var1=value1", "var2=value2" });
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java b/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
index 6ae0ae2..9a05a85 100644
--- a/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
@@ -59,8 +59,8 @@ import org.apache.oozie.service.Services;
import org.apache.oozie.service.StatusTransitService;
import org.apache.oozie.service.URIHandlerService;
import org.apache.oozie.store.StoreException;
-import org.apache.oozie.test.TestConstants;
import org.apache.oozie.test.XDataTestCase;
+import org.apache.oozie.test.XTestCase;
import org.apache.oozie.util.DateUtils;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XLog;
@@ -571,7 +571,7 @@ public class TestCoordRerunXCommand extends XDataTestCase {
*/
public void testCoordRerunCleanupForHCat() throws Exception {
//setting current user as test user because directory structure created by HCat have current user permissions (755).
- setSystemProperty(TestConstants.TEST_USER1_PROP, System.getProperty("user.name"));
+ setSystemProperty(XTestCase.TEST_USER1_PROP, System.getProperty("user.name"));
super.setupHCatalogServer();
services = super.setupServicesForHCatalog();
services.init();
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/dependency/TestHCatURIHandler.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/dependency/TestHCatURIHandler.java b/core/src/test/java/org/apache/oozie/dependency/TestHCatURIHandler.java
index 5f62c11..615f5e1 100644
--- a/core/src/test/java/org/apache/oozie/dependency/TestHCatURIHandler.java
+++ b/core/src/test/java/org/apache/oozie/dependency/TestHCatURIHandler.java
@@ -24,8 +24,8 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.oozie.service.HCatAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.URIHandlerService;
-import org.apache.oozie.test.TestConstants;
import org.apache.oozie.test.XHCatTestCase;
+import org.apache.oozie.test.XTestCase;
import org.junit.Test;
public class TestHCatURIHandler extends XHCatTestCase {
@@ -69,7 +69,7 @@ public class TestHCatURIHandler extends XHCatTestCase {
@Test
public void testExists() throws Exception {
//setting current user as test user because directory structure created by HCat have current user permissions (755).
- setSystemProperty(TestConstants.TEST_USER1_PROP, System.getProperty("user.name"));
+ setSystemProperty(XTestCase.TEST_USER1_PROP, System.getProperty("user.name"));
createTestTable();
addPartition(db, table, "year=2012;month=12;dt=02;country=us");
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/jms/TestDefaultConnectionContext.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/jms/TestDefaultConnectionContext.java b/core/src/test/java/org/apache/oozie/jms/TestDefaultConnectionContext.java
index 5346a91..e941a8e 100644
--- a/core/src/test/java/org/apache/oozie/jms/TestDefaultConnectionContext.java
+++ b/core/src/test/java/org/apache/oozie/jms/TestDefaultConnectionContext.java
@@ -44,7 +44,7 @@ public class TestDefaultConnectionContext extends XTestCase {
conf.set(Services.CONF_SERVICE_EXT_CLASSES,
JMSAccessorService.class.getName() + "," + JMSTopicService.class.getName());
conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#"
- + ACTIVE_MQ_CONN_FACTORY + ";" + "java.naming.provider.url#" + LOCAL_ACTIVE_MQ_BROKER
+ + ActiveMQConnFactory + ";" + "java.naming.provider.url#" + localActiveMQBroker
+ ";connectionFactoryNames#" + "ConnectionFactory");
services.init();
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/jms/TestHCatMessageHandler.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/jms/TestHCatMessageHandler.java b/core/src/test/java/org/apache/oozie/jms/TestHCatMessageHandler.java
index 2ce5431..1a1a44e 100644
--- a/core/src/test/java/org/apache/oozie/jms/TestHCatMessageHandler.java
+++ b/core/src/test/java/org/apache/oozie/jms/TestHCatMessageHandler.java
@@ -66,7 +66,7 @@ public class TestHCatMessageHandler extends XDataTestCase {
super.setUp();
services = super.setupServicesForHCatalog();
services.init();
- connFac = new ActiveMQConnectionFactory(LOCAL_ACTIVE_MQ_BROKER);
+ connFac = new ActiveMQConnectionFactory(localActiveMQBroker);
conn = connFac.createConnection();
conn.start();
session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/jms/TestJMSJobEventListener.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/jms/TestJMSJobEventListener.java b/core/src/test/java/org/apache/oozie/jms/TestJMSJobEventListener.java
index 913112f..728916e 100644
--- a/core/src/test/java/org/apache/oozie/jms/TestJMSJobEventListener.java
+++ b/core/src/test/java/org/apache/oozie/jms/TestJMSJobEventListener.java
@@ -38,6 +38,9 @@ import org.apache.oozie.client.event.jms.JMSHeaderConstants;
import org.apache.oozie.client.event.message.CoordinatorActionMessage;
import org.apache.oozie.client.event.message.WorkflowJobMessage;
import org.apache.oozie.event.*;
+import org.apache.oozie.jms.ConnectionContext;
+import org.apache.oozie.jms.JMSConnectionInfo;
+import org.apache.oozie.jms.JMSJobEventListener;
import org.apache.oozie.service.JMSAccessorService;
import org.apache.oozie.service.JMSTopicService;
import org.apache.oozie.service.Services;
@@ -60,8 +63,8 @@ public class TestJMSJobEventListener extends XTestCase {
conf = services.getConf();
conf.set(Services.CONF_SERVICE_EXT_CLASSES,
JMSAccessorService.class.getName() + "," + JMSTopicService.class.getName());
- conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#" + ACTIVE_MQ_CONN_FACTORY
- + ";" + "java.naming.provider.url#" + LOCAL_ACTIVE_MQ_BROKER + ";connectionFactoryNames#"
+ conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#" + ActiveMQConnFactory
+ + ";" + "java.naming.provider.url#" + localActiveMQBroker + ";connectionFactoryNames#"
+ "ConnectionFactory");
services.init();
}
@@ -326,7 +329,7 @@ public class TestJMSJobEventListener extends XTestCase {
int randomPort = 30000 + random.nextInt(10000);
String brokerURl = "tcp://localhost:" + randomPort;
conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#"
- + ACTIVE_MQ_CONN_FACTORY + ";" + "java.naming.provider.url#" + brokerURl + ";connectionFactoryNames#"
+ + ActiveMQConnFactory + ";" + "java.naming.provider.url#" + brokerURl + ";connectionFactoryNames#"
+ "ConnectionFactory");
services.init();
JMSJobEventListener wfEventListener = new JMSJobEventListener();
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/jms/TestJMSSLAEventListener.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/jms/TestJMSSLAEventListener.java b/core/src/test/java/org/apache/oozie/jms/TestJMSSLAEventListener.java
index 7b9e64b..30fd151 100644
--- a/core/src/test/java/org/apache/oozie/jms/TestJMSSLAEventListener.java
+++ b/core/src/test/java/org/apache/oozie/jms/TestJMSSLAEventListener.java
@@ -56,8 +56,8 @@ public class TestJMSSLAEventListener extends XTestCase {
conf = services.getConf();
conf.set(Services.CONF_SERVICE_EXT_CLASSES,
JMSAccessorService.class.getName() + "," + JMSTopicService.class.getName());
- conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#" + ACTIVE_MQ_CONN_FACTORY
- + ";" + "java.naming.provider.url#" + LOCAL_ACTIVE_MQ_BROKER + ";" + "connectionFactoryNames#"
+ conf.set(JMSJobEventListener.JMS_CONNECTION_PROPERTIES, "java.naming.factory.initial#" + ActiveMQConnFactory
+ + ";" + "java.naming.provider.url#" + localActiveMQBroker + ";" + "connectionFactoryNames#"
+ "ConnectionFactory");
services.init();
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/service/TestJMSAccessorService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestJMSAccessorService.java b/core/src/test/java/org/apache/oozie/service/TestJMSAccessorService.java
index 96803db..505049d 100644
--- a/core/src/test/java/org/apache/oozie/service/TestJMSAccessorService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestJMSAccessorService.java
@@ -21,6 +21,7 @@ package org.apache.oozie.service;
import java.net.URI;
import java.util.Random;
+import javax.jms.JMSException;
import javax.jms.Session;
import org.apache.activemq.broker.BrokerService;
@@ -160,7 +161,7 @@ public class TestJMSAccessorService extends XTestCase {
servicesConf.set(JMSAccessorService.CONF_RETRY_INITIAL_DELAY, "1");
servicesConf.set(JMSAccessorService.CONF_RETRY_MAX_ATTEMPTS, "3");
servicesConf.set(HCatAccessorService.JMS_CONNECTIONS_PROPERTIES, "default=java.naming.factory.initial#"
- + ACTIVE_MQ_CONN_FACTORY + ";" + "java.naming.provider.url#" + brokerURl + ";" + "connectionFactoryNames#"
+ + ActiveMQConnFactory + ";" + "java.naming.provider.url#" + brokerURl + ";" + "connectionFactoryNames#"
+ "ConnectionFactory");
services.init();
HCatAccessorService hcatService = Services.get().get(HCatAccessorService.class);
@@ -192,7 +193,7 @@ public class TestJMSAccessorService extends XTestCase {
services = super.setupServicesForHCatalog();
int randomPort = 30000 + random.nextInt(10000);
String brokerURL = "tcp://localhost:" + randomPort;
- String jndiPropertiesString = "java.naming.factory.initial#" + ACTIVE_MQ_CONN_FACTORY + ";"
+ String jndiPropertiesString = "java.naming.factory.initial#" + ActiveMQConnFactory + ";"
+ "java.naming.provider.url#" + brokerURL + ";" + "connectionFactoryNames#" + "ConnectionFactory";
Configuration servicesConf = services.getConf();
servicesConf.set(JMSAccessorService.CONF_RETRY_INITIAL_DELAY, "1");
@@ -241,7 +242,7 @@ public class TestJMSAccessorService extends XTestCase {
public void testConnectionRetryMaxAttempt() throws Exception {
services.destroy();
services = super.setupServicesForHCatalog();
- String jndiPropertiesString = "java.naming.factory.initial#" + ACTIVE_MQ_CONN_FACTORY + ";"
+ String jndiPropertiesString = "java.naming.factory.initial#" + ActiveMQConnFactory + ";"
+ "java.naming.provider.url#" + "tcp://localhost:12345;connectionFactoryNames#ConnectionFactory";
Configuration servicesConf = services.getConf();
servicesConf.set(JMSAccessorService.CONF_RETRY_INITIAL_DELAY, "1");
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/test/MiniOozieTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/MiniOozieTestCase.java b/core/src/test/java/org/apache/oozie/test/MiniOozieTestCase.java
index 93b9cd0..12ede02 100644
--- a/core/src/test/java/org/apache/oozie/test/MiniOozieTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/MiniOozieTestCase.java
@@ -18,6 +18,7 @@
package org.apache.oozie.test;
+import org.apache.oozie.test.XFsTestCase;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.local.LocalOozie;
@@ -38,7 +39,7 @@ import org.apache.oozie.local.LocalOozie;
* 'hdfs://localhost:9000'.
* <p/>
* The test working directory is created in the specified FS URI, under the current user name home directory, under the
- * subdirectory name specified wit the system property {@link TestConstants#OOZIE_TEST_DIR}. The default value is '/tmp'.
+ * subdirectory name specified with the system property {@link XTestCase#OOZIE_TEST_DIR}. The default value is '/tmp'.
* <p/> The path of the test working directory is: '$FS_URI/user/$USER/$OOZIE_TEST_DIR/oozietest/$TEST_CASE_CLASS/$TEST_CASE_METHOD/'
* <p/> For example: 'hdfs://localhost:9000/user/tucu/tmp/oozietest/org.apache.oozie.service.TestELService/testEL/'
* <p/>
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/test/TestCaseDirectories.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestCaseDirectories.java b/core/src/test/java/org/apache/oozie/test/TestCaseDirectories.java
deleted file mode 100644
index 36be4da..0000000
--- a/core/src/test/java/org/apache/oozie/test/TestCaseDirectories.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.test;
-
-import junit.framework.TestCase;
-import org.apache.oozie.util.ParamChecker;
-import org.apache.oozie.util.XLog;
-
-import java.io.File;
-import java.io.IOException;
-
-class TestCaseDirectories {
-
- /**
- * Create the test working directory.
- *
- * @param testCase testcase instance to obtain the working directory.
- * @param cleanup indicates if the directory should be cleaned up if it exists.
- * @return return the path of the test working directory, it is always an absolute path.
- * @throws Exception if the test working directory could not be created or cleaned up.
- */
- String createTestCaseDir(final TestCase testCase, final boolean cleanup) throws Exception {
- final String testCaseDir = getTestCaseDirInternal(testCase);
- System.out.println();
- System.out.println(XLog.format("Setting testcase work dir[{0}]", testCaseDir));
- if (cleanup) {
- delete(new File(testCaseDir));
- }
- final File dir = new File(testCaseDir);
- if (!dir.mkdirs()) {
- throw new RuntimeException(XLog.format("Could not create testcase dir[{0}]", testCaseDir));
- }
- return testCaseDir;
- }
-
- /**
- * Return the test working directory.
- * <p/>
- * It returns <code>${oozie.test.dir}/oozietests/TESTCLASSNAME/TESTMETHODNAME</code>.
- *
- * @param testCase testcase instance to obtain the working directory.
- * @return the test working directory.
- */
- private String getTestCaseDirInternal(final TestCase testCase) {
- ParamChecker.notNull(testCase, "testCase");
-
- File dir = new File(System.getProperty(TestConstants.OOZIE_TEST_DIR, "target/test-data"));
-
- dir = new File(dir, "oozietests").getAbsoluteFile();
- dir = new File(dir, testCase.getClass().getName());
- dir = new File(dir, testCase.getName());
-
- return dir.getAbsolutePath();
- }
-
- protected void delete(final File file) throws IOException {
- ParamChecker.notNull(file, "file");
- if (file.getAbsolutePath().length() < 5) {
- throw new RuntimeException(XLog.format("path [{0}] is too short, not deleting", file.getAbsolutePath()));
- }
- if (file.exists()) {
- if (file.isDirectory()) {
- final File[] children = file.listFiles();
- if (children != null) {
- for (final File child : children) {
- delete(child);
- }
- }
- }
- if (!file.delete()) {
- throw new RuntimeException(XLog.format("could not delete path [{0}]", file.getAbsolutePath()));
- }
- }
- else {
- // With a dangling symlink, exists() doesn't return true so try to delete it anyway; we fail silently in case the file
- // truly doesn't exist
- file.delete();
- }
- }
-
- String createTestCaseSubdir(String testCaseDir, String[] subDirNames) {
- ParamChecker.notNull(subDirNames, "subDirName");
- if (subDirNames.length == 0) {
- throw new RuntimeException(XLog.format("Could not create testcase subdir ''; it already exists"));
- }
-
- File dir = new File(testCaseDir);
- for (int i = 0; i < subDirNames.length; i++) {
- ParamChecker.notNull(subDirNames[i], "subDirName[" + i + "]");
- dir = new File(dir, subDirNames[i]);
- }
-
- if (!dir.mkdirs()) {
- throw new RuntimeException(XLog.format("Could not create testcase subdir[{0}]", dir));
- }
- return dir.getAbsolutePath();
- }
-
- void createTestDirOrError() {
- final String baseDir = System.getProperty(TestConstants.OOZIE_TEST_DIR, new File("target/test-data").getAbsolutePath());
- String msg = null;
- final File testDir = new File(baseDir);
- if (!testDir.isAbsolute()) {
- msg = XLog.format("System property [{0}]=[{1}] must be set to an absolute path", TestConstants.OOZIE_TEST_DIR, baseDir);
- }
- else {
- if (baseDir.length() < 4) {
- msg = XLog.format("System property [{0}]=[{1}] path must be at least 4 chars", TestConstants.OOZIE_TEST_DIR, baseDir);
- }
- }
- if (msg != null) {
- System.err.println();
- System.err.println(msg);
- System.exit(-1);
- }
- testDir.mkdirs();
- if (!testDir.exists() || !testDir.isDirectory()) {
- System.err.println();
- System.err.println(XLog.format("Could not create test dir [{0}]", baseDir));
- System.exit(-1);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestConfigurations.java b/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
deleted file mode 100644
index c6c1d23..0000000
--- a/core/src/test/java/org/apache/oozie/test/TestConfigurations.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.test;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.oozie.dependency.FSURIHandler;
-import org.apache.oozie.dependency.HCatURIHandler;
-import org.apache.oozie.service.*;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Map;
-
-class TestConfigurations {
-
- Configuration createYarnConfig(final Configuration parentConfig) {
- final Configuration yarnConfig = new YarnConfiguration(parentConfig);
-
- yarnConfig.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
- yarnConfig.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:0");
-
- return yarnConfig;
- }
-
- @SuppressWarnings("deprecation")
- JobConf createJobConfFromYarnCluster(final Configuration yarnConfiguration) {
- final JobConf jobConf = new JobConf();
- final JobConf jobConfYarn = new JobConf(yarnConfiguration);
-
- for (final Map.Entry<String, String> entry : jobConfYarn) {
- // MiniMRClientClusterFactory sets the job jar in Hadoop 2.0 causing tests to fail
- // TODO call conf.unset after moving completely to Hadoop 2.x
- if (!(entry.getKey().equals("mapreduce.job.jar") || entry.getKey().equals("mapred.jar"))) {
- jobConf.set(entry.getKey(), entry.getValue());
- }
- }
-
- return jobConf;
- }
-
- JobConf createPristineJobConf(final String jobTrackerUri, final String nameNodeUri) {
- final JobConf jobConf = new JobConf();
-
- jobConf.set("mapred.job.tracker", jobTrackerUri);
- jobConf.set("fs.default.name", nameNodeUri);
-
- return jobConf;
- }
-
- JobConf createDFSConfig(String oozieUser, String testGroup) throws UnknownHostException {
- final JobConf conf = new JobConf();
- conf.set("dfs.block.access.token.enable", "false");
- conf.set("dfs.permissions", "true");
- conf.set("hadoop.security.authentication", "simple");
-
- //Doing this because Hadoop 1.x does not support '*' if the value is '*,127.0.0.1'
- final StringBuilder sb = new StringBuilder();
- sb.append("127.0.0.1,localhost");
- for (final InetAddress i : InetAddress.getAllByName(InetAddress.getLocalHost().getHostName())) {
- sb.append(",").append(i.getCanonicalHostName());
- }
- conf.set("hadoop.proxyuser." + oozieUser + ".hosts", sb.toString());
-
- conf.set("hadoop.proxyuser." + oozieUser + ".groups", testGroup);
- conf.set("mapred.tasktracker.map.tasks.maximum", "4");
- conf.set("mapred.tasktracker.reduce.tasks.maximum", "4");
-
- conf.set("hadoop.tmp.dir", "target/test-data" + "/minicluster");
-
- // Scheduler properties required for YARN CapacityScheduler to work
- conf.set("yarn.scheduler.capacity.root.queues", "default");
- conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
- // Required to prevent deadlocks with YARN CapacityScheduler
- conf.set("yarn.scheduler.capacity.maximum-am-resource-percent", "0.5");
-
- return conf;
- }
-
- void setConfigurationForHCatalog(final Services services) {
- final Configuration conf = services.getConf();
-
- conf.set(Services.CONF_SERVICE_EXT_CLASSES,
- JMSAccessorService.class.getName() + "," +
- PartitionDependencyManagerService.class.getName() + "," +
- HCatAccessorService.class.getName());
- conf.set(HCatAccessorService.JMS_CONNECTIONS_PROPERTIES,
- "default=java.naming.factory.initial#" + XTestCase.ACTIVE_MQ_CONN_FACTORY + ";" +
- "java.naming.provider.url#" + XTestCase.LOCAL_ACTIVE_MQ_BROKER +
- "connectionFactoryNames#" + "ConnectionFactory");
- conf.set(URIHandlerService.URI_HANDLERS,
- FSURIHandler.class.getName() + "," + HCatURIHandler.class.getName());
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/test/TestConstants.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestConstants.java b/core/src/test/java/org/apache/oozie/test/TestConstants.java
deleted file mode 100644
index f7d4580..0000000
--- a/core/src/test/java/org/apache/oozie/test/TestConstants.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.test;
-
-import org.apache.hadoop.util.Shell;
-
-public class TestConstants {
- /**
- * System property that specifies the default test user name used by
- * the tests. The default value of this property is <tt>test</tt>.
- */
- public static final String TEST_USER1_PROP = "oozie.test.user.test";
- /**
- * System property that specifies the test group used by the tests.
- * The default value of this property is <tt>testg</tt>.
- */
- public static final String TEST_GROUP_PROP2 = "oozie.test.group2";
- /**
- * Name of the shell command
- */
- public static final String SHELL_COMMAND_NAME = (Shell.WINDOWS) ? "cmd" : "bash";
- /**
- * Extension for shell script files
- */
- protected static final String SHELL_COMMAND_SCRIPTFILE_EXTENSION = (Shell.WINDOWS) ? "cmd" : "sh";
- /**
- * Option for shell command to pass script files
- */
- public static final String SHELL_COMMAND_SCRIPTFILE_OPTION = (Shell.WINDOWS) ? "/c" : "-c";
- /**
- * System property to specify the parent directory for the 'oozietests' directory to be used as base for all test
- * working directories. </p> If this property is not set, the assumed value is '/tmp'.
- */
- static final String OOZIE_TEST_DIR = "oozie.test.dir";
- /**
- * System property to specify the Hadoop Job Tracker to use for testing. </p> If this property is not set, the
- * assumed value is 'localhost:9001'.
- */
- static final String OOZIE_TEST_JOB_TRACKER = "oozie.test.job.tracker";
- /**
- * System property to specify the Hadoop Name Node to use for testing. </p> If this property is not set, the assumed
- * value is 'localhost:9000'.
- */
- static final String OOZIE_TEST_NAME_NODE = "oozie.test.name.node";
- /**
- * System property to specify the second Hadoop Name Node to use for testing. </p> If this property is not set, the assumed
- * value is 'localhost:9100'.
- */
- static final String OOZIE_TEST_NAME_NODE2 = "oozie.test.name.node2";
- /**
- * System property to specify the Hadoop Version to use for testing. </p> If this property is not set, the assumed
- * value is "0.20.0"
- */
- static final String HADOOP_VERSION = "hadoop.version";
- /**
- * System property that specifies the user that test oozie instance runs as.
- * The value of this property defaults to the "${user.name} system property.
- */
- static final String TEST_OOZIE_USER_PROP = "oozie.test.user.oozie";
- /**
- * System property that specifies an auxiliary test user name used by the
- * tests. The default value of this property is <tt>test2</tt>.
- */
- static final String TEST_USER2_PROP = "oozie.test.user.test2";
- /**
- * System property that specifies another auxiliary test user name used by
- * the tests. The default value of this property is <tt>test3</tt>.
- */
- static final String TEST_USER3_PROP = "oozie.test.user.test3";
- /**
- * System property that specifies the test group used by the tests.
- * The default value of this property is <tt>testg</tt>.
- */
- static final String TEST_GROUP_PROP = "oozie.test.group";
- /**
- * System property that specifies the wait time, in seconds, between testcases before
- * triggering a shutdown. The default value is 10 sec.
- */
- static final String TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT = "oozie.test.minicluster.monitor.shutdown.wait";
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/test/TestDbCleaner.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestDbCleaner.java b/core/src/test/java/org/apache/oozie/test/TestDbCleaner.java
deleted file mode 100644
index 318274c..0000000
--- a/core/src/test/java/org/apache/oozie/test/TestDbCleaner.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.test;
-
-import org.apache.commons.logging.LogFactory;
-import org.apache.oozie.*;
-import org.apache.oozie.service.JPAService;
-import org.apache.oozie.service.ServiceException;
-import org.apache.oozie.service.Services;
-import org.apache.oozie.service.StoreService;
-import org.apache.oozie.sla.SLARegistrationBean;
-import org.apache.oozie.sla.SLASummaryBean;
-import org.apache.oozie.store.StoreException;
-import org.apache.oozie.util.XLog;
-
-import javax.persistence.EntityManager;
-import javax.persistence.FlushModeType;
-import javax.persistence.TypedQuery;
-import java.util.List;
-
-public class TestDbCleaner {
- private static final XLog log = new XLog(LogFactory.getLog(TestDbCleaner.class));
-
- /**
- * Minimal set of require Services for cleaning up the database ({@link JPAService} and {@link StoreService})
- */
- private static final String MINIMAL_SERVICES_FOR_DB_CLEANUP = JPAService.class.getName() + "," + StoreService.class.getName();
-
- private EntityManager entityManager;
-
- void cleanDbTables() throws StoreException, ServiceException {
- // If the Services are already loaded, then a test is likely calling this for something specific and we shouldn't mess with
- // the Services; so just cleanup the database
- if (Services.get() != null) {
- performCleanDbTables();
- }
- else {
- // Otherwise, this is probably being called during setup() and we should just load the minimal set of required Services
- // needed to cleanup the database and shut them down when done; the test will likely start its own Services later and
- // we don't want to interfere
- try {
- final Services services = new Services();
- services.getConf().set(Services.CONF_SERVICE_CLASSES, MINIMAL_SERVICES_FOR_DB_CLEANUP);
- services.init();
- performCleanDbTables();
- } finally {
- if (Services.get() != null) {
- Services.get().destroy();
- }
- }
- }
- }
-
- private void performCleanDbTables() throws StoreException {
- ensureEntityManager().setFlushMode(FlushModeType.COMMIT);
- ensureEntityManager().getTransaction().begin();
-
- final int wfjSize = removeAllByQueryName("GET_WORKFLOWS", WorkflowJobBean.class);
- final int wfaSize = removeAllByQueryName("GET_ACTIONS", WorkflowActionBean.class);
- final int cojSize = removeAllByQueryName("GET_COORD_JOBS", CoordinatorJobBean.class);
- final int coaSize = removeAllByQueryName("GET_COORD_ACTIONS", CoordinatorActionBean.class);
- final int bjSize = removeAllByQueryName("GET_BUNDLE_JOBS", BundleJobBean.class);
- final int baSize = removeAllByQueryName("GET_BUNDLE_ACTIONS", BundleActionBean.class);
- final int slaSize = removeAllByQueryName("GET_SLA_EVENTS", SLAEventBean.class);
- final int ssSize = removeAllByQueryName("GET_SLA_EVENTS", SLAEventBean.class);
- final int slaRegSize = removeAllByHql("select OBJECT(w) from SLARegistrationBean w", SLARegistrationBean.class);
- final int slaSumSize = removeAllByHql("select OBJECT(w) from SLASummaryBean w", SLASummaryBean.class);
-
- ensureEntityManager().getTransaction().commit();
- ensureEntityManager().close();
-
- log.info(wfjSize + " entries in WF_JOBS removed from DB!");
- log.info(wfaSize + " entries in WF_ACTIONS removed from DB!");
- log.info(cojSize + " entries in COORD_JOBS removed from DB!");
- log.info(coaSize + " entries in COORD_ACTIONS removed from DB!");
- log.info(bjSize + " entries in BUNDLE_JOBS removed from DB!");
- log.info(baSize + " entries in BUNDLE_ACTIONS removed from DB!");
- log.info(slaSize + " entries in SLA_EVENTS removed from DB!");
- log.info(ssSize + " entries in SLA_SUMMARY removed from DB!");
- log.info(slaRegSize + " entries in SLA_REGISTRATION removed from DB!");
- log.info(slaSumSize + " entries in SLA_SUMMARY removed from DB!");
- }
-
- private <E> int removeAllByQueryName(final String queryName, final Class<E> entityClass) {
- return removeAll(ensureEntityManager().createNamedQuery(queryName, entityClass));
- }
-
- private <E> int removeAllByHql(final String hql, final Class<E> entityClass) {
- return removeAll(ensureEntityManager().createQuery(hql, entityClass));
- }
-
- private <E> int removeAll(final TypedQuery<E> query) {
- final List<E> entitiesToRemove = query.getResultList();
- final int removedEntitiedCount = entitiesToRemove.size();
-
- for (final E entityToRemove : entitiesToRemove) {
- ensureEntityManager().remove(entityToRemove);
- }
-
- return removedEntitiedCount;
- }
-
- private EntityManager ensureEntityManager() {
- if (entityManager == null) {
- entityManager = Services.get().get(JPAService.class).getEntityManager();
- }
-
- return entityManager;
- }
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/test/TestPropertiesLoader.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestPropertiesLoader.java b/core/src/test/java/org/apache/oozie/test/TestPropertiesLoader.java
deleted file mode 100644
index 97057f8..0000000
--- a/core/src/test/java/org/apache/oozie/test/TestPropertiesLoader.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.test;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.Map;
-import java.util.Properties;
-
-public class TestPropertiesLoader {
-
- public File loadTestPropertiesOrThrow() {
- try {
- final File oozieSrcDir = findOozieSrcDir();
-
- final String testPropsFile = System.getProperty(XTestCase.OOZIE_TEST_PROPERTIES, "test.properties");
- final File file = new File(testPropsFile).isAbsolute()
- ? new File(testPropsFile) : new File(oozieSrcDir, testPropsFile);
- if (file.exists()) {
- loadTestProperties(file);
- }
- else {
- checkTestPropertiesAndError();
- }
-
- return oozieSrcDir;
- } catch (final IOException ex) {
- throw new RuntimeException(ex);
- }
- }
-
- private File findOozieSrcDir() {
- File oozieSrcDir = new File("core").getAbsoluteFile();
-
- if (!oozieSrcDir.exists()) {
- oozieSrcDir = oozieSrcDir.getParentFile().getParentFile();
- oozieSrcDir = new File(oozieSrcDir, "core");
- }
- if (!oozieSrcDir.exists()) {
- oozieSrcDir = oozieSrcDir.getParentFile().getParentFile();
- oozieSrcDir = new File(oozieSrcDir, "core");
- }
- if (!oozieSrcDir.exists()) {
- // We're probably being run from outside of Oozie (e.g. MiniOozie), so just use a dummy location here.
- // Anything that uses this location should have a fallback anyway.
- oozieSrcDir = new File(".");
- }
- else {
- oozieSrcDir = oozieSrcDir.getParentFile();
- }
-
- return oozieSrcDir;
- }
-
- private void loadTestProperties(final File file) throws IOException {
- System.out.println();
- System.out.println("*********************************************************************************");
- System.out.println("Loading test system properties from: " + file.getAbsolutePath());
- System.out.println();
- final Properties props = new Properties();
- props.load(new FileReader(file));
- for (final Map.Entry entry : props.entrySet()) {
- if (!System.getProperties().containsKey(entry.getKey())) {
- System.setProperty((String) entry.getKey(), (String) entry.getValue());
- System.out.println(entry.getKey() + " = " + entry.getValue());
- }
- else {
- System.out.println(entry.getKey() + " IGNORED, using command line value = " +
- System.getProperty((String) entry.getKey()));
- }
- }
- System.out.println("*********************************************************************************");
- System.out.println();
- }
-
- private void checkTestPropertiesAndError() {
- if (System.getProperty(XTestCase.OOZIE_TEST_PROPERTIES) != null) {
- System.err.println();
- System.err.println("ERROR: Specified test file does not exist: " +
- System.getProperty(XTestCase.OOZIE_TEST_PROPERTIES));
- System.err.println();
- System.exit(-1);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/test/TestSystemProperties.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestSystemProperties.java b/core/src/test/java/org/apache/oozie/test/TestSystemProperties.java
deleted file mode 100644
index dcc770a..0000000
--- a/core/src/test/java/org/apache/oozie/test/TestSystemProperties.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.test;
-
-import org.apache.oozie.service.ConfigurationService;
-import org.apache.oozie.service.HadoopAccessorService;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class TestSystemProperties {
- private Map<String, String> sysProps;
-
- private boolean embeddedHadoop = false;
-
- private boolean embeddedHadoop2 = false;
-
- void setupSystemProperties(final String testCaseDir) throws Exception {
- if (System.getProperty("oozielocal.log") == null) {
- setSystemProperty("oozielocal.log", "/tmp/oozielocal.log");
- }
- if (System.getProperty("oozie.test.hadoop.security", "simple").equals("kerberos")) {
- System.setProperty("oozie.service.HadoopAccessorService.kerberos.enabled", "true");
- }
- if (System.getProperty("oozie.test.hadoop.minicluster", "true").equals("true")) {
- embeddedHadoop = true;
- // Second cluster is not necessary without the first one
- if (System.getProperty("oozie.test.hadoop.minicluster2", "false").equals("true")) {
- embeddedHadoop2 = true;
- }
- }
-
- if (System.getProperty("oozie.test.db.host") == null) {
- System.setProperty("oozie.test.db.host", "localhost");
- }
- setSystemProperty(ConfigurationService.OOZIE_DATA_DIR, testCaseDir);
-
- setSystemProperty(HadoopAccessorService.SUPPORTED_FILESYSTEMS, "*");
- }
-
- /**
- * Reset changed system properties to their original values. <p/> Called from {@link XTestCase#tearDown}.
- */
- void resetSystemProperties() {
- if (sysProps != null) {
- for (final Map.Entry<String, String> entry : sysProps.entrySet()) {
- if (entry.getValue() != null) {
- System.setProperty(entry.getKey(), entry.getValue());
- }
- else {
- System.getProperties().remove(entry.getKey());
- }
- }
- sysProps.clear();
- }
- }
-
- /**
- * Set a system property for the duration of the method test case.
- * <p/>
- * After the test method ends the original value is restored.
- *
- * @param name system property name.
- * @param value value to set.
- */
- protected void setSystemProperty(final String name, final String value) {
- if (sysProps == null) {
- sysProps = new HashMap<String, String>();
- }
- if (!sysProps.containsKey(name)) {
- final String currentValue = System.getProperty(name);
- sysProps.put(name, currentValue);
- }
- if (value != null) {
- System.setProperty(name, value);
- }
- else {
- System.getProperties().remove(name);
- }
- }
-
- boolean isEmbeddedHadoop() {
- return embeddedHadoop;
- }
-
- boolean isEmbeddedHadoop2() {
- return embeddedHadoop2;
- }
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestXTestCase.java b/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
index 735f80a..7a2d70d 100644
--- a/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
@@ -37,7 +37,7 @@ public class TestXTestCase extends TestCase {
}
public void testBaseDir() throws Exception {
- testBaseDir = System.getProperty(TestConstants.OOZIE_TEST_DIR);
+ testBaseDir = System.getProperty(XTestCase.OOZIE_TEST_DIR);
try {
MyXTestCase testcase = new MyXTestCase();
testcase.setName(getName());
@@ -47,7 +47,7 @@ public class TestXTestCase extends TestCase {
}
finally {
if (testBaseDir != null) {
- System.getProperties().setProperty(TestConstants.OOZIE_TEST_DIR, testBaseDir);
+ System.getProperties().setProperty(XTestCase.OOZIE_TEST_DIR, testBaseDir);
}
}
}
@@ -190,8 +190,8 @@ public class TestXTestCase extends TestCase {
public void testHadoopSysProps() {
if (TESTING) {
- setSystemProperty(TestConstants.OOZIE_TEST_NAME_NODE, "hdfs://xyz:9000");
- setSystemProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, "xyz:9001");
+ setSystemProperty(XTestCase.OOZIE_TEST_NAME_NODE, "hdfs://xyz:9000");
+ setSystemProperty(XTestCase.OOZIE_TEST_JOB_TRACKER, "xyz:9001");
assertEquals("hdfs://xyz:9000", getNameNodeUri());
assertEquals("xyz:9001", getJobTrackerUri());
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/e5070b1b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XFsTestCase.java b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
index 7a419d1..20529e8 100644
--- a/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XLog;
+import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.command.wf.ActionXCommand.ActionExecutorContext;
import org.apache.oozie.service.HadoopAccessorException;
@@ -36,15 +37,16 @@ import org.apache.oozie.service.Services;
import java.io.IOException;
import java.net.URI;
+import java.net.URISyntaxException;
/**
* Base JUnit <code>TestCase</code> subclass used by all Oozie testcases that need Hadoop FS access. <p/> As part of its
* setup, this testcase class creates a unique test working directory per test method in the FS. <p/> The URI of the FS
- * namenode must be specified via the {@link TestConstants#OOZIE_TEST_NAME_NODE} system property. The default value is
+ * namenode must be specified via the {@link XTestCase#OOZIE_TEST_NAME_NODE} system property. The default value is
* 'hdfs://localhost:9000'.
*
* The test working directory is created in the specified FS URI, under the current user name home directory, under the
- * subdirectory name specified with the system property {@link TestConstants#OOZIE_TEST_DIR}. The default value is '/tmp'.
+ * subdirectory name specified with the system property {@link XTestCase#OOZIE_TEST_DIR}. The default value is '/tmp'.
* <p/> The path of the test working directory is: '$FS_URI/user/$USER/$OOZIE_TEST_DIR/oozietest/$TEST_CASE_CLASS/$TEST_CASE_METHOD/'
* <p/> For example: 'hdfs://localhost:9000/user/tucu/tmp/oozietest/org.apache.oozie.service.TestELService/testEL/'
*/
[24/48] oozie git commit: OOZIE-2592 fix Hive2&Sqoop action when
System.exit() called
Posted by pb...@apache.org.
OOZIE-2592 fix Hive2&Sqoop action when System.exit() called
Change-Id: Ia5db070d5c5287316393d56c626300c4e7777f34
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/5c4a79be
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/5c4a79be
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/5c4a79be
Branch: refs/heads/oya
Commit: 5c4a79bea4a9ec889c9856c4ef986c846454cc26
Parents: ddbd90f
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Tue Nov 22 15:39:00 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Tue Nov 22 16:55:39 2016 +0100
----------------------------------------------------------------------
.../oozie/action/hadoop/JavaActionExecutor.java | 44 ----------------
.../apache/oozie/action/hadoop/HiveMain.java | 7 ---
.../apache/oozie/action/hadoop/Hive2Main.java | 10 ++--
.../action/hadoop/TestHive2ActionExecutor.java | 55 +++++++++++---------
.../apache/oozie/action/hadoop/SqoopMain.java | 7 ---
5 files changed, 32 insertions(+), 91 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/5c4a79be/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
index 2ec5266..56226ee 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
@@ -37,7 +37,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
-import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.IOUtils;
@@ -52,8 +51,6 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TaskLog;
import org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
@@ -81,7 +78,6 @@ import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.action.ActionExecutorException;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.WorkflowAction;
-import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.command.coord.CoordActionStartXCommand;
import org.apache.oozie.service.ConfigurationService;
import org.apache.oozie.service.HadoopAccessorException;
@@ -333,46 +329,6 @@ public class JavaActionExecutor extends ActionExecutor {
}
}
- // FIXME: is this needed?
- private HashMap<String, List<String>> populateEnvMap(String input) {
- HashMap<String, List<String>> envMaps = new HashMap<String, List<String>>();
- String[] envEntries = input.split(",");
- for (String envEntry : envEntries) {
- String[] envKeyVal = envEntry.split("=");
- String envKey = envKeyVal[0].trim();
- List<String> valList = envMaps.get(envKey);
- if (valList == null) {
- valList = new ArrayList<String>();
- }
- valList.add(envKeyVal[1].trim());
- envMaps.put(envKey, valList);
- }
- return envMaps;
- }
-
- public int extractHeapSizeMB(String input) {
- int ret = 0;
- if(input == null || input.equals(""))
- return ret;
- Matcher m = heapPattern.matcher(input);
- String heapStr = null;
- String heapNum = null;
- // Grabs the last match which takes effect (in case that multiple Xmx options specified)
- while (m.find()) {
- heapStr = m.group(1);
- heapNum = m.group(2);
- }
- if (heapStr != null) {
- // when Xmx specified in Gigabyte
- if(heapStr.endsWith("g") || heapStr.endsWith("G")) {
- ret = Integer.parseInt(heapNum) * 1024;
- } else {
- ret = Integer.parseInt(heapNum);
- }
- }
- return ret;
- }
-
public static void parseJobXmlAndConfiguration(Context context, Element element, Path appPath, Configuration conf)
throws IOException, ActionExecutorException, HadoopAccessorException, URISyntaxException {
parseJobXmlAndConfiguration(context, element, appPath, conf, false);
http://git-wip-us.apache.org/repos/asf/oozie/blob/5c4a79be/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
----------------------------------------------------------------------
diff --git a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
index f59750b..242cd6c 100644
--- a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
+++ b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
@@ -317,13 +317,6 @@ public class HiveMain extends LauncherMain {
try {
runHive(arguments.toArray(new String[arguments.size()]));
}
- catch (SecurityException ex) {
- if (LauncherSecurityManager.getExitInvoked()) {
- if (LauncherSecurityManager.getExitCode() != 0) {
- throw ex;
- }
- }
- }
finally {
System.out.println("\n<<< Invocation of Hive command completed <<<\n");
writeExternalChildIDs(logFile, HIVE_JOB_IDS_PATTERNS, "Hive");
http://git-wip-us.apache.org/repos/asf/oozie/blob/5c4a79be/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
----------------------------------------------------------------------
diff --git a/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
index ccf2aff..b418b89 100644
--- a/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
+++ b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
@@ -240,13 +240,6 @@ public class Hive2Main extends LauncherMain {
try {
runBeeline(arguments.toArray(new String[arguments.size()]), logFile);
}
- catch (SecurityException ex) {
- if (LauncherSecurityManager.getExitInvoked()) {
- if (LauncherSecurityManager.getExitCode() != 0) {
- throw ex;
- }
- }
- }
finally {
System.out.println("\n<<< Invocation of Beeline command completed <<<\n");
writeExternalChildIDs(logFile, HIVE2_JOB_IDS_PATTERNS, "Beeline");
@@ -265,6 +258,9 @@ public class Hive2Main extends LauncherMain {
BeeLine beeLine = new BeeLine();
beeLine.setErrorStream(new PrintStream(new TeeOutputStream(System.err, new FileOutputStream(logFile))));
int status = beeLine.begin(args, null);
+ System.out.println("*** Return value from Beeline is: " + status);
+ System.out.println("*** Current security manager: " + System.getSecurityManager());
+ beeLine.close();
if (status != 0) {
System.exit(status);
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/5c4a79be/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java b/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
index 030942a..89bcd45 100644
--- a/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
+++ b/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
@@ -62,10 +62,9 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
setSystemProperty("oozie.service.ActionService.executor.classes", Hive2ActionExecutor.class.getName());
}
- @SuppressWarnings("unchecked")
public void testSetupMethodsForScript() throws Exception {
Hive2ActionExecutor ae = new Hive2ActionExecutor();
- List<Class> classes = new ArrayList<Class>();
+ List<Class<?>> classes = new ArrayList<>();
classes.add(Hive2Main.class);
assertEquals(classes, ae.getLauncherClasses());
@@ -103,10 +102,9 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
assertEquals("--dee", conf.get("oozie.hive2.args.1"));
}
- @SuppressWarnings("unchecked")
public void testSetupMethodsForQuery() throws Exception {
Hive2ActionExecutor ae = new Hive2ActionExecutor();
- List<Class> classes = new ArrayList<Class>();
+ List<Class<?>> classes = new ArrayList<>();
classes.add(Hive2Main.class);
assertEquals(classes, ae.getLauncherClasses());
@@ -245,28 +243,33 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
assertTrue(fs.exists(outputDir));
assertTrue(fs.isDirectory(outputDir));
}
- // Negative testcase with incorrect hive-query.
- {
- String query = getHive2BadScript(inputDir.toString(), outputDir.toString());
- Writer dataWriter = new OutputStreamWriter(fs.create(new Path(inputDir, DATA_FILENAME)));
- dataWriter.write(SAMPLE_DATA_TEXT);
- dataWriter.close();
- Context context = createContext(getQueryActionXml(query));
- final String launcherId = submitAction(context, Namespace.getNamespace("uri:oozie:hive2-action:0.2"));
- waitUntilYarnAppDoneAndAssertSuccess(launcherId);
- Configuration conf = new XConfiguration();
- conf.set("user.name", getTestUser());
- Map<String, String> actionData = LauncherMapperHelper.getActionData(getFileSystem(), context.getActionDir(),
- conf);
- assertFalse(LauncherMapperHelper.hasIdSwap(actionData));
- Hive2ActionExecutor ae = new Hive2ActionExecutor();
- ae.check(context, context.getAction());
- assertTrue(launcherId.equals(context.getAction().getExternalId()));
- assertEquals("FAILED/KILLED", context.getAction().getExternalStatus());
- ae.end(context, context.getAction());
- assertEquals(WorkflowAction.Status.ERROR, context.getAction().getStatus());
- assertNull(context.getExternalChildIDs());
- }
+ }
+
+ public void testHive2ActionFails() throws Exception {
+ setupHiveServer2();
+ Path inputDir = new Path(getFsTestCaseDir(), INPUT_DIRNAME);
+ Path outputDir = new Path(getFsTestCaseDir(), OUTPUT_DIRNAME);
+ FileSystem fs = getFileSystem();
+
+ String query = getHive2BadScript(inputDir.toString(), outputDir.toString());
+ Writer dataWriter = new OutputStreamWriter(fs.create(new Path(inputDir, DATA_FILENAME)));
+ dataWriter.write(SAMPLE_DATA_TEXT);
+ dataWriter.close();
+ Context context = createContext(getQueryActionXml(query));
+ final String launcherId = submitAction(context, Namespace.getNamespace("uri:oozie:hive2-action:0.2"));
+ waitUntilYarnAppDoneAndAssertSuccess(launcherId);
+ Configuration conf = new XConfiguration();
+ conf.set("user.name", getTestUser());
+ Map<String, String> actionData = LauncherMapperHelper.getActionData(getFileSystem(), context.getActionDir(),
+ conf);
+ assertFalse(LauncherMapperHelper.hasIdSwap(actionData));
+ Hive2ActionExecutor ae = new Hive2ActionExecutor();
+ ae.check(context, context.getAction());
+ assertTrue(launcherId.equals(context.getAction().getExternalId()));
+ assertEquals("FAILED/KILLED", context.getAction().getExternalStatus());
+ ae.end(context, context.getAction());
+ assertEquals(WorkflowAction.Status.ERROR, context.getAction().getStatus());
+ assertNull(context.getExternalChildIDs());
}
private String getHive2BadScript(String inputPath, String outputPath) {
http://git-wip-us.apache.org/repos/asf/oozie/blob/5c4a79be/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
----------------------------------------------------------------------
diff --git a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
index 9c6ec6c..92c960f 100644
--- a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
+++ b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
@@ -182,13 +182,6 @@ public class SqoopMain extends LauncherMain {
try {
runSqoopJob(sqoopArgs);
}
- catch (SecurityException ex) {
- if (LauncherSecurityManager.getExitInvoked()) {
- if (LauncherSecurityManager.getExitCode() != 0) {
- throw ex;
- }
- }
- }
finally {
System.out.println("\n<<< Invocation of Sqoop command completed <<<\n");
writeExternalChildIDs(logFile, SQOOP_JOB_IDS_PATTERNS, "Sqoop");
[36/48] oozie git commit: Revert "OOZIE-2729 change JT to RM in
methods"
Posted by pb...@apache.org.
Revert "OOZIE-2729 change JT to RM in methods"
This reverts commit ddbd90f88c511a714c35c39aedbce0ab2624724d.
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/7a67022a
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/7a67022a
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/7a67022a
Branch: refs/heads/oya
Commit: 7a67022ab37d527dbec87a728a8d01de419e17f1
Parents: 04b96c6
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Mon Nov 28 13:54:00 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Mon Nov 28 13:54:00 2016 +0100
----------------------------------------------------------------------
.../action/hadoop/TestDistCpActionExecutor.java | 2 +-
.../action/hadoop/TestJavaActionExecutor.java | 99 ++++++++++----------
.../oozie/action/hadoop/TestLauncher.java | 2 +-
.../oozie/action/hadoop/TestOozieJobInfo.java | 4 +-
.../apache/oozie/action/hadoop/TestRerun.java | 2 +-
.../action/hadoop/TestShellActionExecutor.java | 20 ++--
.../oozie/action/hadoop/TestShellMain.java | 2 +-
.../oozie/TestSubWorkflowActionExecutor.java | 4 +-
.../coord/TestCoordActionStartXCommand.java | 2 +-
.../command/coord/TestCoordRerunXCommand.java | 6 +-
.../command/wf/TestActionCheckXCommand.java | 6 +-
.../command/wf/TestActionStartXCommand.java | 8 +-
.../oozie/command/wf/TestReRunXCommand.java | 2 +-
.../oozie/command/wf/TestSubmitXCommand.java | 2 +-
.../wf/TestWorkflowActionKillXCommand.java | 2 +-
.../apache/oozie/event/TestEventGeneration.java | 2 +-
.../TestCoordJobsGetForPurgeJPAExecutor.java | 2 +-
...estCoordJobsToBeMaterializedJPAExecutor.java | 2 +-
.../service/TestHadoopAccessorService.java | 12 ++-
.../oozie/service/TestRecoveryService.java | 2 +-
.../oozie/service/TestShareLibService.java | 14 +--
.../org/apache/oozie/test/TestXTestCase.java | 2 +-
.../org/apache/oozie/test/XDataTestCase.java | 10 +-
.../java/org/apache/oozie/test/XFsTestCase.java | 4 +-
.../java/org/apache/oozie/test/XTestCase.java | 28 +++---
.../action/hadoop/TestHiveActionExecutor.java | 13 ++-
.../action/hadoop/TestHive2ActionExecutor.java | 15 ++-
.../action/hadoop/TestPigActionExecutor.java | 8 +-
.../apache/oozie/action/hadoop/TestPyspark.java | 8 +-
.../action/hadoop/TestSparkActionExecutor.java | 13 ++-
.../action/hadoop/TestSqoopActionExecutor.java | 17 +++-
.../hadoop/TestMapReduceActionExecutor.java | 44 ++++-----
32 files changed, 202 insertions(+), 157 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
index c953bb8..c1f0e6f 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
@@ -50,7 +50,7 @@ public class TestDistCpActionExecutor extends ActionExecutorTestCase{
os.close();
String actionXml = "<distcp>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<arg>" + inputPath + "</arg>"+
"<arg>" + outputPath + "</arg>" +
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
index 2b31207..bfc8ab4 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.action.ActionExecutor;
@@ -86,7 +87,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
setSystemProperty("oozie.service.ActionService.executor.classes", JavaActionExecutor.class.getName());
setSystemProperty("oozie.service.HadoopAccessorService.action.configurations",
- "*=hadoop-conf," + getResourceManagerUri() + "=action-conf");
+ "*=hadoop-conf," + getJobTrackerUri() + "=action-conf");
setSystemProperty(WorkflowAppService.SYSTEM_LIB_PATH, getFsTestCaseDir().toUri().getPath() + "/systemlib");
new File(getTestCaseConfDir(), "action-conf").mkdir();
InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream("test-action-config.xml");
@@ -134,7 +135,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
fail();
}
- Element actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ Element actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<job-xml>job.xml</job-xml>" + "<job-xml>job2.xml</job-xml>" + "<configuration>" +
"<property><name>oozie.launcher.a</name><value>LA</value></property>" +
@@ -195,7 +196,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
conf = ae.createBaseHadoopConf(context, actionXml);
assertEquals(protoConf.get(WorkflowAppService.HADOOP_USER), conf.get(WorkflowAppService.HADOOP_USER));
- assertEquals(getResourceManagerUri(), conf.get("yarn.resourcemanager.address"));
+ assertEquals(getJobTrackerUri(), conf.get("yarn.resourcemanager.address"));
assertEquals(getNameNodeUri(), conf.get("fs.default.name"));
conf = ae.createBaseHadoopConf(context, actionXml);
@@ -267,7 +268,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
// FIXME - this file exists - must use the correct path
// assertTrue(getFileSystem().exists(new Path(context.getActionDir(), LauncherMapper.ACTION_CONF_XML)));
- actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>mapred.job.queue.name</name><value>AQ</value></property>" +
"<property><name>oozie.action.sharelib.for.java</name><value>sharelib-java</value></property>" +
@@ -280,7 +281,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertEquals("AQ", actionConf.get("mapred.job.queue.name"));
assertEquals("sharelib-java", actionConf.get("oozie.action.sharelib.for.java"));
- actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>oozie.launcher.mapred.job.queue.name</name><value>LQ</value></property>" +
"</configuration>" + "<main-class>MAIN-CLASS</main-class>" +
@@ -290,7 +291,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
conf = ae.createLauncherConf(getFileSystem(), context, action, actionXml, actionConf);
assertEquals("LQ", conf.get("mapred.job.queue.name"));
- actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ actionXml = XmlUtils.parseXml("<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>oozie.launcher.mapred.job.queue.name</name><value>LQ</value></property>" +
"<property><name>mapred.job.queue.name</name><value>AQ</value></property>" +
@@ -353,7 +354,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testSimpestSleSubmitOK() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"</java>";
@@ -371,7 +372,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testOutputSubmitOK() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>out</arg>" +
@@ -396,7 +397,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testIdSwapSubmitOK() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>id</arg>" +
@@ -425,7 +426,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
IOUtils.copyStream(is, os);
String actionXml = "<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester2.class.getName() + "</main-class>" +
"<file>" + appJarPath.toString() + "</file>" +
@@ -446,7 +447,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testExit0SubmitOK() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>exit0</arg>" +
@@ -467,7 +468,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testExit1SubmitError() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>exit1</arg>" +
@@ -490,7 +491,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testExceptionSubmitException() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>exception</arg>" +
@@ -512,7 +513,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testExceptionSubmitThrowable() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"<arg>throwable</arg>" +
@@ -534,7 +535,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testKill() throws Exception {
String actionXml = "<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"</java>";
@@ -551,7 +552,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testRecovery() throws Exception {
final String actionXml = "<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + LauncherMainTester.class.getName() + "</main-class>" +
"</java>";
@@ -611,7 +612,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
getFileSystem().create(rootArchive).close();
String actionXml = "<java>" +
- " <job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ " <job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
" <name-node>" + getNameNodeUri() + "</name-node>" +
" <main-class>CLASS</main-class>" +
" <file>" + jar.toString() + "</file>\n" +
@@ -719,7 +720,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
getFileSystem().create(rootArchive).close();
String actionXml = "<java>" +
- " <job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ " <job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
" <name-node>" + getNameNodeUri() + "</name-node>" +
" <main-class>CLASS</main-class>" +
" <file>" + jar.toString() +
@@ -801,7 +802,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
fs.mkdirs(delete);
String actionXml = "<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<prepare>" +
"<mkdir path='" + mkdir + "'/>" +
@@ -1189,7 +1190,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
}
public void testJavaOpts() throws Exception {
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<configuration>" + "<property><name>oozie.launcher.a</name><value>LA</value></property>"
+ "<property><name>a</name><value>AA</value></property>"
@@ -1215,7 +1216,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertEquals("-Xmx200m JAVA-OPT1 JAVA-OPT2", conf.get("mapred.child.java.opts"));
assertEquals("-Xmx200m JAVA-OPT1 JAVA-OPT2", conf.get("mapreduce.map.java.opts"));
- actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<configuration>" + "<property><name>oozie.launcher.a</name><value>LA</value></property>"
+ "<property><name>a</name><value>AA</value></property>"
@@ -1240,7 +1241,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertEquals("-Xmx200m JAVA-OPT1 JAVA-OPT2", conf.get("mapred.child.java.opts"));
assertEquals("-Xmx200m JAVA-OPT1 JAVA-OPT2", conf.get("mapreduce.map.java.opts"));
- actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<configuration>" + "<property><name>oozie.launcher.a</name><value>LA</value></property>"
+ "<property><name>a</name><value>AA</value></property>"
@@ -1267,7 +1268,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertEquals("JAVA-OPT3 JAVA-OPT1 JAVA-OPT2", conf.get("mapred.child.java.opts"));
assertEquals("JAVA-OPT3 JAVA-OPT1 JAVA-OPT2", conf.get("mapreduce.map.java.opts"));
- actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<configuration>" + "<property><name>oozie.launcher.a</name><value>LA</value></property>"
+ "<property><name>a</name><value>AA</value></property>"
@@ -1294,7 +1295,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertEquals("-Xmx200m JAVA-OPT3 JAVA-OPT1 JAVA-OPT2", conf.get("mapred.child.java.opts"));
assertEquals("-Xmx200m JAVA-OPT3 JAVA-OPT1 JAVA-OPT2", conf.get("mapreduce.map.java.opts"));
- actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<configuration>" + "<property><name>oozie.launcher.a</name><value>LA</value></property>"
+ "<property><name>a</name><value>AA</value></property>"
@@ -1332,7 +1333,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Path jar2Path = new Path(actionLibPath, "jar2.jar");
getFileSystem().create(jar2Path).close();
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" + "<configuration>" +
"<property><name>oozie.launcher.oozie.libpath</name><value>" + actionLibPath + "</value></property>" +
"</configuration>" + "<main-class>MAIN-CLASS</main-class>" +
@@ -1355,7 +1356,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Path jar3Path = new Path(getFsTestCaseDir(), "jar3.jar");
getFileSystem().create(jar3Path).close();
- actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" + "<configuration>" +
"<property><name>oozie.launcher.oozie.libpath</name><value>" + jar3Path + "</value></property>" +
"</configuration>" + "<main-class>MAIN-CLASS</main-class>" +
@@ -1374,7 +1375,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
assertTrue(cacheFilesStr.contains(jar3Path.toString()));
// Test adding a directory and a file (comma separated)
- actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" + "<configuration>" +
"<property><name>oozie.launcher.oozie.libpath</name><value>" + actionLibPath + "," + jar3Path +
"</value></property>" +
@@ -1423,7 +1424,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Path jar5Path = new Path(otherShareLibPath, "jar5.jar");
getFileSystem().create(jar5Path).close();
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>MAIN-CLASS</main-class>" +
"</java>";
@@ -1503,7 +1504,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
return "java-action-executor";
}
};
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNode2Uri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName()
+ "</main-class>" + "</java>";
Element eActionXml = XmlUtils.parseXml(actionXml);
@@ -1538,7 +1539,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testFilesystemScheme() throws Exception {
try {
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName()
+ "</main-class>" + "</java>";
Element eActionXml = XmlUtils.parseXml(actionXml);
@@ -1561,7 +1562,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testACLDefaults_launcherACLsSetToDefault() throws Exception {
// CASE: launcher specific ACLs not configured - set defaults
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>mapreduce.job.acl-view-job</name><value>VIEWER</value></property>" +
"<property><name>mapreduce.job.acl-modify-job</name><value>MODIFIER</value></property>" +
@@ -1586,7 +1587,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testACLDefaults_noFalseChange() throws Exception {
// CASE: launcher specific ACLs configured, but MR job ACLs not configured i.e. null. Check for no false changes to null
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>oozie.launcher.mapreduce.job.acl-view-job</name><value>V</value></property>" +
"<property><name>oozie.launcher.mapreduce.job.acl-modify-job</name><value>M</value></property>" +
@@ -1611,7 +1612,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testACLDefaults_explicitLauncherAndActionSettings() throws Exception {
// CASE: launcher specific ACLs configured, as well as MR job ACLs configured. Check that NO overriding with defaults
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>oozie.launcher.mapreduce.job.acl-view-job</name><value>V</value></property>" +
"<property><name>oozie.launcher.mapreduce.job.acl-modify-job</name><value>M</value></property>" +
@@ -1639,7 +1640,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testACLModifyJob() throws Exception {
// CASE 1: If user has provided modify-acl value
// then it should NOT be overridden by group name
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"<property><name>mapreduce.job.acl-modify-job</name><value>MODIFIER</value></property>" +
"</configuration>" + "<main-class>MAIN-CLASS</main-class>" +
@@ -1656,7 +1657,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
// CASE 2: If user has not provided modify-acl value
// then it equals group name
- actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node> <configuration>" +
"</configuration>" + "<main-class>MAIN-CLASS</main-class>" +
"</java>";
@@ -1788,7 +1789,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
//Test UpdateCOnfForJavaTmpDir for launcherConf
String actionXml1 = "<java>"
+ "<job-tracker>"
- + getResourceManagerUri()
+ + getJobTrackerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1824,7 +1825,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
//Test UpdateConfForJavaTmpDIr for actionConf
String actionXml = "<java>"
+ "<job-tracker>"
- + getResourceManagerUri()
+ + getJobTrackerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1852,7 +1853,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Element actionXml = XmlUtils
.parseXml("<java>"
+ "<job-tracker>"
- + getResourceManagerUri()
+ + getJobTrackerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1886,7 +1887,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Element actionXmlWithTez = XmlUtils
.parseXml("<java>"
+ "<job-tracker>"
- + getResourceManagerUri()
+ + getJobTrackerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -1902,7 +1903,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
Element actionXmlATSDisabled = XmlUtils
.parseXml("<java>"
+ "<job-tracker>"
- + getResourceManagerUri()
+ + getJobTrackerUri()
+ "</job-tracker>"
+ "<name-node>"
+ getNameNodeUri()
@@ -2079,7 +2080,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
// (first should be used)
// 4. Fully qualified path located in the second filesystem
String str = "<java>"
- + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ "<name-node>" + getNameNode2Uri() + "</name-node>"
+ "<job-xml>" + jobXmlAbsolutePath.toString() + "</job-xml>"
+ "<job-xml>job2.xml</job-xml>"
@@ -2169,7 +2170,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
is = new FileInputStream(jarFile);
IOUtils.copyStream(is, os3);
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNode2Uri() + "</name-node>" +
"<job-xml>job.xml</job-xml>" +
"<main-class>"+ LauncherMainTester.class.getName() + "</main-class>" +
@@ -2212,7 +2213,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
w.write("A = load '$INPUT' using PigStorage(':');\n");
w.write("store B into '$OUTPUT' USING PigStorage();\n");
w.close();
- String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<prepare>" + "<delete path='outputdir' />" + "</prepare>"
+ "<configuration>" + "<property>" + "<name>mapred.compress.map.output</name>" + "<value>true</value>"
+ "</property>" + "<property>" + "<name>mapred.job.queue.name</name>" + "<value>default</value>"
@@ -2236,7 +2237,7 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
public void testDefaultConfigurationInLauncher() throws Exception {
JavaActionExecutor ae = new JavaActionExecutor();
Element actionXmlWithConfiguration = XmlUtils.parseXml(
- "<java>" + "<job-tracker>" + getResourceManagerUri() +"</job-tracker>" +
+ "<java>" + "<job-tracker>" + getJobTrackerUri() +"</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<configuration>" +
"<property><name>oozie.launcher.a</name><value>AA</value></property>" +
@@ -2245,16 +2246,16 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
"<main-class>MAIN-CLASS</main-class>" +
"</java>");
Element actionXmlWithoutConfiguration = XmlUtils.parseXml(
- "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>MAIN-CLASS</main-class>" +
"</java>");
Configuration conf = new Configuration(false);
Assert.assertEquals(0, conf.size());
- conf.set("yarn.resourcemanager.address", getResourceManagerUri());
+ conf.set("yarn.resourcemanager.address", getJobTrackerUri());
ae.setupLauncherConf(conf, actionXmlWithConfiguration, null, null);
- assertEquals(getResourceManagerUri(), conf.get("yarn.resourcemanager.address"));
+ assertEquals(getJobTrackerUri(), conf.get("yarn.resourcemanager.address"));
assertEquals("AA", conf.get("oozie.launcher.a"));
assertEquals("AA", conf.get("a"));
assertEquals("action.barbar", conf.get("oozie.launcher.action.foofoo"));
@@ -2263,9 +2264,9 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
conf = new Configuration(false);
Assert.assertEquals(0, conf.size());
- conf.set("yarn.resourcemanager.address", getResourceManagerUri());
+ conf.set("yarn.resourcemanager.address", getJobTrackerUri());
ae.setupLauncherConf(conf, actionXmlWithoutConfiguration, null, null);
- assertEquals(getResourceManagerUri(), conf.get("yarn.resourcemanager.address"));
+ assertEquals(getJobTrackerUri(), conf.get("yarn.resourcemanager.address"));
assertEquals("action.barbar", conf.get("oozie.launcher.action.foofoo"));
assertEquals("action.barbar", conf.get("action.foofoo"));
assertEquals(3, conf.size());
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/action/hadoop/TestLauncher.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestLauncher.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestLauncher.java
index 1005274..e7b9534 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestLauncher.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestLauncher.java
@@ -79,7 +79,7 @@ public class TestLauncher extends XFsTestCase {
jobConf.setInt("mapred.reduce.max.attempts", 1);
jobConf.set("mapreduce.framework.name", "yarn");
- jobConf.set("mapred.job.tracker", getResourceManagerUri());
+ jobConf.set("mapred.job.tracker", getJobTrackerUri());
jobConf.set("fs.default.name", getNameNodeUri());
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/action/hadoop/TestOozieJobInfo.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestOozieJobInfo.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestOozieJobInfo.java
index d3dd377..9efacdd 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestOozieJobInfo.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestOozieJobInfo.java
@@ -202,7 +202,7 @@ public class TestOozieJobInfo extends XDataTestCase {
writer2.write(APP1);
writer2.close();
jobConf.set(OozieClient.USER_NAME, getTestUser());
- jobConf.set("myJobTracker", getResourceManagerUri());
+ jobConf.set("myJobTracker", getJobTrackerUri());
jobConf.set("myNameNode", getNameNodeUri());
jobConf.set("wfAppPath", new Path(wfAppPath, "workflow.xml").toString());
jobConf.set("mrclass", MapperReducerForTest.class.getName());
@@ -239,7 +239,7 @@ public class TestOozieJobInfo extends XDataTestCase {
Configuration conf = new XConfiguration();
conf.set(OozieClient.BUNDLE_APP_PATH, bundleAppPath.toString());
conf.set(OozieClient.USER_NAME, getTestUser());
- conf.set("jobTracker", getResourceManagerUri());
+ conf.set("jobTracker", getJobTrackerUri());
conf.set("nameNode", getNameNodeUri());
conf.set("appName", "bundle-app-name");
conf.set("start", "2009-02-01T00:00Z");
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/action/hadoop/TestRerun.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestRerun.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestRerun.java
index fbb0512..cf9ce1b 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestRerun.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestRerun.java
@@ -90,7 +90,7 @@ public class TestRerun extends XFsTestCase {
final OozieClient wfClient = LocalOozie.getClient();
Properties conf = wfClient.createConfiguration();
conf.setProperty(OozieClient.APP_PATH, new Path(appPath, "workflow.xml").toString());
- conf.setProperty("jobTracker", getResourceManagerUri());
+ conf.setProperty("jobTracker", getJobTrackerUri());
conf.setProperty("nameNode", getNameNodeUri());
conf.setProperty("mrclass", MapperReducerForTest.class.getName());
conf.setProperty("input", input.toString());
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellActionExecutor.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellActionExecutor.java
index fa13548..931307e 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellActionExecutor.java
@@ -26,10 +26,16 @@ import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobID;
+import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.util.Shell;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
+import org.apache.oozie.service.ActionService;
+import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.util.PropertiesUtils;
@@ -75,7 +81,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
public void testSetupMethods() throws Exception {
ShellActionExecutor ae = new ShellActionExecutor();
assertNull(ae.getLauncherClasses());
- Element actionXml = XmlUtils.parseXml("<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>"
+ Element actionXml = XmlUtils.parseXml("<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>"
+ "<name-node>" + getNameNodeUri() + "</name-node>" + "<exec>SCRIPT</exec>"
+ "<argument>a=A</argument>" + "<argument>b=B</argument>" + "</shell>");
@@ -117,7 +123,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
w.close();
// Create sample Shell action xml
- String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<exec>" + SHELL_EXEC + "</exec>" + "<argument>" + SHELL_PARAM + "</argument>"
+ "<argument>" + SHELL_SCRIPTNAME + "</argument>" + "<argument>A</argument>" + "<argument>B</argument>"
+ "<env-var>var1=val1</env-var>" + "<env-var>var2=val2</env-var>" + "<file>" + script.toString()
@@ -143,7 +149,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
w.close();
// Create sample Shell action xml
- String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>oozie.action.shell.setup.hadoop.conf.dir</name><value>true</value></property>"
+ "</configuration>" + "<exec>" + SHELL_EXEC + "</exec>" + "<argument>" + SHELL_PARAM + "</argument>"
@@ -187,7 +193,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
w.close();
// Create sample Shell action xml
- String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>oozie.action.shell.setup.hadoop.conf.dir</name><value>true</value></property>"
+ "<property><name>oozie.action.shell.setup.hadoop.conf.dir.write.log4j.properties"
@@ -217,7 +223,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
w.close();
// Create sample shell action xml
- String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<exec>" + SHELL_EXEC + "</exec>" + "<argument>" + SHELL_PARAM + "</argument>"
+ "<argument>" + SHELL_SCRIPTNAME + "</argument>" + "<argument>A</argument>" + "<argument>B</argument>" + "<file>"
+ script.toString() + "#" + script.getName() + "</file>" + "</shell>";
@@ -243,7 +249,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
w.write(PERL_SCRIPT_CONTENT);
w.close();
// Create a Sample Shell action using the perl script
- String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<exec>perl</exec>" + "<argument>script.pl</argument>"
+ "<argument>A</argument>" + "<argument>B</argument>" + "<env-var>my_var1=my_val1</env-var>" + "<file>"
+ script.toString() + "#" + script.getName() + "</file>" + "<capture-output/>" + "</shell>";
@@ -270,7 +276,7 @@ public class TestShellActionExecutor extends ActionExecutorTestCase {
String envValueHavingEqualSign = "a=b;c=d";
// Create sample shell action xml
- String actionXml = "<shell>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<shell>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<exec>" + SHELL_EXEC + "</exec>" + "<argument>" + SHELL_PARAM + "</argument>"
+ "<argument>" + SHELL_SCRIPTNAME + "</argument>" + "<argument>A</argument>" + "<argument>B</argument>"
+ "<env-var>var1=val1</env-var>" + "<env-var>var2=" + envValueHavingEqualSign + "</env-var>" + "<file>" + script.toString()
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
index eb442f0..d184a5a 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestShellMain.java
@@ -47,7 +47,7 @@ public class TestShellMain extends ShellTestCase {
jobConf.setInt("mapred.map.tasks", 1);
jobConf.setInt("mapred.map.max.attempts", 1);
jobConf.setInt("mapred.reduce.max.attempts", 1);
- jobConf.set("mapred.job.tracker", getResourceManagerUri());
+ jobConf.set("mapred.job.tracker", getJobTrackerUri());
jobConf.set("fs.default.name", getNameNodeUri());
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/action/oozie/TestSubWorkflowActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/oozie/TestSubWorkflowActionExecutor.java b/core/src/test/java/org/apache/oozie/action/oozie/TestSubWorkflowActionExecutor.java
index f5c7c2e..e074d48 100644
--- a/core/src/test/java/org/apache/oozie/action/oozie/TestSubWorkflowActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/oozie/TestSubWorkflowActionExecutor.java
@@ -559,7 +559,7 @@ public class TestSubWorkflowActionExecutor extends ActionExecutorTestCase {
"<start to='java' />" +
" <action name='java'>" +
"<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + JavaSleepAction.class.getName() + "</main-class>" +
"<arg>exit0</arg>" +
@@ -850,7 +850,7 @@ public class TestSubWorkflowActionExecutor extends ActionExecutorTestCase {
"<start to='java' />" +
"<action name='java'>" +
"<java>" +
- " <job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ " <job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
" <name-node>" + getNameNodeUri() + "</name-node>" +
" <configuration>" +
" <property>" +
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/command/coord/TestCoordActionStartXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/coord/TestCoordActionStartXCommand.java b/core/src/test/java/org/apache/oozie/command/coord/TestCoordActionStartXCommand.java
index 172479b..db66c72 100644
--- a/core/src/test/java/org/apache/oozie/command/coord/TestCoordActionStartXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/coord/TestCoordActionStartXCommand.java
@@ -198,7 +198,7 @@ public class TestCoordActionStartXCommand extends XDataTestCase {
jobConf.setProperty(OozieClient.COORDINATOR_APP_PATH, coordAppPath.toString());
jobConf.setProperty(OozieClient.USER_NAME, getTestUser());
jobConf.setProperty(OozieClient.GROUP_NAME, getTestGroup());
- jobConf.setProperty("myJobTracker", getResourceManagerUri());
+ jobConf.setProperty("myJobTracker", getJobTrackerUri());
jobConf.setProperty("myNameNode", getNameNodeUri());
jobConf.setProperty("wfAppPath", new Path(wfAppPath, "workflow.xml").toString());
jobConf.setProperty("mrclass", MapperReducerForTest.class.getName());
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java b/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
index d956b6c..6ae0ae2 100644
--- a/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/coord/TestCoordRerunXCommand.java
@@ -1083,7 +1083,7 @@ public class TestCoordRerunXCommand extends XDataTestCase {
final OozieClient coordClient = LocalOozie.getCoordClient();
Properties conf = coordClient.createConfiguration();
conf.setProperty(OozieClient.COORDINATOR_APP_PATH, appPath.toString());
- conf.setProperty("jobTracker", getResourceManagerUri());
+ conf.setProperty("jobTracker", getJobTrackerUri());
conf.setProperty("nameNode", getNameNodeUri());
conf.setProperty("wfAppPath", wfAppPath.toString());
conf.remove("user.name");
@@ -1293,7 +1293,7 @@ public class TestCoordRerunXCommand extends XDataTestCase {
final OozieClient coordClient = LocalOozie.getCoordClient();
Properties conf = coordClient.createConfiguration();
conf.setProperty(OozieClient.COORDINATOR_APP_PATH, appPath.toString());
- conf.setProperty("jobTracker", getResourceManagerUri());
+ conf.setProperty("jobTracker", getJobTrackerUri());
conf.setProperty("nameNode", getNameNodeUri());
conf.setProperty("wfAppPath", wfAppPath.toString());
conf.remove("user.name");
@@ -1306,7 +1306,7 @@ public class TestCoordRerunXCommand extends XDataTestCase {
"<start to='java' />" +
" <action name='java'>" +
"<java>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<main-class>" + JavaSleepAction.class.getName() + "</main-class>" +
"<arg>exit0</arg>" +
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/command/wf/TestActionCheckXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestActionCheckXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestActionCheckXCommand.java
index 4151182..135c34a 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestActionCheckXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestActionCheckXCommand.java
@@ -368,7 +368,7 @@ public class TestActionCheckXCommand extends XDataTestCase {
// At this point, the launcher job has started (but not finished)
// Now, shutdown the job tracker to pretend it has gone down during the launcher job
- executeWhileResourceManagerIsShutdown(new ShutdownResourceManagerExecutable() {
+ executeWhileJobTrackerIsShutdown(new ShutdownJobTrackerExecutable() {
@Override
public void execute() throws Exception {
assertEquals(0, action1.getRetries());
@@ -505,7 +505,7 @@ public class TestActionCheckXCommand extends XDataTestCase {
// At this point, the launcher job has finished and the map-reduce action has started (but not finished)
// Now, shutdown the job tracker to pretend it has gone down during the map-reduce job
- executeWhileResourceManagerIsShutdown(new ShutdownResourceManagerExecutable() {
+ executeWhileJobTrackerIsShutdown(new ShutdownJobTrackerExecutable() {
@Override
public void execute() throws Exception {
assertEquals(0, action1.getRetries());
@@ -647,7 +647,7 @@ public class TestActionCheckXCommand extends XDataTestCase {
w.close();
String actionXml = "<map-reduce>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<prepare><delete path=\"" + outputDir.toString() + "\"/></prepare>" +
"<configuration>" +
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/command/wf/TestActionStartXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestActionStartXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestActionStartXCommand.java
index 7c7ea5e..59cfdd2 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestActionStartXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestActionStartXCommand.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
@@ -47,6 +48,7 @@ import org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQ
import org.apache.oozie.executor.jpa.WorkflowJobInsertJPAExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;
+import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.InstrumentationService;
import org.apache.oozie.service.JPAService;
import org.apache.oozie.service.LiteWorkflowStoreService;
@@ -329,7 +331,7 @@ public class TestActionStartXCommand extends XDataTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName()
+ "</value></property>" +
@@ -415,7 +417,7 @@ public class TestActionStartXCommand extends XDataTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName()
+ "</value></property>" + "<property><name>mapred.reducer.class</name><value>"
@@ -522,7 +524,7 @@ public class TestActionStartXCommand extends XDataTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName()
+ "</value></property>" + "<property><name>mapred.reducer.class</name><value>"
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/command/wf/TestReRunXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestReRunXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestReRunXCommand.java
index 298dd1e..45cbbc4 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestReRunXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestReRunXCommand.java
@@ -144,7 +144,7 @@ public class TestReRunXCommand extends XDataTestCase {
final OozieClient wfClient = LocalOozie.getClient();
Properties conf = wfClient.createConfiguration();
conf.setProperty("nameNode", getNameNodeUri());
- conf.setProperty("jobTracker", getResourceManagerUri());
+ conf.setProperty("jobTracker", getJobTrackerUri());
conf.setProperty(OozieClient.APP_PATH, getTestCaseFileUri("workflow.xml"));
conf.setProperty(OozieClient.USER_NAME, getTestUser());
conf.setProperty("cmd4", "echo1"); //expected to fail
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitXCommand.java
index c1e2a5b..47ff8ca 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitXCommand.java
@@ -345,7 +345,7 @@ public class TestSubmitXCommand extends XDataTestCase {
writeToFile(wfXml, workflowUri);
Configuration conf = new XConfiguration();
conf.set("nameNode", getNameNodeUri());
- conf.set("jobTracker", getResourceManagerUri());
+ conf.set("jobTracker", getJobTrackerUri());
conf.set("foobarRef", "foobarRef");
conf.set("key", "job_prop_value");
conf.set(OozieClient.APP_PATH, workflowUri);
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
index 1561ddc..05ceb90 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
@@ -131,7 +131,7 @@ public class TestWorkflowActionKillXCommand extends XDataTestCase {
action.setExternalChildIDs(childID);
String actionXml = "<map-reduce>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<configuration>" +
"<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName() +
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/event/TestEventGeneration.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/event/TestEventGeneration.java b/core/src/test/java/org/apache/oozie/event/TestEventGeneration.java
index 6fd033f..59d0420 100644
--- a/core/src/test/java/org/apache/oozie/event/TestEventGeneration.java
+++ b/core/src/test/java/org/apache/oozie/event/TestEventGeneration.java
@@ -707,7 +707,7 @@ public class TestEventGeneration extends XDataTestCase {
action.setLastCheckTime(new Date());
action.setPendingOnly();
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<main-class>" + "${dummy}" + "</java>";
action.setConf(actionXml);
jpaService.execute(new WorkflowActionInsertJPAExecutor(action));
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsGetForPurgeJPAExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsGetForPurgeJPAExecutor.java b/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsGetForPurgeJPAExecutor.java
index 6d6acb4..39bbee4 100644
--- a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsGetForPurgeJPAExecutor.java
+++ b/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsGetForPurgeJPAExecutor.java
@@ -177,7 +177,7 @@ public class TestCoordJobsGetForPurgeJPAExecutor extends XFsTestCase {
final OozieClient coordClient = LocalOozie.getCoordClient();
Properties conf = coordClient.createConfiguration();
conf.setProperty(OozieClient.COORDINATOR_APP_PATH, appPath.toString());
- conf.setProperty("jobTracker", getResourceManagerUri());
+ conf.setProperty("jobTracker", getJobTrackerUri());
conf.setProperty("nameNode", getNameNodeUri());
conf.setProperty("wfAppPath", wfAppPath.toString());
conf.remove("user.name");
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsToBeMaterializedJPAExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsToBeMaterializedJPAExecutor.java b/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsToBeMaterializedJPAExecutor.java
index cd6c35f..3b72ecd 100644
--- a/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsToBeMaterializedJPAExecutor.java
+++ b/core/src/test/java/org/apache/oozie/executor/jpa/TestCoordJobsToBeMaterializedJPAExecutor.java
@@ -165,7 +165,7 @@ public class TestCoordJobsToBeMaterializedJPAExecutor extends XFsTestCase {
final OozieClient coordClient = LocalOozie.getCoordClient();
Properties conf = coordClient.createConfiguration();
conf.setProperty(OozieClient.COORDINATOR_APP_PATH, appPath.toString());
- conf.setProperty("jobTracker", getResourceManagerUri());
+ conf.setProperty("jobTracker", getJobTrackerUri());
conf.setProperty("nameNode", getNameNodeUri());
conf.setProperty("wfAppPath", wfAppPath.toString());
conf.remove("user.name");
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java b/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
index 6ded256..e671b7a 100644
--- a/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
@@ -20,11 +20,13 @@ package org.apache.oozie.service;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.authorize.*;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.client.api.YarnClient;
+import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.oozie.test.XFsTestCase;
import org.apache.hadoop.mapred.JobConf;
@@ -152,7 +154,7 @@ public class TestHadoopAccessorService extends XFsTestCase {
public void testCreateJobClient() throws Exception {
HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
- JobConf conf = has.createJobConf(getResourceManagerUri());
+ JobConf conf = has.createJobConf(getJobTrackerUri());
JobClient jc = has.createJobClient(getTestUser(), conf);
assertNotNull(jc);
@@ -167,7 +169,7 @@ public class TestHadoopAccessorService extends XFsTestCase {
}
JobConf conf2 = new JobConf(false);
- conf2.set("mapred.job.tracker", getResourceManagerUri());
+ conf2.set("mapred.job.tracker", getJobTrackerUri());
try {
has.createJobClient(getTestUser(), conf2);
fail("Should have thrown exception because Configuration not created by HadoopAccessorService");
@@ -179,7 +181,7 @@ public class TestHadoopAccessorService extends XFsTestCase {
public void testCreateYarnClient() throws Exception {
HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
- JobConf conf = has.createJobConf(getResourceManagerUri());
+ JobConf conf = has.createJobConf(getJobTrackerUri());
YarnClient yc = has.createYarnClient(getTestUser(), conf);
assertNotNull(yc);
@@ -195,7 +197,7 @@ public class TestHadoopAccessorService extends XFsTestCase {
}
JobConf conf2 = new JobConf(false);
- conf2.set("yarn.resourcemanager.address", getResourceManagerUri());
+ conf2.set("yarn.resourcemanager.address", getJobTrackerUri());
try {
has.createYarnClient(getTestUser(), conf2);
fail("Should have thrown exception because Configuration not created by HadoopAccessorService");
@@ -207,7 +209,7 @@ public class TestHadoopAccessorService extends XFsTestCase {
public void testCreateFileSystem() throws Exception {
HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
- JobConf conf = has.createJobConf(getResourceManagerUri());
+ JobConf conf = has.createJobConf(getJobTrackerUri());
FileSystem fs = has.createFileSystem(getTestUser(), new URI(getNameNodeUri()), conf);
assertNotNull(fs);
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java b/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
index c949efe..a3270e9 100644
--- a/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestRecoveryService.java
@@ -905,7 +905,7 @@ public class TestRecoveryService extends XDataTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName()
+ "</value></property>" + "<property><name>mapred.reducer.class</name><value>"
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestShareLibService.java b/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
index dc6f6ef..390545d 100644
--- a/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
@@ -175,7 +175,7 @@ public class TestShareLibService extends XFsTestCase {
try {
services.init();
- String actionXml = "<java>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "</java>";
Element eActionXml = XmlUtils.parseXml(actionXml);
XConfiguration protoConf = new XConfiguration();
@@ -203,7 +203,7 @@ public class TestShareLibService extends XFsTestCase {
conf.set(ShareLibService.SHIP_LAUNCHER_JAR, "true");
try {
services.init();
- String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "</pig>";
Element eActionXml = XmlUtils.parseXml(actionXml);
XConfiguration protoConf = new XConfiguration();
@@ -249,7 +249,7 @@ public class TestShareLibService extends XFsTestCase {
try {
services.init();
- String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<property><name>oozie.action.sharelib.for.pig</name><value>pig_10</value></property>" + "</pig>";
Element eActionXml = XmlUtils.parseXml(actionXml);
@@ -537,7 +537,7 @@ public class TestShareLibService extends XFsTestCase {
createFile(libpath.toString() + Path.SEPARATOR + "pig_10" + Path.SEPARATOR + "pig-10.jar");
createFile(libpath.toString() + Path.SEPARATOR + "oozie" + Path.SEPARATOR + "oozie_luncher.jar");
- String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>"
+ "<property><name>oozie.action.sharelib.for.pig</name><value>pig_10</value></property>" + "</pig>";
Element eActionXml = XmlUtils.parseXml(actionXml);
@@ -718,7 +718,7 @@ public class TestShareLibService extends XFsTestCase {
private URI[] setUpPigJob(boolean useSystemSharelib) throws Exception {
services.init();
- String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node></pig>";
Element eActionXml = XmlUtils.parseXml(actionXml);
XConfiguration protoConf = new XConfiguration();
@@ -863,7 +863,7 @@ public class TestShareLibService extends XFsTestCase {
// Test hive-site.xml not in distributed cache
setupSharelibConf("hive-site.xml", "oozie.hive_conf");
- String actionXml = "<hive>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<hive>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<script>test</script>" + "</hive>";
Element eActionXml = XmlUtils.parseXml(actionXml);
@@ -943,7 +943,7 @@ public class TestShareLibService extends XFsTestCase {
prop.put("oozie.hive_conf", "/user/test/" + sharelibPath + "/hive-site.xml#hive-site.xml");
setupSharelibConf("hive-site.xml", "oozie.hive_conf", prop);
- String actionXml = "<pig>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<script>test</script>" + "</pig>";
Element eActionXml = XmlUtils.parseXml(actionXml);
XConfiguration protoConf = new XConfiguration();
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/TestXTestCase.java b/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
index 9c0faa3..735f80a 100644
--- a/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/TestXTestCase.java
@@ -193,7 +193,7 @@ public class TestXTestCase extends TestCase {
setSystemProperty(TestConstants.OOZIE_TEST_NAME_NODE, "hdfs://xyz:9000");
setSystemProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, "xyz:9001");
assertEquals("hdfs://xyz:9000", getNameNodeUri());
- assertEquals("xyz:9001", getResourceManagerUri());
+ assertEquals("xyz:9001", getJobTrackerUri());
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XDataTestCase.java b/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
index 93d291b..2105e2f 100644
--- a/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
@@ -1094,7 +1094,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
Configuration conf = new XConfiguration();
conf.set(OozieClient.BUNDLE_APP_PATH, bundleAppPath.toString());
conf.set(OozieClient.USER_NAME, getTestUser());
- conf.set("jobTracker", getResourceManagerUri());
+ conf.set("jobTracker", getJobTrackerUri());
conf.set("nameNode", getNameNodeUri());
conf.set("appName", "bundle-app-name");
conf.set("coordName1", "coord1");
@@ -1320,7 +1320,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
Configuration jobConf = new XConfiguration();
jobConf.set(OozieClient.COORDINATOR_APP_PATH, appPath.toString());
jobConf.set(OozieClient.USER_NAME, getTestUser());
- jobConf.set("jobTracker", getResourceManagerUri());
+ jobConf.set("jobTracker", getJobTrackerUri());
jobConf.set("nameNode", getNameNodeUri());
jobConf.set("wfAppPath", wfAppPath.toString());
@@ -1434,7 +1434,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
w.write("dummy\n");
w.close();
- String actionXml = "<map-reduce>" + "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" + "<name-node>"
+ String actionXml = "<map-reduce>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
+ getNameNodeUri() + "</name-node>" + "<configuration>"
+ "<property><name>mapred.mapper.class</name><value>" + MapperReducerForTest.class.getName()
+ "</value></property>" + "<property><name>mapred.reducer.class</name><value>"
@@ -1492,7 +1492,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
Configuration conf = new XConfiguration();
conf.set(OozieClient.BUNDLE_APP_PATH, bundleAppPath.toString());
conf.set(OozieClient.USER_NAME, getTestUser());
- conf.set("jobTracker", getResourceManagerUri());
+ conf.set("jobTracker", getJobTrackerUri());
conf.set("nameNode", getNameNodeUri());
conf.set("appName", "bundle-app-name");
conf.set("coordName1", "coord1");
@@ -1562,7 +1562,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
Configuration conf = new XConfiguration();
conf.set(OozieClient.BUNDLE_APP_PATH, bundleAppPath.toString());
conf.set(OozieClient.USER_NAME, getTestUser());
- conf.set("jobTracker", getResourceManagerUri());
+ conf.set("jobTracker", getJobTrackerUri());
conf.set("nameNode", getNameNodeUri());
conf.set("coordName1", "coord1");
conf.set("coordName2", "coord2");
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XFsTestCase.java b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
index a1bda01..7a419d1 100644
--- a/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
@@ -171,8 +171,8 @@ public abstract class XFsTestCase extends XTestCase {
* @throws HadoopAccessorException thrown if the JobClient could not be obtained.
*/
protected JobClient createJobClient() throws HadoopAccessorException {
- JobConf conf = has.createJobConf(getResourceManagerUri());
- conf.set("mapred.job.tracker", getResourceManagerUri());
+ JobConf conf = has.createJobConf(getJobTrackerUri());
+ conf.set("mapred.job.tracker", getJobTrackerUri());
conf.set("fs.default.name", getNameNodeUri());
return has.createJobClient(getTestUser(), conf);
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index 3f39f31..011bd0b 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -456,7 +456,7 @@ public abstract class XTestCase extends TestCase {
*
* @return the job tracker URI.
*/
- protected String getResourceManagerUri() {
+ protected String getJobTrackerUri() {
return System.getProperty(TestConstants.OOZIE_TEST_JOB_TRACKER, "localhost:9001");
}
@@ -577,6 +577,7 @@ public abstract class XTestCase extends TestCase {
private void createHdfsPathsAndSetupPermissions() throws IOException {
final FileSystem fileSystem = dfsCluster.getFileSystem();
+
fileSystem.mkdirs(new Path("target/test-data"));
fileSystem.mkdirs(new Path("target/test-data" + "/minicluster/mapred"));
fileSystem.mkdirs(new Path("/user"));
@@ -737,14 +738,14 @@ public abstract class XTestCase extends TestCase {
if (yarnCluster != null) {
return testConfigurations.createJobConfFromYarnCluster(yarnCluster.getConfig());
} else {
- return testConfigurations.createPristineJobConf(getResourceManagerUri(), getNameNodeUri());
+ return testConfigurations.createPristineJobConf(getJobTrackerUri(), getNameNodeUri());
}
}
/**
- * A 'closure' used by {@link XTestCase#executeWhileResourceManagerIsShutdown} method.
+ * A 'closure' used by {@link XTestCase#executeWhileJobTrackerIsShutdown} method.
*/
- public interface ShutdownResourceManagerExecutable {
+ public interface ShutdownJobTrackerExecutable {
/**
* Execute some code
@@ -755,23 +756,16 @@ public abstract class XTestCase extends TestCase {
}
/**
- * Execute some code, expressed via a {@link ShutdownResourceManagerExecutable}, while the ResourceManager is shutdown. Once the code has
- * finished, the ResourceManager is restarted (even if an exception occurs).
+ * Execute some code, expressed via a {@link ShutdownJobTrackerExecutable}, while the JobTracker is shutdown. Once the code has
+ * finished, the JobTracker is restarted (even if an exception occurs).
*
- * @param executable The ShutdownResourceManagerExecutable to execute while the ResourceManager is shutdown
+ * @param executable The ShutdownJobTrackerExecutable to execute while the JobTracker is shutdown
*/
- protected void executeWhileResourceManagerIsShutdown(final ShutdownResourceManagerExecutable executable) {
- for (int i=0; i<yarnCluster.getNumOfResourceManager();i++){
- yarnCluster.getResourceManager(i).stop();
- }
+ protected void executeWhileJobTrackerIsShutdown(final ShutdownJobTrackerExecutable executable) {
try {
executable.execute();
} catch (final Exception e) {
throw new RuntimeException(e);
- } finally {
- for (int i=0; i<yarnCluster.getNumOfResourceManager();i++){
- yarnCluster.getResourceManager(i).start();
- }
}
}
@@ -801,7 +795,7 @@ public abstract class XTestCase extends TestCase {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
final MutableObject<YarnApplicationState> finalState = new MutableObject<YarnApplicationState>();
- final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getResourceManagerUri());
+ final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
final YarnClient yarnClient = Services.get().get(HadoopAccessorService.class).createYarnClient(getTestUser(), jobConf);
try {
@@ -837,7 +831,7 @@ public abstract class XTestCase extends TestCase {
protected YarnApplicationState getYarnApplicationState(final String externalId) throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
YarnApplicationState state = null;
- final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getResourceManagerUri());
+ final JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
// This is needed here because we need a mutable final YarnClient
final MutableObject<YarnClient> yarnClientMO = new MutableObject<YarnClient>(null);
try {
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java b/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
index 7d36bbf..ed9bba3 100644
--- a/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
+++ b/sharelib/hive/src/test/java/org/apache/oozie/action/hadoop/TestHiveActionExecutor.java
@@ -22,6 +22,7 @@ import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
+import java.io.StringReader;
import java.io.Writer;
import java.text.MessageFormat;
import java.util.Arrays;
@@ -31,14 +32,22 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobID;
+import org.apache.hadoop.mapred.RunningJob;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.service.ConfigurationService;
+import org.apache.oozie.service.HadoopAccessorService;
+import org.apache.oozie.service.Services;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.util.ClassUtils;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XConfiguration;
+import org.apache.oozie.util.XmlUtils;
+import org.jdom.Element;
import org.jdom.Namespace;
public class TestHiveActionExecutor extends ActionExecutorTestCase {
@@ -104,7 +113,7 @@ public class TestHiveActionExecutor extends ActionExecutorTestCase {
"</configuration>" +
"<script>" + HIVE_SCRIPT_FILENAME + "</script>" +
"</hive>";
- return MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri());
+ return MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri());
}
private String getActionQueryXml(String query) {
@@ -133,7 +142,7 @@ public class TestHiveActionExecutor extends ActionExecutorTestCase {
"<value>DEBUG</value>" +
"</property>" +
"</configuration>";
- return MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri())
+ return MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri())
+ "<query>" + query + "</query>" +
"</hive>";
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/7a67022a/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
----------------------------------------------------------------------
diff --git a/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java b/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
index 89bcd45..39d8ba2 100644
--- a/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
+++ b/sharelib/hive2/src/test/java/org/apache/oozie/action/hadoop/TestHive2ActionExecutor.java
@@ -19,6 +19,7 @@
package org.apache.oozie.action.hadoop;
import java.io.OutputStreamWriter;
+import java.io.StringReader;
import java.io.Writer;
import java.text.MessageFormat;
import java.util.ArrayList;
@@ -28,9 +29,15 @@ import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobID;
+import org.apache.hadoop.mapred.RunningJob;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowAction;
+import org.apache.oozie.service.HadoopAccessorService;
+import org.apache.oozie.service.Services;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XmlUtils;
@@ -69,7 +76,7 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
assertEquals(classes, ae.getLauncherClasses());
Element actionXml = XmlUtils.parseXml("<hive2>" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<jdbc-url>jdbc:hive2://foo:1234/bar</jdbc-url>" +
"<password>pass</password>" +
@@ -110,7 +117,7 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
String sampleQuery = "SELECT count(*) from foobar";
Element actionXml = XmlUtils.parseXml("<hive2 xmlns=\"uri:oozie:hive2-action:0.2\">" +
- "<job-tracker>" + getResourceManagerUri() + "</job-tracker>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
"<name-node>" + getNameNodeUri() + "</name-node>" +
"<jdbc-url>jdbc:hive2://foo:1234/bar</jdbc-url>" +
"<password>pass</password>" +
@@ -167,7 +174,7 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
"<password>dummy</password>" +
"<script>" + HIVE_SCRIPT_FILENAME + "</script>" +
"</hive2>";
- return MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri(), getHiveServer2JdbcURL(""));
+ return MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri(), getHiveServer2JdbcURL(""));
}
private String getQueryActionXml(String query) {
@@ -177,7 +184,7 @@ public class TestHive2ActionExecutor extends ActionExecutorTestCase {
"<configuration></configuration>" +
"<jdbc-url>{2}</jdbc-url>" +
"<password>dummy</password>";
- String expanded = MessageFormat.format(script, getResourceManagerUri(), getNameNodeUri(), getHiveServer2JdbcURL(""));
+ String expanded = MessageFormat.format(script, getJobTrackerUri(), getNameNodeUri(), getHiveServer2JdbcURL(""));
// MessageFormat strips single quotes, which causes issues with the hive query parser
return expanded +
"<query>" + query + "</query>" + "</hive2>";
[46/48] oozie git commit: Print SQL parameters (debug)
Posted by pb...@apache.org.
Print SQL parameters (debug)
Change-Id: I76e01057013bc8ef6193ec2ddeaddf44961ae97c
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/876acebb
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/876acebb
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/876acebb
Branch: refs/heads/oya
Commit: 876acebb0022e2f893e15d6fd468a816024e9805
Parents: 2e78c23
Author: Peter Bacsko <pb...@cloudera.com>
Authored: Wed Dec 7 12:34:02 2016 +0100
Committer: Peter Bacsko <pb...@cloudera.com>
Committed: Wed Dec 7 12:34:02 2016 +0100
----------------------------------------------------------------------
core/src/main/resources/META-INF/persistence.xml | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/876acebb/core/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/core/src/main/resources/META-INF/persistence.xml b/core/src/main/resources/META-INF/persistence.xml
index edda2d5..a2b0f2b 100644
--- a/core/src/main/resources/META-INF/persistence.xml
+++ b/core/src/main/resources/META-INF/persistence.xml
@@ -76,6 +76,7 @@
<property name="openjpa.jdbc.DBDictionary" value="batchLimit=50"/>
<property name="openjpa.RuntimeUnenhancedClasses" value="unsupported"/>
<property name="openjpa.Log" value="log4j"/>
+ <property name="openjpa.ConnectionFactoryProperties" value="PrintParameters=true" />
</properties>
</persistence-unit>
[29/48] oozie git commit: OOZIE-2742 Unable to kill applications
based on tag (satishsaley via rohini)
Posted by pb...@apache.org.
OOZIE-2742 Unable to kill applications based on tag (satishsaley via rohini)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/ed1e2520
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/ed1e2520
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/ed1e2520
Branch: refs/heads/oya
Commit: ed1e25208c57b8c15150feb2f49f57edfba7b340
Parents: 28ac958
Author: Rohini Palaniswamy <ro...@apache.org>
Authored: Wed Nov 23 09:16:01 2016 -0800
Committer: Rohini Palaniswamy <ro...@apache.org>
Committed: Wed Nov 23 09:16:01 2016 -0800
----------------------------------------------------------------------
.../oozie/action/hadoop/JavaActionExecutor.java | 15 ++-
.../action/hadoop/TestJavaActionExecutor.java | 117 +++++++++----------
.../action/hadoop/LauncherMainHadoopUtils.java | 16 +--
release-log.txt | 1 +
4 files changed, 79 insertions(+), 70 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/ed1e2520/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
index f315af7..4beeb96 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
@@ -25,6 +25,7 @@ import java.net.ConnectException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
+import java.security.PrivilegedExceptionAction;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
@@ -52,6 +53,7 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.DiskChecker;
@@ -69,6 +71,7 @@ import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.ShareLibService;
import org.apache.oozie.service.URIHandlerService;
+import org.apache.oozie.service.UserGroupInformationService;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.util.ELEvaluationException;
import org.apache.oozie.util.ELEvaluator;
@@ -1594,7 +1597,7 @@ public class JavaActionExecutor extends ActionExecutor {
boolean exception = false;
try {
Element actionXml = XmlUtils.parseXml(action.getConf());
- JobConf jobConf = createBaseHadoopConf(context, actionXml);
+ final JobConf jobConf = createBaseHadoopConf(context, actionXml);
WorkflowJob wfJob = context.getWorkflow();
Configuration conf = null;
if ( wfJob.getConf() != null ) {
@@ -1603,7 +1606,15 @@ public class JavaActionExecutor extends ActionExecutor {
String launcherTag = LauncherMapperHelper.getActionYarnTag(conf, wfJob.getParentId(), action);
jobConf.set(LauncherMainHadoopUtils.CHILD_MAPREDUCE_JOB_TAGS, LauncherMapperHelper.getTag(launcherTag));
jobConf.set(LauncherMainHadoopUtils.OOZIE_JOB_LAUNCH_TIME, Long.toString(action.getStartTime().getTime()));
- LauncherMainHadoopUtils.killChildYarnJobs(jobConf);
+ UserGroupInformation ugi = Services.get().get(UserGroupInformationService.class)
+ .getProxyUser(context.getWorkflow().getUser());
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ LauncherMainHadoopUtils.killChildYarnJobs(jobConf);
+ return null;
+ }
+ });
jobClient = createJobClient(context, jobConf);
RunningJob runningJob = getRunningJob(context, action, jobClient);
if (runningJob != null) {
http://git-wip-us.apache.org/repos/asf/oozie/blob/ed1e2520/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
index 75301db..8965cdf 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
@@ -544,69 +544,64 @@ public class TestJavaActionExecutor extends ActionExecutorTestCase {
}
public void testChildKill() throws Exception {
- if (HadoopShims.isYARN()) {
- final JobConf clusterConf = createJobConf();
- FileSystem fileSystem = FileSystem.get(clusterConf);
- Path confFile = new Path("/tmp/cluster-conf.xml");
- OutputStream out = fileSystem.create(confFile);
- clusterConf.writeXml(out);
- out.close();
- String confFileName = fileSystem.makeQualified(confFile).toString() + "#core-site.xml";
- final String actionXml = "<java>" +
- "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
- "<name-node>" + getNameNodeUri() + "</name-node>" +
- "<main-class> " + SleepJob.class.getName() + " </main-class>" +
- "<arg>-mt</arg>" +
- "<arg>300000</arg>" +
- "<archive>" + confFileName + "</archive>" +
- "</java>";
- final Context context = createContext(actionXml, null);
- final RunningJob runningJob = submitAction(context);
- waitFor(60 * 1000, new Predicate() {
- @Override
- public boolean evaluate() throws Exception {
- return runningJob.getJobStatus().getRunState() == 1;
- }
- });
- assertFalse(runningJob.isComplete());
- Thread.sleep(15000);
- UserGroupInformationService ugiService = Services.get().
- get(UserGroupInformationService.class);
-
- UserGroupInformation ugi = ugiService.getProxyUser(getTestUser());
- ugi.doAs(new PrivilegedExceptionAction<Object>() {
- @Override
- public Void run() throws Exception {
- JavaActionExecutor ae = new JavaActionExecutor();
- ae.kill(context, context.getAction());
-
- WorkflowJob wfJob = context.getWorkflow();
- Configuration conf = null;
- if (wfJob.getConf() != null) {
- conf = new XConfiguration(new StringReader(wfJob.getConf()));
- }
- String launcherTag = LauncherMapperHelper.getActionYarnTag(conf, wfJob.getParentId(), context.getAction());
- Configuration jobConf = ae.createBaseHadoopConf(context, XmlUtils.parseXml(actionXml));
- jobConf.set(LauncherMainHadoopUtils.CHILD_MAPREDUCE_JOB_TAGS, LauncherMapperHelper.getTag(launcherTag));
- jobConf.setLong(LauncherMainHadoopUtils.OOZIE_JOB_LAUNCH_TIME,
- context.getAction().getStartTime().getTime());
- Set<String> childSet = LauncherMainHadoopUtils.getChildJobs(jobConf);
- assertEquals(1, childSet.size());
-
- JobClient jobClient = new JobClient(clusterConf);
- for (String jobId : childSet) {
- RunningJob childJob = jobClient.getJob(jobId);
- assertEquals(JobStatus.State.KILLED.getValue(), childJob.getJobStatus().getRunState());
- }
- assertTrue(ae.isCompleted(context.getAction().getExternalStatus()));
- return null;
- }
- });
+ final JobConf clusterConf = createJobConf();
+ FileSystem fileSystem = FileSystem.get(clusterConf);
+ Path confFile = new Path("/tmp/cluster-conf.xml");
+ OutputStream out = fileSystem.create(confFile);
+ clusterConf.writeXml(out);
+ out.close();
+ String confFileName = fileSystem.makeQualified(confFile).toString() + "#core-site.xml";
+ final String actionXml = "<java>" +
+ "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" +
+ "<name-node>" + getNameNodeUri() + "</name-node>" +
+ "<main-class> " + SleepJob.class.getName() + " </main-class>" +
+ "<arg>-mt</arg>" +
+ "<arg>300000</arg>" +
+ "<archive>" + confFileName + "</archive>" +
+ "</java>";
+ final Context context = createContext(actionXml, null);
+ final RunningJob runningJob = submitAction(context);
+ waitFor(60 * 1000, new Predicate() {
+ @Override
+ public boolean evaluate() throws Exception {
+ return runningJob.getJobStatus().getRunState() == 1;
+ }
+ });
+ assertFalse(runningJob.isComplete());
+ Thread.sleep(15000);
+ JavaActionExecutor ae = new JavaActionExecutor();
+ ae.kill(context, context.getAction());
- assertEquals(WorkflowAction.Status.DONE, context.getAction().getStatus());
- assertEquals("KILLED", context.getAction().getExternalStatus());
- assertFalse(runningJob.isSuccessful());
+ WorkflowJob wfJob = context.getWorkflow();
+ Configuration conf = null;
+ if (wfJob.getConf() != null) {
+ conf = new XConfiguration(new StringReader(wfJob.getConf()));
}
+ String launcherTag = LauncherMapperHelper.getActionYarnTag(conf, wfJob.getParentId(), context.getAction());
+ final Configuration jobConf = ae.createBaseHadoopConf(context, XmlUtils.parseXml(actionXml));
+ jobConf.set(LauncherMainHadoopUtils.CHILD_MAPREDUCE_JOB_TAGS, LauncherMapperHelper.getTag(launcherTag));
+ jobConf.setLong(LauncherMainHadoopUtils.OOZIE_JOB_LAUNCH_TIME, context.getAction().getStartTime().getTime());
+
+ UserGroupInformationService ugiService = Services.get().get(UserGroupInformationService.class);
+ UserGroupInformation ugi = ugiService.getProxyUser(getTestUser());
+ Set<String> childSet = ugi.doAs(new PrivilegedExceptionAction<Set<String>>() {
+ @Override
+ public Set<String> run() throws Exception {
+ Set<String> childSet = LauncherMainHadoopUtils.getChildJobs(jobConf);
+ return childSet;
+ }
+ });
+ assertEquals(1, childSet.size());
+
+ JobClient jobClient = new JobClient(clusterConf);
+ for (String jobId : childSet) {
+ RunningJob childJob = jobClient.getJob(jobId);
+ assertEquals(JobStatus.State.KILLED.getValue(), childJob.getJobStatus().getRunState());
+ }
+ assertTrue(ae.isCompleted(context.getAction().getExternalStatus()));
+ assertEquals(WorkflowAction.Status.DONE, context.getAction().getStatus());
+ assertEquals("KILLED", context.getAction().getExternalStatus());
+ assertFalse(runningJob.isSuccessful());
}
public void testExceptionSubmitException() throws Exception {
http://git-wip-us.apache.org/repos/asf/oozie/blob/ed1e2520/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java b/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
index 5fda0ef..0cf2e90 100644
--- a/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
+++ b/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
@@ -23,6 +23,9 @@ import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
+
+import org.apache.commons.lang.StringUtils;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
@@ -53,13 +56,12 @@ public class LauncherMainHadoopUtils {
System.out.println("tag id : " + tag);
long startTime = 0L;
try {
- try {
- if(actionConf.get(OOZIE_JOB_LAUNCH_TIME) != null) {
- startTime = Long.parseLong(actionConf.get(OOZIE_JOB_LAUNCH_TIME));
- }
- else {
- startTime = Long.parseLong(System.getProperty(OOZIE_JOB_LAUNCH_TIME));
- }
+ if(actionConf.get(OOZIE_JOB_LAUNCH_TIME) != null) {
+ startTime = Long.parseLong(actionConf.get(OOZIE_JOB_LAUNCH_TIME));
+ }
+ else {
+ startTime = Long.parseLong(System.getProperty(OOZIE_JOB_LAUNCH_TIME));
+ }
} catch(NumberFormatException nfe) {
throw new RuntimeException("Could not find Oozie job launch time", nfe);
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/ed1e2520/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 52640dd..bd7219c 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -11,6 +11,7 @@ OOZIE-2634 Queue dump command message is confusing when the queue is empty (andr
-- Oozie 4.3.0 release
+OOZIE-2742 Unable to kill applications based on tag (satishsaley via rohini)
OOZIE-2720 Test failure - TestCoordMaterializeTriggerService#testMaxMatThrottleNotPicked (gezapeti via rohini)
OOZIE-2536 Hadoop's cleanup of local directory in uber mode causing failures (satishsaley via rohini)
OOZIE-2723 JSON.org license is now CatX (rkanter, abhishekbafna via shwethags)
[33/48] oozie git commit: Two fewer spaces in a log message.
Posted by pb...@apache.org.
Two fewer spaces in a log message.
Change-Id: I19535ac72da0c1b7a9ec0654b02e0e2e47f3b2fe
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/0e9162d0
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/0e9162d0
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/0e9162d0
Branch: refs/heads/oya
Commit: 0e9162d0439e18df629265133004463a87d2a24e
Parents: a671281
Author: Andras Piros <an...@cloudera.com>
Authored: Fri Nov 25 12:41:34 2016 +0100
Committer: Andras Piros <an...@cloudera.com>
Committed: Fri Nov 25 12:41:34 2016 +0100
----------------------------------------------------------------------
.../main/java/org/apache/oozie/command/wf/ActionStartXCommand.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/0e9162d0/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java b/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
index ca155b1..edfac48 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
@@ -224,7 +224,7 @@ public class ActionStartXCommand extends ActionXCommand<org.apache.oozie.command
wfAction.setErrorInfo(null, null);
incrActionCounter(wfAction.getType(), 1);
- LOG.info("Start action [{0}] with user-retry state : userRetryCount [{1}], userRetryMax [{2}], userRetryInterval [{3}]",
+ LOG.info("Start action [{0}] with user-retry state : userRetryCount [{1}], userRetryMax [{2}], userRetryInterval [{3}]",
wfAction.getId(), wfAction.getUserRetryCount(), wfAction.getUserRetryMax(), wfAction
.getUserRetryInterval());