You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@oozie.apache.org by ge...@apache.org on 2017/05/26 09:27:56 UTC
[04/10] oozie git commit: OOZIE-1770 Create Oozie Application Master
for YARN (asasvari, pbacsko, rkanter, gezapeti)
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestShareLibService.java b/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
index 2d31f9a..390545d 100644
--- a/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
@@ -43,14 +43,16 @@ import org.apache.oozie.action.hadoop.JavaActionExecutor;
import org.apache.oozie.action.hadoop.PigActionExecutor;
import org.apache.oozie.action.hadoop.TestJavaActionExecutor;
import org.apache.oozie.client.OozieClient;
-import org.apache.oozie.hadoop.utils.HadoopShims;
import org.apache.oozie.test.XFsTestCase;
+import org.apache.oozie.util.FSUtils;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XmlUtils;
import org.jdom.Element;
import org.junit.Test;
+import com.google.common.collect.Lists;
+
public class TestShareLibService extends XFsTestCase {
Services services;
@@ -87,7 +89,7 @@ public class TestShareLibService extends XFsTestCase {
public static class DummyShareLibService extends ShareLibService {
@Override
- public String findContainingJar(Class clazz) {
+ public String findContainingJar(Class<?> clazz) {
if (JavaActionExecutor.getCommonLauncherClasses().contains(clazz)) {
return testCaseDirPath + "/" + MyOozie.class.getName() + ".jar";
}
@@ -100,8 +102,8 @@ public class TestShareLibService extends XFsTestCase {
}
@Override
- public List<Class> getLauncherClasses() {
- return Arrays.asList((Class) MyPig.class);
+ public List<Class<?>> getLauncherClasses() {
+ return Lists.<Class<?>>newArrayList(MyPig.class);
}
}
@@ -110,8 +112,8 @@ public class TestShareLibService extends XFsTestCase {
}
@Override
- public List<Class> getLauncherClasses() {
- return Arrays.asList((Class) TestHive.class);
+ public List<Class<?>> getLauncherClasses() {
+ return Lists.<Class<?>>newArrayList(TestHive.class);
}
}
@@ -495,11 +497,8 @@ public class TestShareLibService extends XFsTestCase {
assertTrue(shareLibService.getShareLibJars("something_new").get(0).getName().endsWith("somethingNew.jar"));
assertTrue(shareLibService.getShareLibJars("pig").get(0).getName().endsWith("pig.jar"));
assertTrue(shareLibService.getShareLibJars("directjar").get(0).getName().endsWith("direct.jar"));
- // Skipping for hadoop - 1.x because symlink is not supported
- if (HadoopShims.isSymlinkSupported()) {
- assertTrue(
- shareLibService.getShareLibJars("linkFile").get(0).getName().endsWith("targetOfLinkFile.xml"));
- }
+ assertTrue(shareLibService.getShareLibJars("linkFile").get(0).getName().endsWith("targetOfLinkFile.xml"));
+
List<Path> listOfPaths = shareLibService.getShareLibJars("directjar");
for (Path p : listOfPaths) {
assertTrue(p.toString().startsWith("hdfs"));
@@ -615,11 +614,6 @@ public class TestShareLibService extends XFsTestCase {
@Test
public void testMetafileSymlink() throws ServiceException, IOException {
- // Assume.assumeTrue("Skipping for hadoop - 1.x",HadoopFileSystem.isSymlinkSupported());
- if (!HadoopShims.isSymlinkSupported()) {
- return;
- }
-
services = new Services();
setSystemProps();
Configuration conf = services.get(ConfigurationService.class).getConf();
@@ -651,15 +645,14 @@ public class TestShareLibService extends XFsTestCase {
createFile(hive_site.toString());
- HadoopShims fileSystem = new HadoopShims(fs);
- fileSystem.createSymlink(basePath, symlink, true);
- fileSystem.createSymlink(hive_site, symlink_hive_site, true);
+ FSUtils.createSymlink(fs, basePath, symlink, true);
+ FSUtils.createSymlink(fs, hive_site, symlink_hive_site, true);
prop.put(ShareLibService.SHARE_LIB_CONF_PREFIX + ".pig", "/user/test/" + symlink.toString());
prop.put(ShareLibService.SHARE_LIB_CONF_PREFIX + ".hive_conf", "/user/test/" + symlink_hive_site.toString()
+ "#hive-site.xml");
createTestShareLibMetaFile(fs, prop);
- assertEquals(fileSystem.isSymlink(symlink), true);
+ assertEquals(FSUtils.isSymlink(fs, symlink), true);
conf.set(ShareLibService.SHARELIB_MAPPING_FILE, fs.getUri() + "/user/test/config.properties");
conf.set(ShareLibService.SHIP_LAUNCHER_JAR, "true");
@@ -667,9 +660,9 @@ public class TestShareLibService extends XFsTestCase {
ShareLibService shareLibService = Services.get().get(ShareLibService.class);
assertEquals(shareLibService.getShareLibJars("pig").size(), 2);
assertEquals(shareLibService.getShareLibJars("hive_conf").size(), 1);
- new HadoopShims(fs).createSymlink(basePath1, symlink, true);
- new HadoopShims(fs).createSymlink(hive_site1, symlink_hive_site, true);
- assertEquals(new HadoopShims(fs).getSymLinkTarget(shareLibService.getShareLibJars("hive_conf").get(0)),
+ FSUtils.createSymlink(fs, basePath1, symlink, true);
+ FSUtils.createSymlink(fs, hive_site1, symlink_hive_site, true);
+ assertEquals(FSUtils.getSymLinkTarget(fs, shareLibService.getShareLibJars("hive_conf").get(0)),
hive_site1);
assertEquals(shareLibService.getShareLibJars("pig").size(), 3);
}
@@ -781,8 +774,7 @@ public class TestShareLibService extends XFsTestCase {
String symlinkTarget = linkDir.toString() + Path.SEPARATOR + "targetOfLinkFile.xml";
createFile(directJarPath);
createFile(symlinkTarget);
- HadoopShims fsShim = new HadoopShims(fs);
- fsShim.createSymlink(new Path(symlinkTarget), new Path(symlink), true);
+ FSUtils.createSymlink(fs, new Path(symlinkTarget), new Path(symlink), true);
prop.put(ShareLibService.SHARE_LIB_CONF_PREFIX + ".pig", "/user/test/" + basePath.toString());
prop.put(ShareLibService.SHARE_LIB_CONF_PREFIX + ".something_new", "/user/test/" + somethingNew.toString());
@@ -1018,16 +1010,11 @@ public class TestShareLibService extends XFsTestCase {
private void verifyFilesInDistributedCache(URI[] cacheFiles, String... files) {
String cacheFilesStr = Arrays.toString(cacheFiles);
- if (new HadoopShims(getFileSystem()).isYARN()) {
- // Hadoop 2 has two extra jars
- assertEquals(cacheFiles.length, files.length + 2);
- assertTrue(cacheFilesStr.contains("MRAppJar.jar"));
- assertTrue(cacheFilesStr.contains("hadoop-mapreduce-client-jobclient-"));
+ // Hadoop 2 has the following jars too: MRAppJar.jar and hadoop-mapreduce-client-jobclient-
+ assertEquals(cacheFiles.length, files.length + 2);
+ assertTrue(cacheFilesStr.contains("MRAppJar.jar"));
+ assertTrue(cacheFilesStr.contains("hadoop-mapreduce-client-jobclient-"));
- }
- else {
- assertEquals(cacheFiles.length, files.length);
- }
for (String file : files) {
assertTrue(cacheFilesStr.contains(file));
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XDataTestCase.java b/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
index ea778bd..2105e2f 100644
--- a/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XDataTestCase.java
@@ -1452,7 +1452,7 @@ public abstract class XDataTestCase extends XHCatTestCase {
action.setUserRetryMax(2);
action.setUserRetryInterval(1);
action.setErrorInfo("dummyErrorCode", "dummyErrorMessage");
- action.setExternalId("dummy external id");
+ action.setExternalId("application_1234567890123_0001");
action.setExternalStatus("RUNNING");
return action;
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XFsTestCase.java b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
index 1d399e4..b017b41 100644
--- a/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
@@ -28,11 +28,16 @@ import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XLog;
+import org.apache.oozie.client.WorkflowAction;
+import org.apache.oozie.client.WorkflowJob;
+import org.apache.oozie.command.wf.ActionXCommand.ActionExecutorContext;
import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
+import org.apache.oozie.service.Services;
import java.io.IOException;
import java.net.URI;
+import java.net.URISyntaxException;
/**
* Base JUnit <code>TestCase</code> subclass used by all Oozie testcases that need Hadoop FS access. <p/> As part of its
@@ -175,4 +180,27 @@ public abstract class XFsTestCase extends XTestCase {
return has.createJobClient(getTestUser(), conf);
}
+ /**
+ * Returns a Path object to a filesystem resource which belongs to a specific workflow on HDFS
+ * Example: /user/test/oozie-abcd/0000003-160913132555310-oozie-abcd-W/hadoop--map-reduce/launcher.xml
+ *
+ * @param userName current username
+ * @param job the workflow job the resource belongs to
+ * @param services Oozie Services class
+ * @param context Executor context
+ * @param fileName the filename
+ * @return the Path object which represents a file on HDFS
+ * @throws Exception
+ */
+ protected Path getPathToWorkflowResource(String userName, WorkflowJob job, Services services,
+ ActionExecutorContext context, String fileName) throws Exception {
+ return new Path(
+ "/user" +
+ "/" + userName +
+ "/" + services.getSystemId() +
+ "/" + job.getId() +
+ "/" + context.getActionDir().getName(),
+ fileName
+ );
+ }
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index e1d9068..d7ce9b2 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -28,6 +28,7 @@ import java.io.IOException;
import java.net.InetAddress;
import java.net.URL;
import java.util.ArrayList;
+import java.util.EnumSet;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.List;
@@ -42,8 +43,9 @@ import javax.persistence.FlushModeType;
import javax.persistence.Query;
import junit.framework.TestCase;
-import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.conf.Configuration;
@@ -56,6 +58,11 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.client.api.YarnClient;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.spi.LoggingEvent;
import org.apache.oozie.BundleActionBean;
@@ -69,6 +76,7 @@ import org.apache.oozie.dependency.FSURIHandler;
import org.apache.oozie.dependency.HCatURIHandler;
import org.apache.oozie.service.ConfigurationService;
import org.apache.oozie.service.HCatAccessorService;
+import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.JMSAccessorService;
import org.apache.oozie.service.JPAService;
@@ -82,6 +90,7 @@ import org.apache.oozie.sla.SLASummaryBean;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.test.MiniHCatServer.RUNMODE;
import org.apache.oozie.test.hive.MiniHS2;
+import org.apache.oozie.util.ClasspathUtils;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XConfiguration;
@@ -104,6 +113,8 @@ import org.apache.oozie.util.XLog;
* From within testcases, system properties must be changed using the {@link #setSystemProperty} method.
*/
public abstract class XTestCase extends TestCase {
+ private static EnumSet<YarnApplicationState> YARN_TERMINAL_STATES = EnumSet.of(YarnApplicationState.FAILED,
+ YarnApplicationState.KILLED, YarnApplicationState.FINISHED);
private Map<String, String> sysProps;
private String testCaseDir;
private String testCaseConfDir;
@@ -898,6 +909,7 @@ public abstract class XTestCase extends TestCase {
private static MiniDFSCluster dfsCluster = null;
private static MiniDFSCluster dfsCluster2 = null;
+ // TODO: OYA: replace with MiniYarnCluster or MiniMRYarnCluster
private static MiniMRCluster mrCluster = null;
private static MiniHCatServer hcatServer = null;
private static MiniHS2 hiveserver2 = null;
@@ -905,9 +917,11 @@ public abstract class XTestCase extends TestCase {
private void setUpEmbeddedHadoop(String testCaseDir) throws Exception {
if (dfsCluster == null && mrCluster == null) {
- if (System.getProperty("hadoop.log.dir") == null) {
- System.setProperty("hadoop.log.dir", testCaseDir);
- }
+ if (System.getProperty("hadoop.log.dir") == null) {
+ System.setProperty("hadoop.log.dir", testCaseDir);
+ }
+ // Tell the ClasspathUtils that we're using a mini cluster
+ ClasspathUtils.setUsingMiniYarnCluster(true);
int taskTrackers = 2;
int dataNodes = 2;
String oozieUser = getOozieUser();
@@ -1216,6 +1230,46 @@ public abstract class XTestCase extends TestCase {
return services;
}
+ protected YarnApplicationState waitUntilYarnAppState(String externalId, final EnumSet<YarnApplicationState> acceptedStates)
+ throws HadoopAccessorException, IOException, YarnException {
+ final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
+ final MutableObject<YarnApplicationState> finalState = new MutableObject<YarnApplicationState>();
+
+ Configuration conf = Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
+ final YarnClient yarnClient = Services.get().get(HadoopAccessorService.class).createYarnClient(getTestUser(), conf);
+
+ try {
+ waitFor(60 * 1000, new Predicate() {
+ @Override
+ public boolean evaluate() throws Exception {
+ YarnApplicationState state = yarnClient.getApplicationReport(appId).getYarnApplicationState();
+ finalState.setValue(state);
+
+ return acceptedStates.contains(state);
+ }
+ });
+ } finally {
+ if (yarnClient != null) {
+ yarnClient.close();
+ }
+ }
+
+ log.info("Final state is: {0}", finalState.getValue());
+ return finalState.getValue();
+ }
+
+ protected void waitUntilYarnAppDoneAndAssertSuccess(String externalId)
+ throws HadoopAccessorException, IOException, YarnException {
+ YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
+ assertEquals("YARN App state", YarnApplicationState.FINISHED, state);
+ }
+
+ protected void waitUntilYarnAppKilledAndAssertSuccess(String externalId)
+ throws HadoopAccessorException, IOException, YarnException {
+ YarnApplicationState state = waitUntilYarnAppState(externalId, YARN_TERMINAL_STATES);
+ assertEquals("YARN App state", YarnApplicationState.KILLED, state);
+ }
+
protected class TestLogAppender extends AppenderSkeleton {
private final List<LoggingEvent> log = new ArrayList<LoggingEvent>();
@@ -1243,4 +1297,3 @@ public abstract class XTestCase extends TestCase {
}
}
-
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/util/TestClasspathUtils.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/util/TestClasspathUtils.java b/core/src/test/java/org/apache/oozie/util/TestClasspathUtils.java
new file mode 100644
index 0000000..2e732cb
--- /dev/null
+++ b/core/src/test/java/org/apache/oozie/util/TestClasspathUtils.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.util;
+
+import junit.framework.TestCase;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.filecache.DistributedCache;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.oozie.test.XFsTestCase;
+import org.apache.oozie.test.XTestCase;
+
+import java.net.URI;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+public class TestClasspathUtils extends XFsTestCase {
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ // This is normally true, and adds the entirety of the current classpath in ClasspathUtils, which we don't want to test or
+ // worry about here. Temporarily set this back to false so it behaves normally.
+ ClasspathUtils.setUsingMiniYarnCluster(false);
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ // Make sure to turn this back on for subsequent tests
+ ClasspathUtils.setUsingMiniYarnCluster(true);
+ super.tearDown();
+ }
+
+ public void testSetupClasspath() throws Exception {
+ Configuration conf = new Configuration(false);
+ Map<String, String> env = new HashMap<String, String>();
+
+ Path p1 = new Path(getFsTestCaseDir(), "foo.xml");
+ getFileSystem().createNewFile(p1);
+ DistributedCache.addFileToClassPath(p1, conf);
+
+ Path p2 = new Path(getFsTestCaseDir(), "foo.txt");
+ getFileSystem().createNewFile(p2);
+ DistributedCache.addFileToClassPath(p2, conf);
+
+ Path p3 = new Path(getFsTestCaseDir(), "foo.zip");
+ getFileSystem().createNewFile(p3);
+ DistributedCache.addArchiveToClassPath(p3, conf);
+
+ ClasspathUtils.setupClasspath(env, conf);
+
+ assertEquals(2, env.size());
+ assertTrue(env.containsKey("CLASSPATH"));
+ String[] paths = env.get("CLASSPATH").split(":");
+ assertEquals(9, paths.length);
+ Arrays.sort(paths);
+ assertEquals("$HADOOP_COMMON_HOME/share/hadoop/common/*", paths[0]);
+ assertEquals("$HADOOP_COMMON_HOME/share/hadoop/common/lib/*", paths[1]);
+ assertEquals("$HADOOP_CONF_DIR", paths[2]);
+ assertEquals("$HADOOP_HDFS_HOME/share/hadoop/hdfs/*", paths[3]);
+ assertEquals("$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*", paths[4]);
+ assertEquals("$HADOOP_YARN_HOME/share/hadoop/yarn/*", paths[5]);
+ assertEquals("$HADOOP_YARN_HOME/share/hadoop/yarn/lib/*", paths[6]);
+ assertEquals("$PWD", paths[7]);
+ assertEquals("$PWD/*", paths[8]);
+
+ assertTrue(env.containsKey("$PWD"));
+ paths = env.get("$PWD").split(":");
+ assertEquals(3, paths.length);
+ Arrays.sort(paths);
+ assertEquals("$PWD/foo.txt", paths[0]);
+ assertEquals("$PWD/foo.xml", paths[1]);
+ assertEquals("$PWD/foo.zip", paths[2]);
+ }
+
+ public void testAddMapReduceToClasspath() throws Exception {
+ Configuration conf = new Configuration(false);
+ Map<String, String> env = new HashMap<String, String>();
+
+ ClasspathUtils.addMapReduceToClasspath(env, conf);
+
+ assertEquals(1, env.size());
+ assertTrue(env.containsKey("CLASSPATH"));
+ String[] paths = env.get("CLASSPATH").split(":");
+ assertEquals(2, paths.length);
+ Arrays.sort(paths);
+ assertEquals("$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*", paths[0]);
+ assertEquals("$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*", paths[1]);
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java b/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
index a3c7b14..0557166 100644
--- a/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
+++ b/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
@@ -54,7 +54,6 @@ public class TestLiteWorkflowAppParser extends XTestCase {
private String cleanupXml(String xml) {
xml = xml.replaceAll(" xmlns=?(\"|\')(\"|\')", "");
xml = xml.replaceAll("\\s*<source>.*</source>", ""); // remove the <source> added by Hadoop 2
- xml = xml.replaceAll("\\s*<!--Loaded from Unknown-->", ""); // remove the <!--LoadedfromUnknown--> added by Hadoop 1.2.1
return xml;
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/distro/src/main/bin/addtowar.sh
----------------------------------------------------------------------
diff --git a/distro/src/main/bin/addtowar.sh b/distro/src/main/bin/addtowar.sh
index 3990409..688ba6b 100644
--- a/distro/src/main/bin/addtowar.sh
+++ b/distro/src/main/bin/addtowar.sh
@@ -95,29 +95,13 @@ function checkOption() {
#get the list of hadoop jars that will be injected based on the hadoop version
function getHadoopJars() {
version=$1
- if [ "${version}" = "0.20.1" ]; then
- #List is separated by ":"
- hadoopJars="hadoop-core*.jar"
- elif [ "${version}" = "0.20.2" ]; then
- #List is separated by ":"
- hadoopJars="hadoop-core*.jar"
- elif [ "${version}" = "0.20.104" ]; then
- #List is separated by ":"
- hadoopJars="hadoop-core*.jar:jackson-core-asl-*.jar:jackson-mapper-asl-*.jar"
- elif [ "${version}" = "0.20.200" ]; then
- #List is separated by ":"
- hadoopJars="hadoop-core*.jar:jackson-core-asl-*.jar:jackson-mapper-asl-*.jar:commons-configuration-*.jar"
- elif [[ "${version}" =~ .*23 ]]; then
- suffix="-[0-9.]*"
- #List is separated by ":"
- hadoopJars="hadoop-mapreduce-client-core${suffix}.jar:hadoop-mapreduce-client-common${suffix}.jar:hadoop-mapreduce-client-jobclient${suffix}.jar:hadoop-mapreduce-client-app${suffix}.jar:hadoop-yarn-common${suffix}.jar:hadoop-yarn-api${suffix}.jar:hadoop-hdfs${suffix}.jar:hadoop-common${suffix}.jar:hadoop-auth${suffix}.jar:guava*.jar:protobuf-*.jar:avro-ipc-*.jar:jackson-core-asl-*.jar:jackson-mapper-asl-*.jar:commons-configuration-*.jar"
- elif [[ "${version}" =~ 2.* ]]; then
+ if [[ "${version}" =~ (2|3).* ]]; then
suffix="-[0-9.]*"
#List is separated by ":"
hadoopJars="hadoop-mapreduce-client-core${suffix}.jar:hadoop-mapreduce-client-common${suffix}.jar:hadoop-mapreduce-client-jobclient${suffix}.jar:hadoop-mapreduce-client-app${suffix}.jar:hadoop-yarn-common${suffix}.jar:hadoop-yarn-api${suffix}.jar:hadoop-yarn-client${suffix}.jar:hadoop-hdfs${suffix}.jar:hadoop-common${suffix}.jar:hadoop-auth${suffix}.jar:guava*.jar:protobuf-*.jar:jackson-core-asl-*.jar:jackson-mapper-asl-*.jar:commons-configuration-*.jar:commons-cli-*.jar:commons-io-*.jar"
else
echo
- echo "Exiting: Unsupported Hadoop version '${hadoopVer}', supported versions: 0.20.1, 0.20.2, 0.20.104, 0.20.200, 0.23.x and 2.x"
+ echo "Exiting: Unsupported Hadoop version '${hadoopVer}', supported versions: 2.x"
echo
cleanUp
exit -1;
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/docs/src/site/twiki/DG_QuickStart.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/DG_QuickStart.twiki b/docs/src/site/twiki/DG_QuickStart.twiki
index 7ea87c7..8c3d664 100644
--- a/docs/src/site/twiki/DG_QuickStart.twiki
+++ b/docs/src/site/twiki/DG_QuickStart.twiki
@@ -41,15 +41,13 @@ suitable when same oozie package needs to be used in multiple set-ups with diffe
2. Build with -Puber which will bundle the required libraries in the oozie war. Further, the following options are
available to customise the versions of the dependencies:
--P<profile> - default hadoop-2. Valid are hadoop-1, hadoop-2 or hadoop-3. Choose the correct hadoop
-profile depending on the hadoop version used.
+-Dhadoop.version=<version> - default 2.6.0
-Ptez - Bundle tez jars in hive and pig sharelibs. Useful if you want to use tez
+as the execution engine for those applications.
--Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 2.6.0 for hadoop-2 and 3.0.0-SNAPSHOT for hadoop-3
-Dhadoop.auth.version=<version> - defaults to hadoop version
-Ddistcp.version=<version> - defaults to hadoop version
-Dpig.version=<version> - default 0.16.0
--Dpig.classifier=<classifier> - default none
+-Dpig.classifier=<classifier> - default h2
-Dsqoop.version=<version> - default 1.4.3
-Dsqoop.classifier=<classifier> - default hadoop100
-Djetty.version=<version> - default 9.2.19.v20160908
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/docs/src/site/twiki/ENG_Building.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/ENG_Building.twiki b/docs/src/site/twiki/ENG_Building.twiki
index 535f1b0..36d77b2 100644
--- a/docs/src/site/twiki/ENG_Building.twiki
+++ b/docs/src/site/twiki/ENG_Building.twiki
@@ -112,9 +112,9 @@ Except for the options marked with =(*)=, the options can be specified in the =t
of the Oozie project. The options marked with =(*)= are used in Maven POMs, thus they don't take effect if
specified in the =test.properties= file (which is loaded by the =XTestCase= class at class initialization time).
-*hadoop.version* =(*)=: indicates the Hadoop version(Hadoop-1 or Hadoop-2) you wish to build Oozie against specifically. It will
-substitute this value in the Oozie POM properties and pull the corresponding Hadoop artifacts from Maven. Default version is 1.2.1
-for Hadoop-1 (the most common case). For Hadoop-2, the version you can pass is *2.6.0*.
+*hadoop.version* =(*)=: indicates the Hadoop version you wish to build Oozie against specifically. It will
+substitute this value in the Oozie POM properties and pull the corresponding Hadoop artifacts from Maven.
+The default version is 2.6.0 and that is the minimum supported Hadoop version.
*generateSite* (*): generates Oozie documentation, default is undefined (no documentation is generated)
@@ -211,15 +211,13 @@ $ bin/mkdistro.sh [-DskipTests]
Running =mkdistro.sh= will create the binary distribution of Oozie. The following options are available to customise
the versions of the dependencies:
-Puber - Bundle required hadoop and hcatalog libraries in oozie war
--P<profile> - default hadoop-2. Valid are hadoop-1, hadoop-2 or hadoop-3. Choose the correct hadoop
-profile depending on the hadoop version used.
+-Dhadoop.version=<version> - default 2.6.0
-Ptez - Bundle tez jars in hive and pig sharelibs. Useful if you want to use tez
as the execution engine for those applications.
--Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 2.6.0 for hadoop-2 and 3.0.0-SNAPSHOT for hadoop-3
-Dhadoop.auth.version=<version> - defaults to hadoop version
-Ddistcp.version=<version> - defaults to hadoop version
-Dpig.version=<version> - default 0.16.0
--Dpig.classifier=<classifier> - default none
+-Dpig.classifier=<classifier> - default h2
-Dsqoop.version=<version> - default 1.4.3
-Dsqoop.classifier=<classifier> - default hadoop100
-jetty.version=<version> - default 9.2.19.v20160908
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/findbugs-filter.xml
----------------------------------------------------------------------
diff --git a/findbugs-filter.xml b/findbugs-filter.xml
new file mode 100644
index 0000000..03ee4d1
--- /dev/null
+++ b/findbugs-filter.xml
@@ -0,0 +1,32 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<FindBugsFilter>
+ <!-- excluded because eventService is static and it cannot be simply rewritten -->
+ <Match>
+ <Class name="org.apache.oozie.command.XCommand" />
+ <Field name="eventService" />
+ <Bug pattern="MS_CANNOT_BE_FINAL" />
+ </Match>
+
+ <!-- excluded due to the complicated usage of keySet()/get() methods -->
+ <Match>
+ <Class name="org.apache.oozie.util.Instrumentation" />
+ <Method name="toString" />
+ <Bug pattern="WMI_WRONG_MAP_ITERATOR" />
+ </Match>
+</FindBugsFilter>
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-auth-1/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-auth-1/pom.xml b/hadooplibs/hadoop-auth-1/pom.xml
deleted file mode 100644
index 85e2298..0000000
--- a/hadooplibs/hadoop-auth-1/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-main</artifactId>
- <version>4.4.0-SNAPSHOT</version>
- <relativePath>../../pom.xml</relativePath>
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-auth</artifactId>
- <version>hadoop-1-4.4.0-SNAPSHOT</version>
- <description>Apache Oozie Hadoop Auth</description>
- <name>Apache Oozie Hadoop Auth ${project.version}</name>
- <packaging>jar</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-core</artifactId>
- <scope>compile</scope>
- </dependency>
- </dependencies>
-</project>
-
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-auth-2/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-auth-2/pom.xml b/hadooplibs/hadoop-auth-2/pom.xml
deleted file mode 100644
index 7202b2e..0000000
--- a/hadooplibs/hadoop-auth-2/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-main</artifactId>
- <version>4.4.0-SNAPSHOT</version>
- <relativePath>../../pom.xml</relativePath>
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-auth</artifactId>
- <version>hadoop-2-4.4.0-SNAPSHOT</version>
- <description>Apache Oozie Hadoop</description>
- <name>Apache Oozie Hadoop Auth ${project.version} Test</name>
- <packaging>jar</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-auth</artifactId>
- <scope>compile</scope>
- </dependency>
- </dependencies>
-</project>
-
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-distcp-1/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-distcp-1/pom.xml b/hadooplibs/hadoop-distcp-1/pom.xml
deleted file mode 100644
index bbc536b..0000000
--- a/hadooplibs/hadoop-distcp-1/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-main</artifactId>
- <version>4.4.0-SNAPSHOT</version>
- <relativePath>../../pom.xml</relativePath>
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-distcp</artifactId>
- <version>hadoop-1-4.4.0-SNAPSHOT</version>
- <description>Apache Oozie Hadoop Distcp ${project.version}</description>
- <name>Apache Oozie Hadoop Distcp ${project.version}</name>
- <packaging>jar</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-tools</artifactId>
- <scope>compile</scope>
- </dependency>
- </dependencies>
-</project>
-
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-distcp-2/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-distcp-2/pom.xml b/hadooplibs/hadoop-distcp-2/pom.xml
deleted file mode 100644
index 2c21b12..0000000
--- a/hadooplibs/hadoop-distcp-2/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-main</artifactId>
- <version>4.4.0-SNAPSHOT</version>
- <relativePath>../../pom.xml</relativePath>
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-distcp</artifactId>
- <version>hadoop-2-4.4.0-SNAPSHOT</version>
- <description>Apache Oozie Hadoop Distcp ${project.version}</description>
- <name>Apache Oozie Hadoop Distcp ${project.version}</name>
- <packaging>jar</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-distcp</artifactId>
- <scope>compile</scope>
- </dependency>
- </dependencies>
-</project>
-
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-distcp-3/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-distcp-3/pom.xml b/hadooplibs/hadoop-distcp-3/pom.xml
deleted file mode 100644
index 39cf9f2..0000000
--- a/hadooplibs/hadoop-distcp-3/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-main</artifactId>
- <version>4.4.0-SNAPSHOT</version>
- <relativePath>../../pom.xml</relativePath>
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-distcp</artifactId>
- <version>hadoop-3-4.4.0-SNAPSHOT</version>
- <description>Apache Oozie Hadoop Distcp ${project.version}</description>
- <name>Apache Oozie Hadoop Distcp ${project.version}</name>
- <packaging>jar</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-distcp</artifactId>
- </dependency>
- </dependencies>
-
-</project>
-
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-utils-1/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-1/pom.xml b/hadooplibs/hadoop-utils-1/pom.xml
deleted file mode 100644
index a53e9bc..0000000
--- a/hadooplibs/hadoop-utils-1/pom.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-main</artifactId>
- <version>4.4.0-SNAPSHOT</version>
- <relativePath>../../pom.xml</relativePath>
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-utils</artifactId>
- <version>hadoop-1-4.4.0-SNAPSHOT</version>
- <description>Apache Oozie Hadoop Utils</description>
- <name>Apache Oozie Hadoop Utils</name>
- <packaging>jar</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-client</artifactId>
- </dependency>
- </dependencies>
-</project>
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-utils-1/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-1/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java b/hadooplibs/hadoop-utils-1/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
deleted file mode 100644
index c0a2377..0000000
--- a/hadooplibs/hadoop-utils-1/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.action.hadoop;
-
-import org.apache.hadoop.conf.Configuration;
-import java.util.Set;
-
-
-public class LauncherMainHadoopUtils {
-
- public static final String CHILD_MAPREDUCE_JOB_TAGS = "oozie.child.mapreduce.job.tags";
- public static final String OOZIE_JOB_LAUNCH_TIME = "oozie.job.launch.time";
-
- private LauncherMainHadoopUtils() {
- }
-
- public static void killChildYarnJobs(Configuration actionConf) {
- // no-op
- }
-
- public static Set<String> getChildJobs(Configuration actionConf) {
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-utils-1/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-1/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java b/hadooplibs/hadoop-utils-1/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
deleted file mode 100644
index 799dffb..0000000
--- a/hadooplibs/hadoop-utils-1/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.hadoop.utils;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import java.io.IOException;
-
-public class HadoopShims {
- FileSystem fs;
-
- public HadoopShims(FileSystem fs) {
- this.fs = fs;
- }
-
- public static boolean isSymlinkSupported() {
- return false;
- }
-
- public Path getSymLinkTarget(Path p) throws IOException {
- return p;
- }
-
- public boolean isSymlink(Path p) throws IOException {
- return false;
- }
-
- public void createSymlink(Path target, Path link, boolean createParent) throws IOException {
- }
-
- public static boolean isYARN() {
- return false;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-utils-2/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-2/pom.xml b/hadooplibs/hadoop-utils-2/pom.xml
deleted file mode 100644
index 7c4a0a1..0000000
--- a/hadooplibs/hadoop-utils-2/pom.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-main</artifactId>
- <version>4.4.0-SNAPSHOT</version>
- <relativePath>../../pom.xml</relativePath>
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-utils</artifactId>
- <version>hadoop-2-4.4.0-SNAPSHOT</version>
- <description>Apache Oozie Hadoop Utils ${project.version}</description>
- <name>Apache Oozie Hadoop Utils ${project.version}</name>
- <packaging>jar</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-client</artifactId>
- <scope>provided</scope>
- </dependency>
- </dependencies>
-</project>
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-utils-2/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-2/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java b/hadooplibs/hadoop-utils-2/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
deleted file mode 100644
index a0b7d62..0000000
--- a/hadooplibs/hadoop-utils-2/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.action.hadoop;
-
-import java.io.IOException;
-import java.lang.String;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
-import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.client.ClientRMProxy;
-import org.apache.hadoop.yarn.client.api.YarnClient;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-
-public class LauncherMainHadoopUtils {
-
- public static final String CHILD_MAPREDUCE_JOB_TAGS = "oozie.child.mapreduce.job.tags";
- public static final String OOZIE_JOB_LAUNCH_TIME = "oozie.job.launch.time";
-
- private LauncherMainHadoopUtils() {
- }
-
- private static Set<ApplicationId> getChildYarnJobs(Configuration actionConf) {
- System.out.println("Fetching child yarn jobs");
- Set<ApplicationId> childYarnJobs = new HashSet<ApplicationId>();
- String tag = actionConf.get(CHILD_MAPREDUCE_JOB_TAGS);
- if (tag == null) {
- System.out.print("Could not find Yarn tags property " + CHILD_MAPREDUCE_JOB_TAGS);
- return childYarnJobs;
- }
- System.out.println("tag id : " + tag);
- long startTime = 0L;
- try {
- if(actionConf.get(OOZIE_JOB_LAUNCH_TIME) != null) {
- startTime = Long.parseLong(actionConf.get(OOZIE_JOB_LAUNCH_TIME));
- }
- else {
- startTime = Long.parseLong(System.getProperty(OOZIE_JOB_LAUNCH_TIME));
- }
- } catch(NumberFormatException nfe) {
- throw new RuntimeException("Could not find Oozie job launch time", nfe);
- }
-
- GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
- gar.setScope(ApplicationsRequestScope.OWN);
- gar.setApplicationTags(Collections.singleton(tag));
- long endTime = System.currentTimeMillis();
- if (startTime > endTime) {
- System.out.println("WARNING: Clock skew between the Oozie server host and this host detected. Please fix this. " +
- "Attempting to work around...");
- // We don't know which one is wrong (relative to the RM), so to be safe, let's assume they're both wrong and add an
- // offset in both directions
- long diff = 2 * (startTime - endTime);
- startTime = startTime - diff;
- endTime = endTime + diff;
- }
- gar.setStartRange(startTime, endTime);
- try {
- ApplicationClientProtocol proxy = ClientRMProxy.createRMProxy(actionConf, ApplicationClientProtocol.class);
- GetApplicationsResponse apps = proxy.getApplications(gar);
- List<ApplicationReport> appsList = apps.getApplicationList();
- for(ApplicationReport appReport : appsList) {
- childYarnJobs.add(appReport.getApplicationId());
- }
- } catch (IOException ioe) {
- throw new RuntimeException("Exception occurred while finding child jobs", ioe);
- } catch (YarnException ye) {
- throw new RuntimeException("Exception occurred while finding child jobs", ye);
- }
-
- System.out.println("Child yarn jobs are found - " + StringUtils.join(childYarnJobs, ","));
- return childYarnJobs;
- }
-
- public static void killChildYarnJobs(Configuration actionConf) {
- try {
- Set<ApplicationId> childYarnJobs = getChildYarnJobs(actionConf);
- if (!childYarnJobs.isEmpty()) {
- System.out.println();
- System.out.println("Found [" + childYarnJobs.size() + "] Map-Reduce jobs from this launcher");
- System.out.println("Killing existing jobs and starting over:");
- YarnClient yarnClient = YarnClient.createYarnClient();
- yarnClient.init(actionConf);
- yarnClient.start();
- for (ApplicationId app : childYarnJobs) {
- System.out.print("Killing job [" + app + "] ... ");
- yarnClient.killApplication(app);
- System.out.println("Done");
- }
- System.out.println();
- }
- } catch (YarnException ye) {
- throw new RuntimeException("Exception occurred while killing child job(s)", ye);
- } catch (IOException ioe) {
- throw new RuntimeException("Exception occurred while killing child job(s)", ioe);
- }
- }
-
- public static Set<String> getChildJobs(Configuration actionConf) {
- Set<String> jobList = new HashSet<String>();
- for(ApplicationId applicationId :getChildYarnJobs(actionConf)) {
- jobList.add(applicationId.toString().replace("application", "job"));
- }
- return jobList;
- }
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-utils-2/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-2/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java b/hadooplibs/hadoop-utils-2/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
deleted file mode 100644
index c21884c..0000000
--- a/hadooplibs/hadoop-utils-2/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.hadoop.utils;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
-import java.io.IOException;
-import java.net.URI;
-
-public class HadoopShims {
- FileSystem fs;
-
- public HadoopShims(FileSystem fs) {
- this.fs = fs;
- }
-
- public static boolean isSymlinkSupported() {
- return true;
- }
-
- public Path getSymLinkTarget(Path p) throws IOException {
- try {
- //getSymlink doesn't work with fragment name, need to remove fragment before calling getSymlink
- Path tempPath = new URI(p.toString()).getFragment() == null ? p : new Path(new URI(p.toString()).getPath());
- return fs.getFileLinkStatus(tempPath).getSymlink();
- }
- catch (java.net.URISyntaxException e) {
- throw new IOException(e);
- }
- }
-
- public boolean isSymlink(Path p) throws IOException {
- try {
- //isSymlink doesn't work with fragment name, need to remove fragment before checking for symlink
- Path tempPath = new URI(p.toString()).getFragment() == null ? p : new Path(new URI(p.toString()).getPath());
- return fs.getFileLinkStatus(tempPath).isSymlink();
- }
- catch (java.net.URISyntaxException e) {
- throw new IOException(e);
- }
- }
-
- public void createSymlink(Path target, Path link, boolean createParent) throws IOException {
- fs.createSymlink(target, link, createParent);
- }
-
- public static boolean isYARN() {
- return true;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-utils-3/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-3/pom.xml b/hadooplibs/hadoop-utils-3/pom.xml
deleted file mode 100644
index 2f36f8b..0000000
--- a/hadooplibs/hadoop-utils-3/pom.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-main</artifactId>
- <version>4.4.0-SNAPSHOT</version>
- <relativePath>../../pom.xml</relativePath>
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-utils</artifactId>
- <version>hadoop-3-4.4.0-SNAPSHOT</version>
- <description>Apache Oozie Hadoop Utils ${project.version}</description>
- <name>Apache Oozie Hadoop Utils ${project.version}</name>
- <packaging>jar</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-client</artifactId>
- <scope>provided</scope>
- </dependency>
- </dependencies>
-</project>
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java b/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
deleted file mode 100644
index 0cf2e90..0000000
--- a/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.action.hadoop;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.commons.lang.StringUtils;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
-import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.client.ClientRMProxy;
-import org.apache.hadoop.yarn.client.api.YarnClient;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-
-public class LauncherMainHadoopUtils {
-
- public static final String CHILD_MAPREDUCE_JOB_TAGS = "oozie.child.mapreduce.job.tags";
- public static final String OOZIE_JOB_LAUNCH_TIME = "oozie.job.launch.time";
-
- private LauncherMainHadoopUtils() {
- }
-
- private static Set<ApplicationId> getChildYarnJobs(Configuration actionConf) {
- System.out.println("Fetching child yarn jobs");
- Set<ApplicationId> childYarnJobs = new HashSet<ApplicationId>();
- String tag = actionConf.get(CHILD_MAPREDUCE_JOB_TAGS);
- if (tag == null) {
- System.out.print("Could not find Yarn tags property " + CHILD_MAPREDUCE_JOB_TAGS);
- return childYarnJobs;
- }
- System.out.println("tag id : " + tag);
- long startTime = 0L;
- try {
- if(actionConf.get(OOZIE_JOB_LAUNCH_TIME) != null) {
- startTime = Long.parseLong(actionConf.get(OOZIE_JOB_LAUNCH_TIME));
- }
- else {
- startTime = Long.parseLong(System.getProperty(OOZIE_JOB_LAUNCH_TIME));
- }
- } catch(NumberFormatException nfe) {
- throw new RuntimeException("Could not find Oozie job launch time", nfe);
- }
-
- GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
- gar.setScope(ApplicationsRequestScope.OWN);
- gar.setApplicationTags(Collections.singleton(tag));
- long endTime = System.currentTimeMillis();
- if (startTime > endTime) {
- System.out.println("WARNING: Clock skew between the Oozie server host and this host detected. Please fix this. " +
- "Attempting to work around...");
- // We don't know which one is wrong (relative to the RM), so to be safe, let's assume they're both wrong and add an
- // offset in both directions
- long diff = 2 * (startTime - endTime);
- startTime = startTime - diff;
- endTime = endTime + diff;
- }
- gar.setStartRange(startTime, endTime);
- try {
- ApplicationClientProtocol proxy = ClientRMProxy.createRMProxy(actionConf, ApplicationClientProtocol.class);
- GetApplicationsResponse apps = proxy.getApplications(gar);
- List<ApplicationReport> appsList = apps.getApplicationList();
- for(ApplicationReport appReport : appsList) {
- childYarnJobs.add(appReport.getApplicationId());
- }
- } catch (IOException ioe) {
- throw new RuntimeException("Exception occurred while finding child jobs", ioe);
- } catch (YarnException ye) {
- throw new RuntimeException("Exception occurred while finding child jobs", ye);
- }
-
- System.out.println("Child yarn jobs are found - " + StringUtils.join(childYarnJobs, ","));
- return childYarnJobs;
- }
-
- public static void killChildYarnJobs(Configuration actionConf) {
- try {
- Set<ApplicationId> childYarnJobs = getChildYarnJobs(actionConf);
- if (!childYarnJobs.isEmpty()) {
- System.out.println();
- System.out.println("Found [" + childYarnJobs.size() + "] Map-Reduce jobs from this launcher");
- System.out.println("Killing existing jobs and starting over:");
- YarnClient yarnClient = YarnClient.createYarnClient();
- yarnClient.init(actionConf);
- yarnClient.start();
- for (ApplicationId app : childYarnJobs) {
- System.out.print("Killing job [" + app + "] ... ");
- yarnClient.killApplication(app);
- System.out.println("Done");
- }
- System.out.println();
- }
- } catch (YarnException ye) {
- throw new RuntimeException("Exception occurred while killing child job(s)", ye);
- } catch (IOException ioe) {
- throw new RuntimeException("Exception occurred while killing child job(s)", ioe);
- }
- }
-
- public static Set<String> getChildJobs(Configuration actionConf) {
- Set<String> jobList = new HashSet<String>();
- for(ApplicationId applicationId :getChildYarnJobs(actionConf)) {
- jobList.add(applicationId.toString().replace("application", "job"));
- }
- return jobList;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java b/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
deleted file mode 100644
index c21884c..0000000
--- a/hadooplibs/hadoop-utils-3/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.hadoop.utils;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
-import java.io.IOException;
-import java.net.URI;
-
-public class HadoopShims {
- FileSystem fs;
-
- public HadoopShims(FileSystem fs) {
- this.fs = fs;
- }
-
- public static boolean isSymlinkSupported() {
- return true;
- }
-
- public Path getSymLinkTarget(Path p) throws IOException {
- try {
- //getSymlink doesn't work with fragment name, need to remove fragment before calling getSymlink
- Path tempPath = new URI(p.toString()).getFragment() == null ? p : new Path(new URI(p.toString()).getPath());
- return fs.getFileLinkStatus(tempPath).getSymlink();
- }
- catch (java.net.URISyntaxException e) {
- throw new IOException(e);
- }
- }
-
- public boolean isSymlink(Path p) throws IOException {
- try {
- //isSymlink doesn't work with fragment name, need to remove fragment before checking for symlink
- Path tempPath = new URI(p.toString()).getFragment() == null ? p : new Path(new URI(p.toString()).getPath());
- return fs.getFileLinkStatus(tempPath).isSymlink();
- }
- catch (java.net.URISyntaxException e) {
- throw new IOException(e);
- }
- }
-
- public void createSymlink(Path target, Path link, boolean createParent) throws IOException {
- fs.createSymlink(target, link, createParent);
- }
-
- public static boolean isYARN() {
- return true;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/hadooplibs/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/pom.xml b/hadooplibs/pom.xml
deleted file mode 100644
index 06801cc..0000000
--- a/hadooplibs/pom.xml
+++ /dev/null
@@ -1,82 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-main</artifactId>
- <version>4.4.0-SNAPSHOT</version>
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadooplibs</artifactId>
- <version>4.4.0-SNAPSHOT</version>
- <description>Apache Oozie Hadoop Libs</description>
- <name>Apache Oozie Hadoop Libs</name>
- <packaging>pom</packaging>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-deploy-plugin</artifactId>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- </plugins>
- </build>
-
- <profiles>
- <profile>
- <id>hadoop-1</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <modules>
- <module>hadoop-utils-1</module>
- <module>hadoop-distcp-1</module>
- <module>hadoop-auth-1</module>
- </modules>
- </profile>
- <profile>
- <id>hadoop-2</id>
- <activation>
- <activeByDefault>true</activeByDefault>
- </activation>
- <modules>
- <module>hadoop-utils-2</module>
- <module>hadoop-distcp-2</module>
- <module>hadoop-auth-2</module>
- </modules>
- </profile>
- <profile>
- <id>hadoop-3</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <modules>
- <module>hadoop-distcp-3</module>
- <module>hadoop-auth-3</module>
- <module>hadoop-utils-3</module>
- </modules>
- </profile>
- </profiles>
-</project>
-
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index c4a1c25..fd57444 100644
--- a/pom.xml
+++ b/pom.xml
@@ -81,28 +81,27 @@
<hadoop.version>2.6.0</hadoop.version>
<hadoop.majorversion>2</hadoop.majorversion>
<hadooplib.version>hadoop-${hadoop.majorversion}-${project.version}</hadooplib.version>
- <oozie.hadoop-utils.version>hadoop-${hadoop.majorversion}-${project.version}</oozie.hadoop-utils.version>
<hbase.version>0.94.27</hbase.version>
<dropwizard.metrics.version>3.1.2</dropwizard.metrics.version>
<clover.license>/home/jenkins/tools/clover/latest/lib/clover.license</clover.license>
+
<!-- Sharelib component versions -->
<hive.version>1.2.0</hive.version>
<hive.jline.version>2.12</hive.jline.version>
- <hive.classifier>core</hive.classifier>
<pig.version>0.16.0</pig.version>
- <pig.classifier></pig.classifier>
+ <pig.classifier>h2</pig.classifier>
+ <hive.classifier>core</hive.classifier>
<sqoop.version>1.4.3</sqoop.version>
<spark.version>1.6.1</spark.version>
<spark.streaming.kafka.version>1.6.1</spark.streaming.kafka.version>
<spark.bagel.version>1.6.1</spark.bagel.version>
<spark.guava.version>14.0.1</spark.guava.version>
<spark.scala.binary.version>2.10</spark.scala.binary.version>
- <sqoop.classifier>hadoop100</sqoop.classifier>
+ <sqoop.classifier>hadoop200</sqoop.classifier>
<streaming.version>${hadoop.version}</streaming.version>
- <distcp.version>${hadoop.version}</distcp.version>
<hadoop.auth.version>${hadoop.version}</hadoop.auth.version>
<tez.version>0.8.4</tez.version>
@@ -112,7 +111,7 @@
<openjpa.version>2.4.1</openjpa.version>
<xerces.version>2.10.0</xerces.version>
<curator.version>2.5.0</curator.version>
- <jackson.version>1.8.8</jackson.version>
+ <jackson.version>1.9.13</jackson.version>
<log4j.version>1.2.17</log4j.version>
<activemq.version>5.13.3</activemq.version>
<httpcore.version>4.3.3</httpcore.version>
@@ -121,7 +120,6 @@
</properties>
<modules>
- <module>hadooplibs</module>
<module>client</module>
<module>core</module>
<module>sharelib</module>
@@ -280,7 +278,7 @@
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
- <version>4.10</version>
+ <version>4.11</version>
<scope>test</scope>
</dependency>
@@ -313,7 +311,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
- <scope>provided</scope>
</dependency>
<dependency>
@@ -324,13 +321,6 @@
</dependency>
<dependency>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-auth</artifactId>
- <version>${hadooplib.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<version>${hadoop.auth.version}</version>
@@ -349,6 +339,18 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<version>${hadoop.auth.version}</version>
<scope>compile</scope>
@@ -443,7 +445,7 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-distcp</artifactId>
- <version>${distcp.version}</version>
+ <version>${hadoop.version}</version>
<scope>compile</scope>
<exclusions>
<exclusion>
@@ -456,7 +458,7 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-tools</artifactId>
- <version>${distcp.version}</version>
+ <version>${hadoop.version}</version>
<scope>compile</scope>
<exclusions>
<exclusion>
@@ -467,13 +469,6 @@
</dependency>
<dependency>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-distcp</artifactId>
- <version>${hadooplib.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
<groupId>org.apache.hive.hcatalog</groupId>
<artifactId>hive-webhcat-java-client</artifactId>
<version>${hive.version}</version>
@@ -1350,6 +1345,18 @@
</dependency>
<dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-core</artifactId>
+ <version>1.10.19</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.hamcrest</groupId>
+ <artifactId>hamcrest-all</artifactId>
+ <version>1.3</version>
+ </dependency>
+
+ <dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-core</artifactId>
<version>1.6.4</version>
@@ -1519,12 +1526,6 @@
</dependency>
<dependency>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-utils</artifactId>
- <version>${oozie.hadoop-utils.version}</version>
- </dependency>
-
- <dependency>
<groupId>dk.brics.automaton</groupId>
<artifactId>automaton</artifactId>
<version>1.11-8</version>
@@ -1722,6 +1723,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
+ <version>3.0.1</version>
<configuration>
<excludeSubProjects>false</excludeSubProjects>
<xmlOutput>true</xmlOutput>
@@ -1732,6 +1734,7 @@
<threshold>Low</threshold>
<xmlOutput>true</xmlOutput>
<findbugsXmlOutputDirectory>${project.build.directory}/findbugs</findbugsXmlOutputDirectory>
+ <excludeFilterFile>findbugs-filter.xml</excludeFilterFile>
</configuration>
<executions>
<execution>
@@ -1937,47 +1940,6 @@
</properties>
</profile>
<profile>
- <id>hadoop-1</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <properties>
- <hadoop.version>1.2.1</hadoop.version>
- <hadoop.majorversion>1</hadoop.majorversion>
- <pig.classifier></pig.classifier>
- <sqoop.classifier>hadoop100</sqoop.classifier>
- <hive.version>0.13.1</hive.version>
- <hive.classifier></hive.classifier>
- <hive.jline.version>0.9.94</hive.jline.version>
- </properties>
- </profile>
- <profile>
- <id>hadoop-2</id>
- <activation>
- <activeByDefault>true</activeByDefault>
- </activation>
- <properties>
- <hadoop.version>2.6.0</hadoop.version>
- <hadoop.majorversion>2</hadoop.majorversion>
- <pig.classifier>h2</pig.classifier>
- <sqoop.classifier>hadoop200</sqoop.classifier>
- <jackson.version>1.9.13</jackson.version>
- </properties>
- </profile>
- <profile>
- <id>hadoop-3</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <properties>
- <hadoop.version>3.0.0-SNAPSHOT</hadoop.version>
- <hadoop.majorversion>3</hadoop.majorversion>
- <pig.classifier>h2</pig.classifier>
- <sqoop.classifier>hadoop200</sqoop.classifier>
- <jackson.version>1.9.13</jackson.version>
- </properties>
- </profile>
- <profile>
<id>spark-1</id>
<activation>
<activeByDefault>true</activeByDefault>
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index ccf8a12..8964944 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,7 @@
-- Oozie 4.4.0 release (trunk - unreleased)
+OOZIE-1770 Create Oozie Application Master for YARN (pbacsko, rkanter, gezapeti)
+OOZIE-2316 Drop support for Hadoop 1 and 0.23 (asasvari)
OOZIE-2908 Fix typo in oozie.actions.null.args.allowed property in oozie-default.xml (gezapeti)
OOZIE-2888 Upgrade commons-io to 2.4 (dbist13 via pbacsko)
OOZIE-2872 Address backward compatibility issue introduced by OOZIE-2748 (pbacsko)
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/sharelib/distcp/pom.xml
----------------------------------------------------------------------
diff --git a/sharelib/distcp/pom.xml b/sharelib/distcp/pom.xml
index 1f3c0cc..57b1303 100644
--- a/sharelib/distcp/pom.xml
+++ b/sharelib/distcp/pom.xml
@@ -39,8 +39,8 @@
<dependencies>
<dependency>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-distcp</artifactId>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-distcp</artifactId>
<scope>compile</scope>
</dependency>
@@ -91,18 +91,6 @@
<outputFile>${project.build.directory}/classpath</outputFile>
</configuration>
</execution>
- <execution>
- <id>create-mrapp-generated-classpath</id>
- <phase>generate-test-resources</phase>
- <goals>
- <goal>build-classpath</goal>
- </goals>
- <configuration>
- <!-- needed to run the unit test for DS to generate the required classpath
- that is required in the env of the launch container in the mini mr/yarn cluster -->
- <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
- </configuration>
- </execution>
</executions>
</plugin>
<plugin>
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
----------------------------------------------------------------------
diff --git a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
index 65e7c5e..d7cf9d9 100644
--- a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
+++ b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
@@ -53,7 +53,7 @@ public class DistcpMain extends JavaMain {
protected void run(String[] args) throws Exception {
Configuration actionConf = loadActionConf();
- LauncherMainHadoopUtils.killChildYarnJobs(actionConf);
+ LauncherMain.killChildYarnJobs(actionConf);
String logFile = setUpDistcpLog4J(actionConf);
Class<?> klass = actionConf.getClass(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS,
org.apache.hadoop.tools.DistCp.class);