You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@oozie.apache.org by ge...@apache.org on 2017/05/26 09:28:02 UTC
[10/10] oozie git commit: OOZIE-1770 Create Oozie Application Master
for YARN (asasvari, pbacsko, rkanter, gezapeti)
OOZIE-1770 Create Oozie Application Master for YARN (asasvari,pbacsko,rkanter,gezapeti)
Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/21761f5b
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/21761f5b
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/21761f5b
Branch: refs/heads/master
Commit: 21761f5b5b2b4457302aef780239610d076047e5
Parents: 53b1d1e
Author: Gezapeti Cseh <ge...@gmail.com>
Authored: Fri May 26 11:03:49 2017 +0200
Committer: Gezapeti Cseh <ge...@gmail.com>
Committed: Fri May 26 11:03:49 2017 +0200
----------------------------------------------------------------------
client/pom.xml | 4 +-
.../java/org/apache/oozie/cli/OozieCLI.java | 4 -
.../org/apache/oozie/client/XOozieClient.java | 20 +-
core/pom.xml | 37 +-
.../java/org/apache/oozie/WorkflowJobBean.java | 1 -
.../org/apache/oozie/action/ActionExecutor.java | 2 +-
.../apache/oozie/action/hadoop/Credentials.java | 39 -
.../action/hadoop/CredentialsProvider.java | 92 +-
.../hadoop/CredentialsProviderFactory.java | 116 +++
.../action/hadoop/DistcpActionExecutor.java | 14 +-
.../oozie/action/hadoop/FsActionExecutor.java | 6 +-
.../oozie/action/hadoop/FsELFunctions.java | 5 +-
.../action/hadoop/HCatCredentialHelper.java | 39 +-
.../oozie/action/hadoop/HCatCredentials.java | 14 +-
.../oozie/action/hadoop/HadoopELFunctions.java | 2 +-
.../oozie/action/hadoop/HbaseCredentials.java | 27 +-
.../action/hadoop/Hive2ActionExecutor.java | 10 +-
.../oozie/action/hadoop/Hive2Credentials.java | 13 +-
.../oozie/action/hadoop/HiveActionExecutor.java | 18 +-
.../oozie/action/hadoop/JavaActionExecutor.java | 902 ++++++++-----------
.../oozie/action/hadoop/LauncherHelper.java | 322 +++++++
.../action/hadoop/LauncherMapperHelper.java | 345 -------
.../action/hadoop/MapReduceActionExecutor.java | 179 ++--
.../oozie/action/hadoop/OozieJobInfo.java | 2 -
.../oozie/action/hadoop/PigActionExecutor.java | 37 +-
.../hadoop/ScriptLanguageActionExecutor.java | 3 +-
.../action/hadoop/ShellActionExecutor.java | 35 +-
.../action/hadoop/SparkActionExecutor.java | 34 +-
.../action/hadoop/SqoopActionExecutor.java | 27 +-
.../oozie/client/rest/BulkResponseImpl.java | 25 +-
.../java/org/apache/oozie/command/XCommand.java | 8 +-
.../oozie/command/wf/SubmitHttpXCommand.java | 15 +-
.../oozie/command/wf/SubmitMRXCommand.java | 6 +-
.../wf/SubmitScriptLanguageXCommand.java | 8 +-
.../oozie/command/wf/SubmitSqoopXCommand.java | 6 +-
.../AbstractCoordInputDependency.java | 7 +-
.../hcat/EhcacheHCatDependencyCache.java | 8 +-
.../oozie/service/CallableQueueService.java | 18 +-
.../oozie/service/EventHandlerService.java | 17 +-
.../oozie/service/HadoopAccessorService.java | 167 +++-
.../java/org/apache/oozie/service/Services.java | 16 +-
.../apache/oozie/service/ShareLibService.java | 48 +-
.../apache/oozie/servlet/CallbackServlet.java | 3 +-
.../org/apache/oozie/store/OozieSchema.java | 11 +-
.../org/apache/oozie/util/ClasspathUtils.java | 142 +++
.../java/org/apache/oozie/util/FSUtils.java | 53 ++
.../java/org/apache/oozie/util/IOUtils.java | 49 +-
.../java/org/apache/oozie/util/JobUtils.java | 21 +-
.../src/main/resources/META-INF/persistence.xml | 8 +-
core/src/main/resources/oozie-default.xml | 33 -
core/src/main/resources/oozie-log4j.properties | 4 +-
.../java/org/apache/oozie/QueryServlet.java | 40 +
.../oozie/action/hadoop/BlockingMapper.java | 52 ++
.../oozie/action/hadoop/CredentialForTest.java | 15 +-
.../oozie/action/hadoop/InsertTestToken.java | 14 +-
.../oozie/action/hadoop/LauncherMainTester.java | 1 +
.../action/hadoop/MapperReducerForTest.java | 10 +-
.../action/hadoop/TestDistCpActionExecutor.java | 29 +-
.../action/hadoop/TestFSPrepareActions.java | 10 +-
.../action/hadoop/TestFsActionExecutor.java | 4 +-
.../action/hadoop/TestHCatCredentials.java | 34 +-
.../action/hadoop/TestHCatPrepareActions.java | 2 +-
.../action/hadoop/TestJavaActionExecutor.java | 886 ++----------------
.../oozie/action/hadoop/TestJavaMain.java | 2 +-
.../oozie/action/hadoop/TestLauncher.java | 158 ++--
.../hadoop/TestLauncherAMCallbackNotifier.java | 204 +++++
.../action/hadoop/TestMapReduceActionError.java | 173 ----
.../oozie/action/hadoop/TestOozieJobInfo.java | 23 +-
.../action/hadoop/TestPrepareActionsDriver.java | 19 +-
.../action/hadoop/TestShellActionExecutor.java | 78 +-
.../oozie/action/hadoop/TestShellMain.java | 5 +-
.../org/apache/oozie/client/TestOozieCLI.java | 6 +-
.../oozie/client/TestWorkflowXClient.java | 12 +-
.../command/coord/TestCoordChangeXCommand.java | 7 +
.../apache/oozie/command/wf/HangServlet.java | 19 +-
.../command/wf/TestActionCheckXCommand.java | 42 +-
.../command/wf/TestActionStartXCommand.java | 43 +-
.../command/wf/TestSubmitHiveXCommand.java | 8 +-
.../oozie/command/wf/TestSubmitMRXCommand.java | 8 +-
.../oozie/command/wf/TestSubmitPigXCommand.java | 14 +-
.../command/wf/TestSubmitSqoopXCommand.java | 6 +-
.../wf/TestWorkflowActionKillXCommand.java | 47 +-
.../oozie/service/TestConfigurationService.java | 7 +-
.../service/TestHadoopAccessorService.java | 115 ++-
.../oozie/service/TestRecoveryService.java | 35 +-
.../oozie/service/TestShareLibService.java | 55 +-
.../org/apache/oozie/test/XDataTestCase.java | 2 +-
.../java/org/apache/oozie/test/XFsTestCase.java | 28 +
.../java/org/apache/oozie/test/XTestCase.java | 63 +-
.../apache/oozie/util/TestClasspathUtils.java | 107 +++
.../lite/TestLiteWorkflowAppParser.java | 1 -
distro/src/main/bin/addtowar.sh | 20 +-
docs/src/site/twiki/DG_QuickStart.twiki | 6 +-
docs/src/site/twiki/ENG_Building.twiki | 12 +-
findbugs-filter.xml | 32 +
hadooplibs/hadoop-auth-1/pom.xml | 43 -
hadooplibs/hadoop-auth-2/pom.xml | 43 -
hadooplibs/hadoop-distcp-1/pom.xml | 43 -
hadooplibs/hadoop-distcp-2/pom.xml | 43 -
hadooplibs/hadoop-distcp-3/pom.xml | 43 -
hadooplibs/hadoop-utils-1/pom.xml | 41 -
.../action/hadoop/LauncherMainHadoopUtils.java | 40 -
.../apache/oozie/hadoop/utils/HadoopShims.java | 51 --
hadooplibs/hadoop-utils-2/pom.xml | 42 -
.../action/hadoop/LauncherMainHadoopUtils.java | 131 ---
.../apache/oozie/hadoop/utils/HadoopShims.java | 68 --
hadooplibs/hadoop-utils-3/pom.xml | 42 -
.../action/hadoop/LauncherMainHadoopUtils.java | 131 ---
.../apache/oozie/hadoop/utils/HadoopShims.java | 68 --
hadooplibs/pom.xml | 82 --
pom.xml | 106 +--
release-log.txt | 2 +
sharelib/distcp/pom.xml | 16 +-
.../apache/oozie/action/hadoop/DistcpMain.java | 2 +-
sharelib/hcatalog/pom.xml | 12 -
sharelib/hive/pom.xml | 24 +-
.../apache/oozie/action/hadoop/HiveMain.java | 20 +-
.../action/hadoop/TestHiveActionExecutor.java | 56 +-
.../oozie/action/hadoop/TestHiveMain.java | 4 +-
sharelib/hive2/pom.xml | 18 +-
.../apache/oozie/action/hadoop/Hive2Main.java | 21 +-
.../action/hadoop/TestHive2ActionExecutor.java | 118 +--
sharelib/oozie/pom.xml | 24 +-
.../action/hadoop/AMRMCallBackHandler.java | 72 ++
.../action/hadoop/AMRMClientAsyncFactory.java | 32 +
.../apache/oozie/action/hadoop/ActionUtils.java | 49 +
.../apache/oozie/action/hadoop/ErrorHolder.java | 56 ++
.../oozie/action/hadoop/HdfsOperations.java | 124 +++
.../apache/oozie/action/hadoop/JavaMain.java | 15 +-
.../apache/oozie/action/hadoop/LauncherAM.java | 614 +++++++++++++
.../hadoop/LauncherAMCallbackNotifier.java | 177 ++++
.../LauncherAMCallbackNotifierFactory.java | 27 +
.../oozie/action/hadoop/LauncherMain.java | 150 ++-
.../oozie/action/hadoop/LauncherMapper.java | 67 +-
.../oozie/action/hadoop/LocalFsOperations.java | 100 ++
.../oozie/action/hadoop/MapReduceMain.java | 38 +-
.../action/hadoop/PrepareActionsDriver.java | 51 +-
.../action/hadoop/PrepareActionsHandler.java | 100 ++
.../hadoop/SequenceFileWriterFactory.java | 35 +
.../apache/oozie/action/hadoop/ShellMain.java | 8 +-
.../action/hadoop/LauncherAMTestMainClass.java | 48 +
.../oozie/action/hadoop/TestHdfsOperations.java | 116 +++
.../oozie/action/hadoop/TestLauncherAM.java | 641 +++++++++++++
sharelib/pig/pom.xml | 22 +-
.../org/apache/oozie/action/hadoop/PigMain.java | 9 +-
.../oozie/action/hadoop/PigMainWithOldAPI.java | 4 +-
.../action/hadoop/TestPigActionExecutor.java | 120 +--
.../action/hadoop/TestPigMainWithOldAPI.java | 5 +-
sharelib/spark/pom.xml | 232 ++---
.../apache/oozie/action/hadoop/SparkMain.java | 13 +-
.../apache/oozie/action/hadoop/TestPyspark.java | 19 +-
.../action/hadoop/TestSparkActionExecutor.java | 21 +-
sharelib/sqoop/pom.xml | 17 -
.../apache/oozie/action/hadoop/SqoopMain.java | 13 +-
.../action/hadoop/TestSqoopActionExecutor.java | 88 +-
sharelib/streaming/pom.xml | 6 +
.../oozie/action/hadoop/StreamingMain.java | 4 +-
.../hadoop/TestMapReduceActionExecutor.java | 390 ++++----
.../apache/oozie/tools/OozieSharelibCLI.java | 33 +-
webapp/pom.xml | 8 +-
160 files changed, 5465 insertions(+), 4869 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/client/pom.xml
----------------------------------------------------------------------
diff --git a/client/pom.xml b/client/pom.xml
index f99366c..84e2f98 100644
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -74,8 +74,8 @@
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-auth</artifactId>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-auth</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/client/src/main/java/org/apache/oozie/cli/OozieCLI.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/oozie/cli/OozieCLI.java b/client/src/main/java/org/apache/oozie/cli/OozieCLI.java
index 38fb84e..4adf1a8 100644
--- a/client/src/main/java/org/apache/oozie/cli/OozieCLI.java
+++ b/client/src/main/java/org/apache/oozie/cli/OozieCLI.java
@@ -1845,10 +1845,6 @@ public class OozieCLI {
private void slaCommand(CommandLine commandLine) throws IOException, OozieCLIException {
XOozieClient wc = createXOozieClient(commandLine);
- List<String> options = new ArrayList<String>();
- for (Option option : commandLine.getOptions()) {
- options.add(option.getOpt());
- }
String s = commandLine.getOptionValue(OFFSET_OPTION);
int start = Integer.parseInt((s != null) ? s : "0");
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/client/src/main/java/org/apache/oozie/client/XOozieClient.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/oozie/client/XOozieClient.java b/client/src/main/java/org/apache/oozie/client/XOozieClient.java
index 8538ec7..379819b 100644
--- a/client/src/main/java/org/apache/oozie/client/XOozieClient.java
+++ b/client/src/main/java/org/apache/oozie/client/XOozieClient.java
@@ -33,19 +33,10 @@ import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
public class XOozieClient extends OozieClient {
-
- public static final String JT = "mapred.job.tracker";
- public static final String JT_2 = "mapreduce.jobtracker.address";
-
+ public static final String RM = "yarn.resourcemanager.address";
public static final String NN = "fs.default.name";
public static final String NN_2 = "fs.defaultFS";
- @Deprecated
- public static final String JT_PRINCIPAL = "mapreduce.jobtracker.kerberos.principal";
-
- @Deprecated
- public static final String NN_PRINCIPAL = "dfs.namenode.kerberos.principal";
-
public static final String PIG_SCRIPT = "oozie.pig.script";
public static final String PIG_OPTIONS = "oozie.pig.options";
@@ -123,12 +114,9 @@ public class XOozieClient extends OozieClient {
}
private void validateHttpSubmitConf(Properties conf) {
- String JT = conf.getProperty(XOozieClient.JT);
- String JT_2 = conf.getProperty(XOozieClient.JT_2);
- if (JT == null) {
- if(JT_2 == null) {
- throw new RuntimeException("jobtracker is not specified in conf");
- }
+ String RM = conf.getProperty(XOozieClient.RM);
+ if (RM == null) {
+ throw new RuntimeException("Resource manager is not specified in conf");
}
String NN = conf.getProperty(XOozieClient.NN);
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index 14aa034..e48dcd9 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -92,6 +92,22 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
</dependency>
@@ -182,8 +198,8 @@
</dependency>
<dependency>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-distcp</artifactId>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-distcp</artifactId>
<scope>test</scope>
</dependency>
@@ -512,23 +528,6 @@
</configuration>
</plugin>
<plugin>
- <artifactId>maven-dependency-plugin</artifactId>
- <executions>
- <execution>
- <id>create-mrapp-generated-classpath</id>
- <phase>generate-test-resources</phase>
- <goals>
- <goal>build-classpath</goal>
- </goals>
- <configuration>
- <!-- needed to run the unit test for DS to generate the required classpath
- that is required in the env of the launch container in the mini mr/yarn cluster -->
- <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
<groupId>org.apache.openjpa</groupId>
<artifactId>openjpa-maven-plugin</artifactId>
<executions>
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/WorkflowJobBean.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/WorkflowJobBean.java b/core/src/main/java/org/apache/oozie/WorkflowJobBean.java
index 55d79a5..2042063 100644
--- a/core/src/main/java/org/apache/oozie/WorkflowJobBean.java
+++ b/core/src/main/java/org/apache/oozie/WorkflowJobBean.java
@@ -462,7 +462,6 @@ public class WorkflowJobBean implements Writable, WorkflowJob, JsonBean {
return pInstance;
}
- @SuppressWarnings("unchecked")
public JSONObject toJSONObject() {
return toJSONObject("GMT");
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/ActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/ActionExecutor.java b/core/src/main/java/org/apache/oozie/action/ActionExecutor.java
index 1d6456b..919509d 100644
--- a/core/src/main/java/org/apache/oozie/action/ActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/ActionExecutor.java
@@ -596,7 +596,7 @@ public abstract class ActionExecutor {
* @param action the action
* @return the action yarn tag
*/
- public String getActionYarnTag(Configuration conf, WorkflowJob wfJob, WorkflowAction action) {
+ public static String getActionYarnTag(Configuration conf, WorkflowJob wfJob, WorkflowAction action) {
if (conf.get(OOZIE_ACTION_YARN_TAG) != null) {
return conf.get(OOZIE_ACTION_YARN_TAG) + "@" + action.getName();
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/Credentials.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/Credentials.java b/core/src/main/java/org/apache/oozie/action/hadoop/Credentials.java
deleted file mode 100644
index eadb47b..0000000
--- a/core/src/main/java/org/apache/oozie/action/hadoop/Credentials.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.action.hadoop;
-
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.oozie.action.ActionExecutor.Context;
-
-@SuppressWarnings("deprecation")
-public abstract class Credentials {
-
- /**
- * This is the interface for all the Credentials implementation. Any new credential implementation must implement
- * this function. This function should modify the jobconf which will be used further to pass the credentials to the
- * tasks while running it. Credentials properties and context is also provided by that user can get all the
- * necessary configuration.
- *
- * @param jobconf job conf where the token will insert into
- * @param props properties for getting credential token or certificate
- * @param context workflow context
- * @throws Exception thrown if failed
- */
- public abstract void addtoJobConf(JobConf jobconf, CredentialsProperties props, Context context) throws Exception;
-}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProvider.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProvider.java b/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProvider.java
index 6fe22fb..d49da90 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProvider.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProvider.java
@@ -19,86 +19,24 @@
package org.apache.oozie.action.hadoop;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.oozie.service.ConfigurationService;
-import org.apache.oozie.service.Services;
-import org.apache.oozie.util.XLog;
+import org.apache.hadoop.security.Credentials;
+import org.apache.oozie.action.ActionExecutor.Context;
-import java.io.IOException;
+public interface CredentialsProvider {
-public class CredentialsProvider {
- Credentials cred;
- String type;
- public static final String CRED_KEY = "oozie.credentials.credentialclasses";
- private static final XLog LOG = XLog.getLog(CredentialsProvider.class);
-
- public CredentialsProvider(String type) {
- this.type = type;
- this.cred = null;
- LOG.debug("Credentials Provider is created for Type: " + type);
- }
-
- /**
- * Create Credential object
- *
- * @return Credential object
- * @throws Exception
- */
- public Credentials createCredentialObject() throws Exception {
- String type;
- String classname;
- for (String function : ConfigurationService.getStrings(CRED_KEY)) {
- function = Trim(function);
- LOG.debug("Creating Credential class for : " + function);
- String[] str = function.split("=");
- if (str.length > 0) {
- type = str[0];
- classname = str[1];
- if (classname != null) {
- LOG.debug("Creating Credential type : '" + type + "', class Name : '" + classname + "'");
- if (this.type.equalsIgnoreCase(str[0])) {
- Class<?> klass = null;
- try {
- klass = Thread.currentThread().getContextClassLoader().loadClass(classname);
- }
- catch (ClassNotFoundException ex) {
- LOG.warn("Exception while loading the class", ex);
- throw ex;
- }
-
- cred = (Credentials) ReflectionUtils.newInstance(klass, null);
- }
- }
- }
- }
- return cred;
- }
-
- /**
- * Relogs into Kerberos using the Keytab for the Oozie server user. This should be called before attempting to get delegation
- * tokens via {@link Credentials} implementations to ensure that the Kerberos credentials are current and won't expire too soon.
+ /**
+ * This is the interface for all the Credentials implementations. Any new credential implementation must implement
+ * this function. This function should modify the configuration which will be used further to pass the credentials to the
+ * tasks while running it. Credentials properties and context are also provided so that the user can get all the
+ * necessary configuration.
*
- * @throws IOException
- */
- public static void ensureKerberosLogin() throws IOException {
- LOG.debug("About to relogin from keytab");
- UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
- LOG.debug("Relogin from keytab successful");
- }
-
- /**
- * To trim string
+ * @param credentials the credentials object which is updated
+ * @param config launcher AM configuration
+ * @param props properties for getting credential token or certificate
+ * @param context workflow context
+ * @throws Exception thrown if failed
*
- * @param str
- * @return trim string
*/
- public String Trim(String str) {
- if (str != null) {
- str = str.replaceAll("\\n", "");
- str = str.replaceAll("\\t", "");
- str = str.trim();
- }
- return str;
- }
+ public void updateCredentials(Credentials credentials, Configuration config, CredentialsProperties props, Context context)
+ throws Exception;
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProviderFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProviderFactory.java b/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProviderFactory.java
new file mode 100644
index 0000000..ddffc79
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/CredentialsProviderFactory.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.action.hadoop;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.oozie.service.ConfigurationService;
+import org.apache.oozie.util.XLog;
+
+public class CredentialsProviderFactory {
+ public static final String CRED_KEY = "oozie.credentials.credentialclasses";
+ private static final XLog LOG = XLog.getLog(CredentialsProviderFactory.class);
+ private static CredentialsProviderFactory instance;
+ private final Map<String, Class<CredentialsProvider>> providerCache;
+
+ @VisibleForTesting
+ static void destroy() {
+ instance = null;
+ }
+
+ public static CredentialsProviderFactory getInstance() throws Exception {
+ if(instance == null) {
+ instance = new CredentialsProviderFactory();
+ }
+ return instance;
+ }
+
+ private CredentialsProviderFactory() throws Exception {
+ providerCache = new HashMap<>();
+ for (String function : ConfigurationService.getStrings(CRED_KEY)) {
+ function = trim(function);
+ LOG.debug("Creating Credential class for : " + function);
+ String[] str = function.split("=");
+ if (str.length > 0) {
+ String type = str[0];
+ String classname = str[1];
+ if (classname != null) {
+ LOG.debug("Creating Credential type : '{0}', class Name : '{1}'", type, classname);
+ Class<?> klass = null;
+ try {
+ klass = Thread.currentThread().getContextClassLoader().loadClass(classname);
+ }
+ catch (ClassNotFoundException ex) {
+ LOG.warn("Exception while loading the class '{0}'", classname, ex);
+ throw ex;
+ }
+ providerCache.put(type, (Class<CredentialsProvider>) klass);
+ } else {
+ LOG.warn("Credential provider class is null for '{0}', skipping", type);
+ }
+ }
+ }
+ }
+
+ /**
+ * Create Credential object
+ *
+ * @return Credential object
+ * @throws Exception
+ */
+ public CredentialsProvider createCredentialsProvider(String type) throws Exception {
+ Class<CredentialsProvider> providerClass = providerCache.get(type);
+ if(providerClass == null){
+ return null;
+ }
+ return providerClass.newInstance();
+ }
+
+ /**
+ * Relogs into Kerberos using the Keytab for the Oozie server user. This should be called before attempting to get delegation
+ * tokens via {@link CredentialsProvider} implementations to ensure that the Kerberos credentials are current and won't expire
+ * too soon.
+ *
+ * @throws IOException
+ */
+ public static void ensureKerberosLogin() throws IOException {
+ LOG.debug("About to relogin from keytab");
+ UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
+ LOG.debug("Relogin from keytab successful");
+ }
+
+ /**
+ * To trim string
+ *
+ * @param str
+ * @return trim string
+ */
+ public String trim(String str) {
+ if (str != null) {
+ str = str.replaceAll("\\n", "");
+ str = str.replaceAll("\\t", "");
+ str = str.trim();
+ }
+ return str;
+ }
+}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
index 78cd257..20f47d5 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/DistcpActionExecutor.java
@@ -43,18 +43,13 @@ public class DistcpActionExecutor extends JavaActionExecutor{
Configuration setupActionConf(Configuration actionConf, Context context, Element actionXml, Path appPath)
throws ActionExecutorException {
actionConf = super.setupActionConf(actionConf, context, actionXml, appPath);
- String classNameDistcp = CONF_OOZIE_DISTCP_ACTION_MAIN_CLASS;
- String name = getClassNamebyType(DISTCP_TYPE);
- if(name != null){
- classNameDistcp = name;
- }
actionConf.set(JavaMain.JAVA_MAIN_CLASS, DISTCP_MAIN_CLASS_NAME);
return actionConf;
}
@Override
- public List<Class> getLauncherClasses() {
- List<Class> classes = new ArrayList<Class>();
+ public List<Class<?>> getLauncherClasses() {
+ List<Class<?>> classes = new ArrayList<Class<?>>();
try {
classes.add(Class.forName(CONF_OOZIE_DISTCP_ACTION_MAIN_CLASS));
}
@@ -112,6 +107,11 @@ public class DistcpActionExecutor extends JavaActionExecutor{
}
@Override
+ protected boolean needToAddMapReduceToClassPath() {
+ return true;
+ }
+
+ @Override
protected String getLauncherMain(Configuration launcherConf, Element actionXml) {
return launcherConf.get(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS, CONF_OOZIE_DISTCP_ACTION_MAIN_CLASS);
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/FsActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/FsActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/FsActionExecutor.java
index 2765474..7f7c676 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/FsActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/FsActionExecutor.java
@@ -267,7 +267,7 @@ public class FsActionExecutor extends ActionExecutor {
FileStatus pathStatus = fs.getFileStatus(path);
List<Path> paths = new ArrayList<Path>();
- if (dirFiles && pathStatus.isDir()) {
+ if (dirFiles && pathStatus.isDirectory()) {
if (isRoot) {
paths.add(path);
}
@@ -275,7 +275,7 @@ public class FsActionExecutor extends ActionExecutor {
for (int i = 0; i < filesStatus.length; i++) {
Path p = filesStatus[i].getPath();
paths.add(p);
- if (recursive && filesStatus[i].isDir()) {
+ if (recursive && filesStatus[i].isDirectory()) {
recursiveFsOperation(op, fs, null, p, argsMap, dirFiles, recursive, false);
}
}
@@ -556,7 +556,7 @@ public class FsActionExecutor extends ActionExecutor {
FileStatus st;
if (fs.exists(path)) {
st = fs.getFileStatus(path);
- if (st.isDir()) {
+ if (st.isDirectory()) {
throw new Exception(path.toString() + " is a directory");
} else if (st.getLen() != 0) {
throw new Exception(path.toString() + " must be a zero-length file");
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/FsELFunctions.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/FsELFunctions.java b/core/src/main/java/org/apache/oozie/action/hadoop/FsELFunctions.java
index 801bfe6..c16f560 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/FsELFunctions.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/FsELFunctions.java
@@ -43,7 +43,6 @@ public class FsELFunctions {
private static FileSystem getFileSystem(URI uri) throws HadoopAccessorException {
WorkflowJob workflow = DagELFunctions.getWorkflow();
String user = workflow.getUser();
- String group = workflow.getGroup();
HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
JobConf conf = has.createJobConf(uri.getAuthority());
return has.createFileSystem(user, uri, conf);
@@ -98,7 +97,7 @@ public class FsELFunctions {
boolean isDir = false;
FileStatus fileStatus = getFileStatus(pathUri);
if (fileStatus != null) {
- isDir = fileStatus.isDir();
+ isDir = fileStatus.isDirectory();
}
return isDir;
}
@@ -138,7 +137,7 @@ public class FsELFunctions {
size = 0;
if (stati != null) {
for (FileStatus status : stati) {
- if (!status.isDir()) {
+ if (!status.isDirectory()) {
size += status.getLen();
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/HCatCredentialHelper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/HCatCredentialHelper.java b/core/src/main/java/org/apache/oozie/action/hadoop/HCatCredentialHelper.java
index 36ad1df..9804c7b 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/HCatCredentialHelper.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/HCatCredentialHelper.java
@@ -18,14 +18,14 @@
package org.apache.oozie.action.hadoop;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.SaslRpcServer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.SaslRpcServer;
import org.apache.hive.hcatalog.api.HCatClient;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.oozie.util.XLog;
@@ -33,7 +33,7 @@ import org.apache.oozie.util.XLog;
/**
* Helper class to handle the HCat credentials
* Performs internally the heavy-lifting of fetching delegation tokens from Hive Metastore, abstracted from the user
- * Token is added to jobConf
+ * Token is added to the credentials
*/
public class HCatCredentialHelper {
@@ -45,28 +45,29 @@ public class HCatCredentialHelper {
private static final String HADOOP_RPC_PROTECTION = "hadoop.rpc.protection";
/**
- * This Function will set the HCat token to jobconf
- * @param launcherJobConf - job conf
+ * This Function will set the HCat token to the credentials
+ * @param credentials - the credentials
+ * @param launcherConfig - launcher configuration
* @param principal - principal for HCat server
* @param server - Server URI for HCat server
* @throws Exception
*/
- public void set(JobConf launcherJobConf, String principal, String server) throws Exception {
+ public void set(Credentials credentials, Configuration launcherConfig, String principal, String server) throws Exception {
HCatClient client = null;
try {
- client = getHCatClient(launcherJobConf, principal, server);
+ client = getHCatClient(launcherConfig, principal, server);
XLog.getLog(getClass()).debug(
- "HCatCredentialHelper: set: User name for which token will be asked from HCat: "
- + launcherJobConf.get(USER_NAME));
- String tokenStrForm = client.getDelegationToken(launcherJobConf.get(USER_NAME), UserGroupInformation
+ "HCatCredentialHelper: set: User name for which token will be asked from HCat: {0}",
+ launcherConfig.get(USER_NAME));
+ String tokenStrForm = client.getDelegationToken(launcherConfig.get(USER_NAME), UserGroupInformation
.getLoginUser().getShortUserName());
Token<DelegationTokenIdentifier> hcatToken = new Token<DelegationTokenIdentifier>();
hcatToken.decodeFromUrlString(tokenStrForm);
- launcherJobConf.getCredentials().addToken(new Text("HCat Token"), hcatToken);
- XLog.getLog(getClass()).debug("Added the HCat token in job conf");
+ credentials.addToken(new Text("HCat Token"), hcatToken);
+ XLog.getLog(getClass()).debug("Added the HCat token to launcher configuration");
}
catch (Exception ex) {
- XLog.getLog(getClass()).debug("set Exception" + ex.getMessage());
+ XLog.getLog(getClass()).debug("set Exception {0}", ex.getMessage());
throw ex;
}
finally {
@@ -78,28 +79,28 @@ public class HCatCredentialHelper {
/**
* Getting the HCat client.
- * @param launcherJobConf
+ * @param launcherConfig
* @param principal
* @param server
* @return HCatClient
* @throws HCatException
*/
- public HCatClient getHCatClient(JobConf launcherJobConf,
+ public HCatClient getHCatClient(Configuration launcherConfig,
String principal, String server) throws HCatException {
HiveConf hiveConf = null;
HCatClient hiveclient = null;
hiveConf = new HiveConf();
- XLog.getLog(getClass()).debug("getHCatClient: Principal: " + principal + " Server: " + server);
+ XLog.getLog(getClass()).debug("getHCatClient: Principal: {0} Server: {1}", principal, server);
// specified a thrift url
hiveConf.set(HIVE_METASTORE_SASL_ENABLED, "true");
hiveConf.set(HIVE_METASTORE_KERBEROS_PRINCIPAL, principal);
hiveConf.set(HIVE_METASTORE_LOCAL, "false");
hiveConf.set(HiveConf.ConfVars.METASTOREURIS.varname, server);
- String protection = launcherJobConf.get(HADOOP_RPC_PROTECTION,
+ String protection = launcherConfig.get(HADOOP_RPC_PROTECTION,
SaslRpcServer.QualityOfProtection.AUTHENTICATION.name()
.toLowerCase());
- XLog.getLog(getClass()).debug("getHCatClient, setting rpc protection to " + protection);
+ XLog.getLog(getClass()).debug("getHCatClient, setting rpc protection to {0}", protection);
hiveConf.set(HADOOP_RPC_PROTECTION, protection);
hiveclient = HCatClient.create(hiveConf);
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/HCatCredentials.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/HCatCredentials.java b/core/src/main/java/org/apache/oozie/action/hadoop/HCatCredentials.java
index d7689a9..52abbf1 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/HCatCredentials.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/HCatCredentials.java
@@ -21,16 +21,13 @@ package org.apache.oozie.action.hadoop;
import java.util.HashMap;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.security.Credentials;
import org.apache.oozie.ErrorCode;
import org.apache.oozie.action.ActionExecutor.Context;
import org.apache.oozie.service.HCatAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.util.XLog;
-import com.google.common.annotations.VisibleForTesting;
-
/**
* Credentials implementation to store in jobConf, HCat-specific properties such as Principal and Uri
* User specifies these credential properties along with the action configuration
@@ -39,7 +36,7 @@ import com.google.common.annotations.VisibleForTesting;
* User can extend the parent class to implement own class as well
* for handling custom token-based credentials and add to the above server property
*/
-public class HCatCredentials extends Credentials {
+public class HCatCredentials implements CredentialsProvider {
private static final String HCAT_METASTORE_PRINCIPAL = "hcat.metastore.principal";
private static final String HCAT_METASTORE_URI = "hcat.metastore.uri";
@@ -54,7 +51,8 @@ public class HCatCredentials extends Credentials {
* @see org.apache.oozie.action.hadoop.Credentials#addtoJobConf(org.apache.hadoop.mapred.JobConf, org.apache.oozie.action.hadoop.CredentialsProperties, org.apache.oozie.action.ActionExecutor.Context)
*/
@Override
- public void addtoJobConf(JobConf jobconf, CredentialsProperties props, Context context) throws Exception {
+ public void updateCredentials(Credentials credentials, Configuration config, CredentialsProperties props,
+ Context context) throws Exception {
try {
String principal = getProperty(props.getProperties(), HCAT_METASTORE_PRINCIPAL, HIVE_METASTORE_PRINCIPAL);
@@ -69,7 +67,7 @@ public class HCatCredentials extends Credentials {
HCAT_METASTORE_URI + " is required to get hcat credential");
}
HCatCredentialHelper hcch = new HCatCredentialHelper();
- hcch.set(jobconf, principal, server);
+ hcch.set(credentials, config, principal, server);
}
catch (Exception e) {
XLog.getLog(getClass()).warn("Exception in addtoJobConf", e);
@@ -102,4 +100,6 @@ public class HCatCredentials extends Credentials {
}
return value;
}
+
+
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java b/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
index ad2a71d..777c187 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
@@ -76,7 +76,7 @@ public class HadoopELFunctions {
if (jsonCounters == null) {
throw new IllegalArgumentException(XLog.format("Hadoop counters not available for action [{0}]", nodeName));
}
- return (Map) JSONValue.parse(jsonCounters);
+ return (Map<String, Map<String, Long>>) JSONValue.parse(jsonCounters);
}
}
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/HbaseCredentials.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/HbaseCredentials.java b/core/src/main/java/org/apache/oozie/action/hadoop/HbaseCredentials.java
index 307f565..22b6dc9 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/HbaseCredentials.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/HbaseCredentials.java
@@ -27,14 +27,11 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier;
import org.apache.hadoop.hbase.security.token.TokenUtil;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.oozie.action.ActionExecutor.Context;
-import org.apache.oozie.action.hadoop.Credentials;
-import org.apache.oozie.action.hadoop.CredentialsProperties;
-import org.apache.oozie.util.XLog;
+import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.oozie.action.ActionExecutor.Context;
+import org.apache.oozie.util.XLog;
/**
@@ -43,17 +40,16 @@ import org.apache.hadoop.security.token.TokenIdentifier;
* Oozie server should be configured to use this Credentials class by including it via property 'oozie.credentials.credentialclasses'
*
*/
-public class HbaseCredentials extends Credentials {
-
-
+public class HbaseCredentials implements CredentialsProvider {
/* (non-Javadoc)
* @see org.apache.oozie.action.hadoop.Credentials#addtoJobConf(org.apache.hadoop.mapred.JobConf, org.apache.oozie.action.hadoop.CredentialsProperties, org.apache.oozie.action.ActionExecutor.Context)
*/
@Override
- public void addtoJobConf(JobConf jobConf, CredentialsProperties props, Context context) throws Exception {
+ public void updateCredentials(Credentials credentials, Configuration config, CredentialsProperties props,
+ Context context) throws Exception {
try {
- copyHbaseConfToJobConf(jobConf, props);
- obtainToken(jobConf, context);
+ copyHbaseConfToJobConf(config, props);
+ obtainToken(credentials, config, context);
}
catch (Exception e) {
XLog.getLog(getClass()).warn("Exception in receiving hbase credentials", e);
@@ -61,7 +57,7 @@ public class HbaseCredentials extends Credentials {
}
}
- void copyHbaseConfToJobConf(JobConf jobConf, CredentialsProperties props) {
+ void copyHbaseConfToJobConf(Configuration jobConf, CredentialsProperties props) {
// Create configuration using hbase-site.xml/hbase-default.xml
Configuration hbaseConf = new Configuration(false);
HBaseConfiguration.addHbaseResources(hbaseConf);
@@ -74,7 +70,8 @@ public class HbaseCredentials extends Credentials {
injectConf(hbaseConf, jobConf);
}
- private void obtainToken(final JobConf jobConf, Context context) throws IOException, InterruptedException {
+ private void obtainToken(Credentials credentials, final Configuration jobConf, Context context)
+ throws IOException, InterruptedException {
String user = context.getWorkflow().getUser();
UserGroupInformation ugi = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());
User u = User.create(ugi);
@@ -87,7 +84,7 @@ public class HbaseCredentials extends Credentials {
}
}
);
- jobConf.getCredentials().addToken(token.getService(), token);
+ credentials.addToken(token.getService(), token);
}
private void addPropsConf(CredentialsProperties props, Configuration destConf) {
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
index 2aed936..35277ae 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/Hive2ActionExecutor.java
@@ -47,8 +47,8 @@ public class Hive2ActionExecutor extends ScriptLanguageActionExecutor {
}
@Override
- public List<Class> getLauncherClasses() {
- List<Class> classes = new ArrayList<Class>();
+ public List<Class<?>> getLauncherClasses() {
+ List<Class<?>> classes = new ArrayList<Class<?>>();
try {
classes.add(Class.forName(HIVE2_MAIN_CLASS_NAME));
}
@@ -106,7 +106,7 @@ public class Hive2ActionExecutor extends ScriptLanguageActionExecutor {
for (int i = 0; i < params.size(); i++) {
strParams[i] = params.get(i).getTextTrim();
}
- MapReduceMain.setStrings(conf, HIVE2_PARAMS, strParams);
+ ActionUtils.setStrings(conf, HIVE2_PARAMS, strParams);
String[] strArgs = null;
List<Element> eArgs = actionXml.getChildren("argument", ns);
@@ -116,12 +116,12 @@ public class Hive2ActionExecutor extends ScriptLanguageActionExecutor {
strArgs[i] = eArgs.get(i).getTextTrim();
}
}
- MapReduceMain.setStrings(conf, HIVE2_ARGS, strArgs);
+ ActionUtils.setStrings(conf, HIVE2_ARGS, strArgs);
return conf;
}
- /**
+ /**
* Return the sharelib name for the action.
*
* @return returns <code>hive2</code>.
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/Hive2Credentials.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/Hive2Credentials.java b/core/src/main/java/org/apache/oozie/action/hadoop/Hive2Credentials.java
index bd2f1f7..0b495f7 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/Hive2Credentials.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/Hive2Credentials.java
@@ -20,9 +20,11 @@ package org.apache.oozie.action.hadoop;
import java.sql.Connection;
import java.sql.DriverManager;
+
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hive.jdbc.HiveConnection;
import org.apache.oozie.ErrorCode;
@@ -37,14 +39,15 @@ import org.apache.oozie.util.XLog;
* User can extend the parent class to implement own class as well
* for handling custom token-based credentials and add to the above server property
*/
-public class Hive2Credentials extends Credentials {
+public class Hive2Credentials implements CredentialsProvider {
private static final String USER_NAME = "user.name";
private static final String HIVE2_JDBC_URL = "hive2.jdbc.url";
private static final String HIVE2_SERVER_PRINCIPAL = "hive2.server.principal";
@Override
- public void addtoJobConf(JobConf jobconf, CredentialsProperties props, Context context) throws Exception {
+ public void updateCredentials(Credentials credentials, Configuration config, CredentialsProperties props,
+ Context context) throws Exception {
try {
// load the driver
Class.forName("org.apache.hive.jdbc.HiveDriver");
@@ -66,7 +69,7 @@ public class Hive2Credentials extends Credentials {
con = DriverManager.getConnection(url);
XLog.getLog(getClass()).debug("Connected successfully to " + url);
// get delegation token for the given proxy user
- tokenStr = ((HiveConnection)con).getDelegationToken(jobconf.get(USER_NAME), principal);
+ tokenStr = ((HiveConnection)con).getDelegationToken(config.get(USER_NAME), principal);
} finally {
if (con != null) {
con.close();
@@ -76,7 +79,7 @@ public class Hive2Credentials extends Credentials {
Token<DelegationTokenIdentifier> hive2Token = new Token<DelegationTokenIdentifier>();
hive2Token.decodeFromUrlString(tokenStr);
- jobconf.getCredentials().addToken(new Text("hive.server2.delegation.token"), hive2Token);
+ credentials.addToken(new Text("hive.server2.delegation.token"), hive2Token);
XLog.getLog(getClass()).debug("Added the Hive Server 2 token in job conf");
}
catch (Exception e) {
http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java b/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
index 8e2453e..22378fc 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/HiveActionExecutor.java
@@ -20,7 +20,6 @@ package org.apache.oozie.action.hadoop;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobConf;
import org.apache.oozie.action.ActionExecutorException;
import org.apache.oozie.client.XOozieClient;
import org.apache.oozie.service.ConfigurationService;
@@ -49,8 +48,8 @@ public class HiveActionExecutor extends ScriptLanguageActionExecutor {
}
@Override
- public List<Class> getLauncherClasses() {
- List<Class> classes = new ArrayList<Class>();
+ public List<Class<?>> getLauncherClasses() {
+ List<Class<?>> classes = new ArrayList<Class<?>>();
try {
classes.add(Class.forName(HIVE_MAIN_CLASS_NAME));
}
@@ -98,7 +97,7 @@ public class HiveActionExecutor extends ScriptLanguageActionExecutor {
for (int i = 0; i < params.size(); i++) {
strParams[i] = params.get(i).getTextTrim();
}
- MapReduceMain.setStrings(conf, HIVE_PARAMS, strParams);
+ ActionUtils.setStrings(conf, HIVE_PARAMS, strParams);
String[] strArgs = null;
List<Element> eArgs = actionXml.getChildren("argument", ns);
@@ -108,7 +107,7 @@ public class HiveActionExecutor extends ScriptLanguageActionExecutor {
strArgs[i] = eArgs.get(i).getTextTrim();
}
}
- MapReduceMain.setStrings(conf, HIVE_ARGS, strArgs);
+ ActionUtils.setStrings(conf, HIVE_ARGS, strArgs);
return conf;
}
@@ -133,10 +132,15 @@ public class HiveActionExecutor extends ScriptLanguageActionExecutor {
}
@Override
- protected JobConf loadHadoopDefaultResources(Context context, Element actionXml) {
+ protected boolean needToAddMapReduceToClassPath() {
+ return true;
+ }
+
+ @Override
+ protected Configuration loadHadoopDefaultResources(Context context, Element actionXml) {
boolean loadDefaultResources = ConfigurationService
.getBoolean(HadoopAccessorService.ACTION_CONFS_LOAD_DEFAULT_RESOURCES);
- JobConf conf = super.createBaseHadoopConf(context, actionXml, loadDefaultResources);
+ Configuration conf = super.createBaseHadoopConf(context, actionXml, loadDefaultResources);
return conf;
}
}