Posted to commits@oozie.apache.org by ro...@apache.org on 2016/09/19 16:55:09 UTC

[1/2] oozie git commit: OOZIE-2675 Drop support for hadoop 0.23 (abhishekbafna via rohini)

Repository: oozie
Updated Branches:
  refs/heads/master 9dc474e83 -> f86107155


OOZIE-2675 Drop support for hadoop 0.23 (abhishekbafna via rohini)


Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/2fd64fa6
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/2fd64fa6
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/2fd64fa6

Branch: refs/heads/master
Commit: 2fd64fa646708ae684f2274729a0df6623598709
Parents: 9dc474e
Author: Rohini Palaniswamy <ro...@apache.org>
Authored: Mon Sep 19 09:53:25 2016 -0700
Committer: Rohini Palaniswamy <ro...@apache.org>
Committed: Mon Sep 19 09:53:25 2016 -0700

----------------------------------------------------------------------
 .../oozie/action/hadoop/HadoopELFunctions.java  |  5 --
 .../oozie/command/wf/SubmitMRXCommand.java      |  3 --
 .../java/org/apache/oozie/util/JobUtils.java    |  2 +-
 .../java/org/apache/oozie/test/XTestCase.java   |  4 +-
 docs/src/site/twiki/DG_QuickStart.twiki         |  5 +-
 .../site/twiki/DG_SqoopActionExtension.twiki    |  2 +-
 docs/src/site/twiki/ENG_Building.twiki          |  5 +-
 hadooplibs/hadoop-auth-0.23/pom.xml             | 45 -----------------
 hadooplibs/hadoop-distcp-0.23/pom.xml           | 43 -----------------
 hadooplibs/hadoop-utils-0.23/pom.xml            | 42 ----------------
 .../action/hadoop/LauncherMainHadoopUtils.java  | 35 --------------
 .../apache/oozie/hadoop/utils/HadoopShims.java  | 51 --------------------
 hadooplibs/pom.xml                              | 11 -----
 pom.xml                                         | 12 -----
 release-log.txt                                 |  1 +
 15 files changed, 8 insertions(+), 258 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java b/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
index c322887..ad2a71d 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/HadoopELFunctions.java
@@ -52,11 +52,6 @@ public class HadoopELFunctions {
         Map<String, Map<String, Long>> counters = (Map<String, Map<String, Long>>) obj;
         if (counters == null) {
             counters = getCounters(nodeName);
-            // In Hadoop 0.23 they deprecated 'org.apache.hadoop.mapred.Task$Counter' and they REMOVED IT
-            // Here we are getting the new Name and inserting it using the old name if the old name is not found
-            if (counters.get(RECORDS) == null) {
-                counters.put(RECORDS, counters.get(RECORDS_023));
-            }
             instance.setTransientVar(nodeName + WorkflowInstance.NODE_VAR_SEPARATOR + HADOOP_COUNTERS, counters);
         }
         return counters;
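
For context, the counters map assembled above backs the hadoop:counters() workflow EL function, and
RECORDS is the counter-group constant whose Hadoop 0.23 alias (RECORDS_023) is being dropped here.
In a workflow definition the map is typically consumed through an EL expression along these lines
(illustrative node and counter names, following the workflow functional specification):

    ${hadoop:counters('mr-node')[RECORDS][REDUCE_OUT] gt 0}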

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java b/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
index 9124a45..cc61d3d 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
@@ -43,9 +43,6 @@ public class SubmitMRXCommand extends SubmitHttpXCommand {
         SKIPPED_CONFS.add(WorkflowAppService.HADOOP_USER);
         SKIPPED_CONFS.add(XOozieClient.JT);
         SKIPPED_CONFS.add(XOozieClient.NN);
-        // a brillant mind made a change in Configuration that 'fs.default.name' key gets converted to 'fs.defaultFS'
-        // in Hadoop 0.23, we need skip that one too, keeping the old one because of Hadoop 1
-        SKIPPED_CONFS.add(XOozieClient.NN_2);
 
         DEPRECATE_MAP.put(XOozieClient.NN, XOozieClient.NN_2);
         DEPRECATE_MAP.put(XOozieClient.JT, XOozieClient.JT_2);
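
With the 0.23-specific skip gone, the surviving DEPRECATE_MAP entries still translate old-style
submission keys to their current Hadoop names before the configuration is passed on. A minimal
sketch of that idea (a hypothetical standalone helper; only the fs.default.name -> fs.defaultFS
pair comes from the comment removed above):

    import java.util.HashMap;
    import java.util.Map;

    // Sketch: map deprecated client-side configuration keys to their current names
    // before copying them into the submission configuration.
    class DeprecatedKeySketch {
        private static final Map<String, String> DEPRECATE_MAP = new HashMap<>();
        static {
            DEPRECATE_MAP.put("fs.default.name", "fs.defaultFS");
        }

        static String canonicalKey(String key) {
            // Unknown keys pass through unchanged.
            return DEPRECATE_MAP.getOrDefault(key, key);
        }
    }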

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/core/src/main/java/org/apache/oozie/util/JobUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/util/JobUtils.java b/core/src/main/java/org/apache/oozie/util/JobUtils.java
index a4d7272..a7a53b3 100644
--- a/core/src/main/java/org/apache/oozie/util/JobUtils.java
+++ b/core/src/main/java/org/apache/oozie/util/JobUtils.java
@@ -162,7 +162,7 @@ public class JobUtils {
             URI uri = fs.makeQualified(file).toUri();
             DistributedCache.addCacheFile(uri, conf);
         }
-        else { // Hadoop 0.23/2.x
+        else { // Hadoop 2.x
             DistributedCache.addFileToClassPath(file, conf, fs);
         }
     }
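
With the 0.23 branch removed, the else path now covers only Hadoop 2.x, where
DistributedCache.addFileToClassPath(Path, Configuration, FileSystem) registers the file using the
supplied file system. A hedged usage sketch of this utility (assuming the enclosing method is
JobUtils.addFileToClassPath(Path, Configuration, FileSystem); the HDFS path is made up for
illustration):

    // Sketch: put a jar that already sits in HDFS onto the action's classpath.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    JobUtils.addFileToClassPath(new Path("/user/oozie/libs/example-lib.jar"), conf, fs);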

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index 8ce44f3..3dd99d7 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -967,9 +967,7 @@ public abstract class XTestCase extends TestCase {
       conf.set("dfs.permissions", "true");
       conf.set("hadoop.security.authentication", "simple");
 
-      //Doing this because Hadoop 1.x does not support '*' and
-      //Hadoop 0.23.x does not process wildcard if the value is
-      // '*,127.0.0.1'
+      //Doing this because Hadoop 1.x does not support '*' if the value is '*,127.0.0.1'
       StringBuilder sb = new StringBuilder();
       sb.append("127.0.0.1,localhost");
       for (InetAddress i : InetAddress.getAllByName(InetAddress.getLocalHost().getHostName())) {

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/docs/src/site/twiki/DG_QuickStart.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/DG_QuickStart.twiki b/docs/src/site/twiki/DG_QuickStart.twiki
index 2cfaa51..0653f08 100644
--- a/docs/src/site/twiki/DG_QuickStart.twiki
+++ b/docs/src/site/twiki/DG_QuickStart.twiki
@@ -41,10 +41,9 @@ suitable when same oozie package needs to be used in multiple set-ups with diffe
 
 2. Build with -Puber which will bundle the required libraries in the oozie war. Further, the following options are
 available to customise the versions of the dependencies:
--P<profile> - default hadoop-1. Valid are hadoop-1, hadoop-0.23, hadoop-2 or hadoop-3. Choose the correct hadoop
+-P<profile> - default hadoop-1. Valid are hadoop-1, hadoop-2 or hadoop-3. Choose the correct hadoop
 profile depending on the hadoop version used.
--Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 0.23.5 for hadoop-0.23, 2.3.0 for hadoop-2 and 3.0.0-SNAPSHOT
-    for hadoop-3
+-Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 2.3.0 for hadoop-2 and 3.0.0-SNAPSHOT for hadoop-3
 -Dhadoop.auth.version=<version> - defaults to hadoop version
 -Ddistcp.version=<version> - defaults to hadoop version
 -Dpig.version=<version> - default 0.16.0
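
Putting those options together, an uber build against a Hadoop 2 cluster would now be invoked
roughly as follows (version number and exact flags are illustrative, not prescribed by this patch):

    $ bin/mkdistro.sh -DskipTests -Puber -Phadoop-2 -Dhadoop.version=2.3.0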

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/docs/src/site/twiki/DG_SqoopActionExtension.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/DG_SqoopActionExtension.twiki b/docs/src/site/twiki/DG_SqoopActionExtension.twiki
index 1e7f2a8..906ba54 100644
--- a/docs/src/site/twiki/DG_SqoopActionExtension.twiki
+++ b/docs/src/site/twiki/DG_SqoopActionExtension.twiki
@@ -10,7 +10,7 @@
 
 ---++ Sqoop Action
 
-*IMPORTANT:* The Sqoop action requires Apache Hadoop 0.23.
+*IMPORTANT:* The Sqoop action requires Apache Hadoop 1.x or 2.x.
 
 The =sqoop= action runs a Sqoop job.
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/docs/src/site/twiki/ENG_Building.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/ENG_Building.twiki b/docs/src/site/twiki/ENG_Building.twiki
index d138817..9864098 100644
--- a/docs/src/site/twiki/ENG_Building.twiki
+++ b/docs/src/site/twiki/ENG_Building.twiki
@@ -212,10 +212,9 @@ $ bin/mkdistro.sh [-DskipTests]
 Running =mkdistro.sh= will create the binary distribution of Oozie. The following options are available to customise
 the versions of the dependencies:
 -Puber - Bundle required hadoop and hcatalog libraries in oozie war
--P<profile> - default hadoop-1. Valid are hadoop-1, hadoop-0.23, hadoop-2 or hadoop-3. Choose the correct hadoop
+-P<profile> - default hadoop-1. Valid are hadoop-1, hadoop-2 or hadoop-3. Choose the correct hadoop
 profile depending on the hadoop version used.
--Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 0.23.5 for hadoop-0.23, 2.3.0 for hadoop-2 and 3.0
-.0-SNAPSHOT for hadoop-3
+-Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 2.3.0 for hadoop-2 and 3.0.0-SNAPSHOT for hadoop-3
 -Dhadoop.auth.version=<version> - defaults to hadoop version
 -Ddistcp.version=<version> - defaults to hadoop version
 -Dpig.version=<version> - default 0.16.0
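
For instance, a distro build that targets Hadoop 2 and pins its dependency versions could be run
roughly like this (version numbers are illustrative only):

    $ bin/mkdistro.sh -DskipTests -Phadoop-2 -Dhadoop.version=2.6.0 -Dpig.version=0.16.0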

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/hadooplibs/hadoop-auth-0.23/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-auth-0.23/pom.xml b/hadooplibs/hadoop-auth-0.23/pom.xml
deleted file mode 100644
index 7b3b466..0000000
--- a/hadooplibs/hadoop-auth-0.23/pom.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.oozie</groupId>
-        <artifactId>oozie-main</artifactId>
-        <version>4.3.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <groupId>org.apache.oozie</groupId>
-    <artifactId>oozie-hadoop-auth</artifactId>
-    <version>hadoop-0.23-4.3.0-SNAPSHOT</version>
-    <description>Apache Oozie Hadoop Auth</description>
-    <name>Apache Oozie Hadoop Auth ${project.version} Test</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-auth</artifactId>
-            <scope>compile</scope>
-        </dependency>
-    </dependencies>
-
-
-</project>
-

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/hadooplibs/hadoop-distcp-0.23/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-distcp-0.23/pom.xml b/hadooplibs/hadoop-distcp-0.23/pom.xml
deleted file mode 100644
index 60c8e02..0000000
--- a/hadooplibs/hadoop-distcp-0.23/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.oozie</groupId>
-        <artifactId>oozie-main</artifactId>
-        <version>4.3.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <groupId>org.apache.oozie</groupId>
-    <artifactId>oozie-hadoop-distcp</artifactId>
-    <version>hadoop-0.23-4.3.0-SNAPSHOT</version>
-    <description>Apache Oozie Hadoop Distcp ${project.version}</description>
-    <name>Apache Oozie Hadoop Distcp ${project.version}</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-distcp</artifactId>
-            <scope>compile</scope>
-        </dependency>
-    </dependencies>
-</project>
-

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/hadooplibs/hadoop-utils-0.23/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-0.23/pom.xml b/hadooplibs/hadoop-utils-0.23/pom.xml
deleted file mode 100644
index a55e647..0000000
--- a/hadooplibs/hadoop-utils-0.23/pom.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.oozie</groupId>
-        <artifactId>oozie-main</artifactId>
-        <version>4.3.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <groupId>org.apache.oozie</groupId>
-    <artifactId>oozie-hadoop-utils</artifactId>
-    <version>hadoop-0.23-4.3.0-SNAPSHOT</version>
-    <description>Apache Oozie Hadoop Utils</description>
-    <name>Apache Oozie Hadoop Utils</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <scope>provided</scope>
-        </dependency>
-    </dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/hadooplibs/hadoop-utils-0.23/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-0.23/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java b/hadooplibs/hadoop-utils-0.23/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
deleted file mode 100644
index dca7820..0000000
--- a/hadooplibs/hadoop-utils-0.23/src/main/java/org/apache/oozie/action/hadoop/LauncherMainHadoopUtils.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.action.hadoop;
-
-import org.apache.hadoop.conf.Configuration;
-
-
-public class LauncherMainHadoopUtils {
-
-    public static final String CHILD_MAPREDUCE_JOB_TAGS = "oozie.child.mapreduce.job.tags";
-    public static final String OOZIE_JOB_LAUNCH_TIME = "oozie.job.launch.time";
-
-    private LauncherMainHadoopUtils() {
-    }
-
-    public static void killChildYarnJobs(Configuration actionConf) {
-        // no-op
-    }
-}

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/hadooplibs/hadoop-utils-0.23/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-0.23/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java b/hadooplibs/hadoop-utils-0.23/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
deleted file mode 100644
index 799dffb..0000000
--- a/hadooplibs/hadoop-utils-0.23/src/main/java/org/apache/oozie/hadoop/utils/HadoopShims.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.hadoop.utils;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import java.io.IOException;
-
-public class HadoopShims {
-    FileSystem fs;
-
-    public HadoopShims(FileSystem fs) {
-        this.fs = fs;
-    }
-
-    public static boolean isSymlinkSupported() {
-        return false;
-    }
-
-    public Path getSymLinkTarget(Path p) throws IOException {
-        return p;
-    }
-
-    public boolean isSymlink(Path p) throws IOException {
-        return false;
-    }
-
-    public void createSymlink(Path target, Path link, boolean createParent) throws IOException {
-    }
-
-    public static boolean isYARN() {
-        return false;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/hadooplibs/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/pom.xml b/hadooplibs/pom.xml
index 17f1f07..cd0f478 100644
--- a/hadooplibs/pom.xml
+++ b/hadooplibs/pom.xml
@@ -56,17 +56,6 @@
             </modules>
         </profile>
         <profile>
-            <id>hadoop-0.23</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <modules>
-                <module>hadoop-utils-0.23</module>
-                <module>hadoop-distcp-0.23</module>
-                <module>hadoop-auth-0.23</module>
-            </modules>
-        </profile>
-        <profile>
             <id>hadoop-2</id>
             <activation>
                 <activeByDefault>false</activeByDefault>

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5a8e5f5..2cbc91f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1797,18 +1797,6 @@
             </properties>
         </profile>
         <profile>
-            <id>hadoop-0.23</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <properties>
-                <hadoop.version>0.23.5</hadoop.version>
-                <hadoop.majorversion>0.23</hadoop.majorversion>
-                <pig.classifier>h2</pig.classifier>
-                <sqoop.classifier>hadoop23</sqoop.classifier>
-            </properties>
-        </profile>
-        <profile>
             <id>hadoop-2</id>
             <activation>
                 <activeByDefault>false</activeByDefault>

http://git-wip-us.apache.org/repos/asf/oozie/blob/2fd64fa6/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index d1e36be..681e5ee 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
 -- Oozie 4.3.0 release (trunk - unreleased)
 
+OOZIE-2675 Drop support for hadoop 0.23 (abhishekbafna via rohini)
 OOZIE-2588 Support getting credentials for same cluster hcat when credentials config is empty (satishsaley via rohini)
 OOZIE-2538 Update HttpClient versions to close security vulnerabilities (abhishekbafna via rkanter)
 OOZIE-2037 Add TLSv1.1,TLSv1.2 (rkanter)


[2/2] oozie git commit: New testcase of OOZIE-2588 which was missed in the first commit

Posted by ro...@apache.org.
New testcase of OOZIE-2588 which was missed in the first commit


Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/f8610715
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/f8610715
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/f8610715

Branch: refs/heads/master
Commit: f86107155453d08d7e21cdfe41bc5bb3916d991b
Parents: 2fd64fa
Author: Rohini Palaniswamy <ro...@apache.org>
Authored: Mon Sep 19 09:55:13 2016 -0700
Committer: Rohini Palaniswamy <ro...@apache.org>
Committed: Mon Sep 19 09:55:13 2016 -0700

----------------------------------------------------------------------
 .../action/hadoop/TestHCatCredentials.java      | 258 +++++++++++++++++++
 1 file changed, 258 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/oozie/blob/f8610715/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatCredentials.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatCredentials.java b/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatCredentials.java
new file mode 100644
index 0000000..e6d43ca
--- /dev/null
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatCredentials.java
@@ -0,0 +1,258 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.action.hadoop;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.HashMap;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.oozie.service.HCatAccessorService;
+import org.apache.oozie.service.ServiceException;
+import org.apache.oozie.service.Services;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ HCatCredentialHelper.class, HCatCredentials.class })
+public class TestHCatCredentials {
+    private Services services;
+    private static File OOZIE_HOME_DIR = null;
+    private static final String TEST_HIVE_METASTORE_PRINCIPAL = "hcat/test-hcat1.com@OOZIE.EXAMPLE.COM";
+    private static final String TEST_HIVE_METASTORE_URI = "thrift://test-hcat1.com:9898";
+    private static final String TEST_HIVE_METASTORE_PRINCIPAL2 = "hcat/test-hcat2.com@OOZIE.EXAMPLE.COM";
+    private static final String TEST_HIVE_METASTORE_URI2 = "thrift://test-hcat2.com:9898";
+    final String HIVE_METASTORE_PRINCIPAL = "hive.principal";
+    final String HIVE_METASTORE_URI = "hive.uri";
+    final String HCAT_METASTORE_PRINCIPAL = "hcat.principal";
+    final String HCAT_METASTORE_URI = "hcat.uri";
+    private static File hiveSiteXml = null;
+    private static ClassLoader prevClassloader = null;
+
+    @BeforeClass
+    public static void initialize() throws Exception {
+        OOZIE_HOME_DIR = new File(new File("").getAbsolutePath(), "test-oozie-home");
+        if (!OOZIE_HOME_DIR.exists()) {
+            OOZIE_HOME_DIR.mkdirs();
+        }
+        System.setProperty(Services.OOZIE_HOME_DIR, OOZIE_HOME_DIR.getAbsolutePath());
+        Services.setOozieHome();
+        File oozieConfDir = new File(OOZIE_HOME_DIR.getAbsolutePath(), "conf");
+        oozieConfDir.mkdir();
+        File hadoopConfDir = new File(oozieConfDir, "hadoop-conf");
+        hadoopConfDir.mkdir();
+        File actionConfDir = new File(oozieConfDir, "action-conf");
+        actionConfDir.mkdir();
+        hiveSiteXml = new File(OOZIE_HOME_DIR, "hive-site.xml");
+        FileWriter fw = new FileWriter(hiveSiteXml);
+        fw.write(getHiveConfig(TEST_HIVE_METASTORE_PRINCIPAL, TEST_HIVE_METASTORE_URI));
+        fw.flush();
+        fw.close();
+        prevClassloader = Thread.currentThread().getContextClassLoader();
+    }
+
+    @Before
+    public void setUp() throws ServiceException, MalformedURLException {
+        services = new Services();
+        @SuppressWarnings("deprecation")
+        Configuration conf = services.getConf();
+        conf.set(Services.CONF_SERVICE_EXT_CLASSES, HCatAccessorService.class.getName());
+        conf.set(Services.CONF_SERVICE_CLASSES, "");
+        ContextClassLoader contextClassLoader = new ContextClassLoader(HCatCredentials.class.getClassLoader());
+        contextClassLoader.addURL(hiveSiteXml.toURI().toURL());
+        Thread.currentThread().setContextClassLoader(contextClassLoader);
+    }
+
+    @After
+    public void tearDown(){
+        if (services != null) {
+            services.destroy();
+        }
+    }
+
+    @AfterClass
+    public static void terminate() throws IOException {
+        FileUtils.deleteDirectory(OOZIE_HOME_DIR);
+        Thread.currentThread().setContextClassLoader(prevClassloader);
+    }
+
+    @Test
+    public void testAddToJobConfFromHCat() throws Exception {
+        File hcatConfig = new File(OOZIE_HOME_DIR, "hcatConf.xml");
+        FileWriter fw = new FileWriter(hcatConfig);
+        fw.write(getHiveConfig(TEST_HIVE_METASTORE_PRINCIPAL2, TEST_HIVE_METASTORE_URI2));
+        fw.flush();
+        fw.close();
+        @SuppressWarnings("deprecation")
+        Configuration conf = services.getConf();
+        conf.set(HCatAccessorService.HCAT_CONFIGURATION, OOZIE_HOME_DIR + "/hcatConf.xml");
+        services.init();
+        HCatCredentialHelper hcatCredHelper = Mockito.mock(HCatCredentialHelper.class);
+        PowerMockito.whenNew(HCatCredentialHelper.class).withNoArguments().thenReturn(hcatCredHelper);
+        CredentialsProperties credProps = new CredentialsProperties("", "");
+        credProps.setProperties(new HashMap<String, String>());
+        HCatCredentials hcatCred = new HCatCredentials();
+        final JobConf jobConf = new JobConf(false);
+        PowerMockito.doAnswer(new Answer<Void>() {
+            @Override
+            public Void answer(InvocationOnMock invocation) throws Throwable {
+                Object[] args = invocation.getArguments();
+                JobConf jConf = (JobConf) args[0];
+                jConf.set(HCAT_METASTORE_PRINCIPAL, (String) args[1]);
+                jConf.set(HCAT_METASTORE_URI, (String) args[2]);
+                return null;
+            }
+        }).when(hcatCredHelper).set(jobConf, TEST_HIVE_METASTORE_PRINCIPAL2, TEST_HIVE_METASTORE_URI2);
+        hcatCred.addtoJobConf(jobConf, credProps, null);
+        assertEquals(TEST_HIVE_METASTORE_PRINCIPAL2, jobConf.get(HCAT_METASTORE_PRINCIPAL));
+        assertEquals(TEST_HIVE_METASTORE_URI2, jobConf.get(HCAT_METASTORE_URI));
+        assertNull(jobConf.get(HIVE_METASTORE_PRINCIPAL));
+        assertNull(jobConf.get(HIVE_METASTORE_URI));
+        hcatConfig.delete();
+    }
+
+    @Test
+    public void testAddToJobConfFromHiveConf() throws Exception {
+        services.init();
+        CredentialsProperties credProps = new CredentialsProperties("", "");
+        credProps.setProperties(new HashMap<String, String>());
+        HCatCredentials hcatCred = new HCatCredentials();
+        final JobConf jobConf = new JobConf(false);
+        HCatCredentialHelper hcatCredHelper = Mockito.mock(HCatCredentialHelper.class);
+        PowerMockito.whenNew(HCatCredentialHelper.class).withNoArguments().thenReturn(hcatCredHelper);
+        PowerMockito.doAnswer(new Answer<Void>() {
+            @Override
+            public Void answer(InvocationOnMock invocation) throws Throwable {
+                Object[] args = invocation.getArguments();
+                JobConf jConf = (JobConf) args[0];
+                jConf.set(HIVE_METASTORE_PRINCIPAL, (String) args[1]);
+                jConf.set(HIVE_METASTORE_URI, (String) args[2]);
+                return null;
+            }
+        }).when(hcatCredHelper).set(jobConf, TEST_HIVE_METASTORE_PRINCIPAL, TEST_HIVE_METASTORE_URI);
+        hcatCred.addtoJobConf(jobConf, credProps, null);
+        assertEquals(TEST_HIVE_METASTORE_PRINCIPAL, jobConf.get(HIVE_METASTORE_PRINCIPAL));
+        assertEquals(TEST_HIVE_METASTORE_URI, jobConf.get(HIVE_METASTORE_URI));
+        assertNull(jobConf.get(HCAT_METASTORE_PRINCIPAL));
+        assertNull(jobConf.get(HCAT_METASTORE_URI));
+    }
+
+    @Test
+    public void testAddToJobConfFromOozieConfig() throws Exception {
+        services.init();
+        HCatCredentialHelper hcatCredHelper = Mockito.mock(HCatCredentialHelper.class);
+        PowerMockito.whenNew(HCatCredentialHelper.class).withNoArguments().thenReturn(hcatCredHelper);
+        CredentialsProperties credProps = new CredentialsProperties("", "");
+        HashMap<String, String> prop = new HashMap<String, String>();
+        prop.put("hcat.metastore.principal", TEST_HIVE_METASTORE_PRINCIPAL2);
+        prop.put("hcat.metastore.uri", TEST_HIVE_METASTORE_URI2);
+        credProps.setProperties(prop);
+        HCatCredentials hcatCred = new HCatCredentials();
+        final JobConf jobConf = new JobConf(false);
+        PowerMockito.doAnswer(new Answer<Void>() {
+            @Override
+            public Void answer(InvocationOnMock invocation) throws Throwable {
+                Object[] args = invocation.getArguments();
+                JobConf jConf = (JobConf) args[0];
+                jConf.set(HCAT_METASTORE_PRINCIPAL, (String) args[1]);
+                jConf.set(HCAT_METASTORE_URI, (String) args[2]);
+                return null;
+            }
+        }).when(hcatCredHelper).set(jobConf, TEST_HIVE_METASTORE_PRINCIPAL2, TEST_HIVE_METASTORE_URI2);
+        hcatCred.addtoJobConf(jobConf, credProps, null);
+        assertEquals(TEST_HIVE_METASTORE_PRINCIPAL2, jobConf.get(HCAT_METASTORE_PRINCIPAL));
+        assertEquals(TEST_HIVE_METASTORE_URI2, jobConf.get(HCAT_METASTORE_URI));
+        assertNull(jobConf.get(HIVE_METASTORE_PRINCIPAL));
+        assertNull(jobConf.get(HIVE_METASTORE_URI));
+    }
+
+    private static String getHiveConfig(String hivePrincipal, String hiveUri) {
+        return "<configuration>"
+                + "<property>"
+                    + "<name>hive.metastore.kerberos.principal</name>"
+                    + "<value>"+ hivePrincipal + "</value>"
+                + "</property>"
+                + "<property>"
+                    + "<name>hive.metastore.uris</name>"
+                    + "<value>" + hiveUri + "</value>"
+                + "</property>"
+                + "</configuration>";
+    }
+
+    private static class ContextClassLoader extends URLClassLoader {
+        // Map the resource name to its url
+        private HashMap<String, URL> resources = new HashMap<String, URL>();
+
+        @Override
+        public URL findResource(String name) {
+            if (resources.containsKey(name)) {
+                return resources.get(name);
+            }
+            return super.findResource(name);
+        }
+
+        @Override
+        public URL getResource(String name) {
+            if (resources.containsKey(name)) {
+                return resources.get(name);
+            }
+            return super.getResource(name);
+        }
+
+        public ContextClassLoader(ClassLoader classLoader) {
+            this(new URL[0], classLoader);
+        }
+
+        public ContextClassLoader(URL[] urls, ClassLoader classLoader) {
+            super(urls, classLoader);
+        }
+
+        @Override
+        public void addURL(URL url) {
+            super.addURL(url);
+            try {
+                resources.put(new Path(url.toURI()).getName(), url);
+            }
+            catch (URISyntaxException e) {
+                e.printStackTrace(System.out);
+            }
+        }
+    };
+}
\ No newline at end of file