Posted to commits@ambari.apache.org by ao...@apache.org on 2015/08/04 18:34:57 UTC

ambari git commit: Revert "HADOOP-11764. NodeManager should use directory other than tmp for extracting and loading leveldbjni (aonishuk)"

Repository: ambari
Updated Branches:
  refs/heads/trunk de15d42f0 -> a06a32438


Revert "HADOOP-11764. [ HADOOP-11764] NodeManager should use directory other than tmp for extracting and loading leveldbjni (aonishuk)"

This reverts commit 4c27db62466b08962ee6b9dae44cac2fd22a3c8f.
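
For context, the reverted change worked around a leveldbjni limitation: the library extracts its native .so into java.io.tmpdir (/tmp by default) and loads it from there, which fails when /tmp is mounted noexec. The workaround redirected java.io.tmpdir to a dedicated directory instead. As a purely illustrative sketch (not part of this commit), one way to check for a noexec mount on Linux:

    # Illustrative only: read the mount options for the filesystem backing
    # `path` from /proc/mounts and report whether it is mounted noexec.
    def is_noexec(path="/tmp"):
        best_match, noexec = "", False
        with open("/proc/mounts") as mounts:
            for line in mounts:
                _, mount_point, _, options = line.split()[:4]
                # keep the longest mount point that prefixes `path`
                if path.startswith(mount_point) and len(mount_point) > len(best_match):
                    best_match, noexec = mount_point, "noexec" in options.split(",")
        return noexec

    print(is_noexec("/tmp"))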


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a06a3243
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a06a3243
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a06a3243

Branch: refs/heads/trunk
Commit: a06a3243866b5938f6c0972f91222bb0998c776a
Parents: de15d42
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Aug 4 19:34:29 2015 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Aug 4 19:34:29 2015 +0300

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog211.java       | 40 --------------------
 .../HDFS/2.1.0.2.0/configuration/hadoop-env.xml |  4 --
 .../2.1.0.2.0/package/scripts/params_linux.py   |  1 -
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  2 -
 .../before-ANY/scripts/shared_initialization.py |  8 ----
 .../services/HDFS/configuration/hadoop-env.xml  |  4 --
 .../services/HDFS/configuration/hadoop-env.xml  |  4 --
 .../server/upgrade/UpgradeCatalog211Test.java   |  9 -----
 .../2.0.6/hooks/before-ANY/test_before_any.py   |  5 ---
 9 files changed, 77 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a06a3243/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java
index c59c6c7..616d4c4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java
@@ -26,13 +26,9 @@ import org.apache.ambari.server.orm.dao.DaoUtils;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
-import org.apache.commons.lang.StringUtils;
 
-import java.net.URI;
-import java.net.URISyntaxException;
 import java.sql.SQLException;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
 
@@ -98,7 +94,6 @@ public class UpgradeCatalog211 extends AbstractUpgradeCatalog {
   @Override
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     addNewConfigurationsFromXml();
-    addMissingConfigs();
     updateExistingConfigurations();
   }
 
@@ -154,39 +149,4 @@ public class UpgradeCatalog211 extends AbstractUpgradeCatalog {
       updateConfigurationPropertiesForCluster(cluster, "kerberos-env", updates, removes, true, false);
     }
   }
-
-  protected void addMissingConfigs() throws AmbariException {
-    updateHdfsConfigs();
-  }
-
-  protected void updateHdfsConfigs() throws AmbariException {
-    AmbariManagementController ambariManagementController = injector.getInstance(
-        AmbariManagementController.class);
-    Clusters clusters = ambariManagementController.getClusters();
-
-    if (clusters != null) {
-      Map<String, Cluster> clusterMap = clusters.getClusters();
-      Map<String, String> prop = new HashMap<String, String>();
-      String content = null;
-
-      if (clusterMap != null && !clusterMap.isEmpty()) {
-        for (final Cluster cluster : clusterMap.values()) {
-          content = null;
-          if (cluster.getDesiredConfigByType("hadoop-env") != null) {
-            content = cluster.getDesiredConfigByType(
-                "hadoop-env").getProperties().get("content");
-          }
-
-          if (content != null) {
-            content += "\nexport JAVA_LIBRARY_PATH=\"${JAVA_LIBRARY_PATH}:{{hadoop_java_io_tmpdir}}\"";
-            content += "\nexport _JAVA_OPTIONS=\"${_JAVA_OPTIONS} -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}\"\n";
-
-            prop.put("content", content);
-            updateConfigurationPropertiesForCluster(cluster, "hadoop-env",
-                prop, true, false);
-          }
-        }
-      }
-    }
-  }
 }
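
For reference, the removed updateHdfsConfigs() appended two export lines to each cluster's hadoop-env "content" property. A rough Python translation of that logic, with illustrative names (the authoritative code is the Java removed above):

    EXPORTS = (
        '\nexport JAVA_LIBRARY_PATH="${JAVA_LIBRARY_PATH}:{{hadoop_java_io_tmpdir}}"'
        '\nexport _JAVA_OPTIONS="${_JAVA_OPTIONS} -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"\n'
    )

    def add_tmpdir_exports(cluster_configs):
        # cluster_configs: {cluster_name: {"hadoop-env": {"content": str}}}
        for configs in cluster_configs.values():
            hadoop_env = configs.get("hadoop-env")
            if hadoop_env and hadoop_env.get("content") is not None:
                hadoop_env["content"] += EXPORTS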

http://git-wip-us.apache.org/repos/asf/ambari/blob/a06a3243/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
index ab8f4ce..78b9e8a 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
@@ -194,10 +194,6 @@ export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}
 # Path to jsvc required by secure HDP 2.0 datanode
 export JSVC_HOME={{jsvc_path}}
 
-# Handle a situation when /tmp is set to noexec
-export JAVA_LIBRARY_PATH="${JAVA_LIBRARY_PATH}:{{hadoop_java_io_tmpdir}}"
-export _JAVA_OPTIONS="${_JAVA_OPTIONS} -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"
-
 
 # The maximum amount of heap to use, in MB. Default is 1000.
 export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"
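
The two exports removed here work through different mechanisms: JAVA_LIBRARY_PATH extends the native-library search path Hadoop hands to the JVM, while _JAVA_OPTIONS is picked up by every JVM at startup, so the -Djava.io.tmpdir override also reaches child processes. A quick, illustrative way to confirm the _JAVA_OPTIONS behavior (assumes a `java` binary on PATH; the tmpdir value is hypothetical):

    import os
    import subprocess

    env = dict(os.environ,
               _JAVA_OPTIONS="-Djava.io.tmpdir=/var/tmp/hadoop_java_io_tmpdir")
    # The JVM logs "Picked up _JAVA_OPTIONS: ..." and -XshowSettings:properties
    # prints the effective system properties, including java.io.tmpdir.
    subprocess.run(["java", "-XshowSettings:properties", "-version"], env=env)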

http://git-wip-us.apache.org/repos/asf/ambari/blob/a06a3243/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index e978330..59e1006 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -74,7 +74,6 @@ hadoop_secure_dn_user = hdfs_user
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
 hadoop_lib_home = hdp_select.get_hadoop_dir("lib")
-hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):

http://git-wip-us.apache.org/repos/asf/ambari/blob/a06a3243/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 2de7d0c..602f630 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -19,7 +19,6 @@ limitations under the License.
 
 import collections
 import re
-import os
 
 import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
 
@@ -91,7 +90,6 @@ hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 hadoop_secure_dn_user = hdfs_user
 hadoop_dir = "/etc/hadoop"
 versioned_hdp_root = '/usr/hdp/current'
-hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
 
 # HDP 2.2+ params
 if Script.is_hdp_stack_greater_or_equal("2.2"):

http://git-wip-us.apache.org/repos/asf/ambari/blob/a06a3243/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
index 87defdf..0ce7280 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
@@ -152,11 +152,3 @@ def setup_hadoop_env():
       File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'), owner=tc_owner,
         group=params.user_group,
         content=InlineTemplate(params.hadoop_env_sh_template))
-
-    # Create tmp dir for java.io.tmpdir
-    # Handle a situation when /tmp is set to noexec
-    Directory(params.hadoop_java_io_tmpdir,
-              owner=params.hdfs_user,
-              group=params.user_group,
-              mode=0777
-    )
\ No newline at end of file
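
The Directory resource removed above belongs to Ambari's resource_management DSL. For readers unfamiliar with it, a plain-Python approximation of what it did (note that the mode argument of os.makedirs is masked by the process umask, hence the explicit chmod, and that chown requires root):

    import grp
    import os
    import pwd

    def make_java_io_tmpdir(path, owner="hdfs", group="hadoop"):
        if not os.path.isdir(path):
            os.makedirs(path)
        os.chmod(path, 0o777)  # makedirs' mode would be filtered by umask
        os.chown(path, pwd.getpwnam(owner).pw_uid, grp.getgrnam(group).gr_gid)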

http://git-wip-us.apache.org/repos/asf/ambari/blob/a06a3243/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index e7dc00c..31dadeb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -64,10 +64,6 @@ export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
 # Path to jsvc required by secure HDP 2.0 datanode
 export JSVC_HOME={{jsvc_path}}
 
-# Handle a situation when /tmp is set to noexec
-export JAVA_LIBRARY_PATH="${JAVA_LIBRARY_PATH}:{{hadoop_java_io_tmpdir}}"
-export _JAVA_OPTIONS="${_JAVA_OPTIONS} -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"
-
 
 # The maximum amount of heap to use, in MB. Default is 1000.
 export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"

http://git-wip-us.apache.org/repos/asf/ambari/blob/a06a3243/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
index 3b8a492..3b332ad 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
@@ -46,10 +46,6 @@ export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
 # Path to jsvc required by secure HDP 2.0 datanode
 export JSVC_HOME={{jsvc_path}}
 
-# Handle a situation when /tmp is set to noexec
-export JAVA_LIBRARY_PATH="${JAVA_LIBRARY_PATH}:{{hadoop_java_io_tmpdir}}"
-export _JAVA_OPTIONS="${_JAVA_OPTIONS} -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"
-
 
 # The maximum amount of heap to use, in MB. Default is 1000.
 export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"

http://git-wip-us.apache.org/repos/asf/ambari/blob/a06a3243/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java
index 8ba3a37..2ba44bf 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java
@@ -37,7 +37,6 @@ import org.apache.ambari.server.orm.dao.StackDAO;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
 import org.easymock.EasyMockSupport;
@@ -52,11 +51,9 @@ import java.sql.ResultSet;
 import java.sql.Statement;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
 
 import static org.easymock.EasyMock.*;
-import static org.easymock.EasyMock.createNiceMock;
 
 
 /**
@@ -166,12 +163,9 @@ public class UpgradeCatalog211Test extends EasyMockSupport {
     Method updateKerberosConfigurations =
         UpgradeCatalog211.class.getDeclaredMethod("updateKerberosConfigurations", Cluster.class);
 
-    Method addMissingConfigs = UpgradeCatalog211.class.getDeclaredMethod("addMissingConfigs");
-
     UpgradeCatalog211 upgradeCatalog211 = createMockBuilder(UpgradeCatalog211.class)
         .addMockedMethod(addNewConfigurationsFromXml)
         .addMockedMethod(updateKerberosConfigurations)
-        .addMockedMethod(addMissingConfigs)
         .createMock();
 
     setInjector(upgradeCatalog211, injector);
@@ -182,9 +176,6 @@ public class UpgradeCatalog211Test extends EasyMockSupport {
     upgradeCatalog211.updateKerberosConfigurations(anyObject(Cluster.class));
     expectLastCall().once();
 
-    upgradeCatalog211.addMissingConfigs();
-    expectLastCall().once();
-
     replayAll();
 
     upgradeCatalog211.executeDMLUpdates();

http://git-wip-us.apache.org/repos/asf/ambari/blob/a06a3243/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
index 6ad27f3..98ac1b2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
@@ -179,9 +179,4 @@ class TestHookBeforeInstall(RMFTestCase):
         owner = 'hdfs',
         group = 'hadoop'
     )
-    self.assertResourceCalled('Directory', '/tmp/hadoop_java_io_tmpdir',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              mode = 0777
-    )
     self.assertNoMoreResources()