Posted to commits@ambari.apache.org by jo...@apache.org on 2017/04/24 02:21:15 UTC

[33/45] ambari git commit: AMBARI-20733. /var/log/krb5kdc.log is growing rapidly on the KDC server (echekanskiy)

AMBARI-20733. /var/log/krb5kdc.log is growing rapidly on the KDC server (echekanskiy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b299641a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b299641a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b299641a

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b299641a076266e8f2a19f55068c89d56bc602b7
Parents: 84d2b3a
Author: Eugene Chekanskiy <ec...@hortonworks.com>
Authored: Fri Apr 21 17:54:13 2017 +0300
Committer: Eugene Chekanskiy <ec...@hortonworks.com>
Committed: Fri Apr 21 17:54:13 2017 +0300

----------------------------------------------------------------------
 .../src/main/python/ambari_agent/ActionQueue.py |   9 +-
 .../ambari_agent/CustomServiceOrchestrator.py   |  33 +-----
 .../test/python/ambari_agent/TestActionQueue.py |  13 +--
 .../TestCustomServiceOrchestrator.py            |  51 --------
 .../libraries/script/script.py                  |  16 ---
 .../ambari/server/agent/ComponentStatus.java    |  28 +----
 .../ambari/server/agent/HeartbeatProcessor.java |  20 ----
 .../package/scripts/accumulo_script.py          |  50 --------
 .../0.1.0/package/scripts/metrics_collector.py  |  66 +----------
 .../package/scripts/metadata_server.py          |  78 -------------
 .../0.5.0.2.1/package/scripts/falcon_client.py  |  10 --
 .../0.5.0.2.1/package/scripts/falcon_server.py  |  59 ----------
 .../0.96.0.2.0/package/scripts/hbase_master.py  |  49 --------
 .../package/scripts/hbase_regionserver.py       |  49 --------
 .../package/scripts/phoenix_queryserver.py      |   6 +-
 .../HDFS/2.1.0.2.0/package/scripts/datanode.py  |  58 ---------
 .../2.1.0.2.0/package/scripts/hdfs_client.py    |  45 -------
 .../2.1.0.2.0/package/scripts/journalnode.py    |  57 ---------
 .../HDFS/2.1.0.2.0/package/scripts/namenode.py  |  57 ---------
 .../2.1.0.2.0/package/scripts/nfsgateway.py     |  58 ---------
 .../HDFS/2.1.0.2.0/package/scripts/snamenode.py |  60 ----------
 .../2.1.0.2.0/package/scripts/zkfc_slave.py     |  43 -------
 .../HDFS/3.0.0.3.0/package/scripts/datanode.py  |  58 ---------
 .../3.0.0.3.0/package/scripts/hdfs_client.py    |  45 -------
 .../3.0.0.3.0/package/scripts/journalnode.py    |  57 ---------
 .../HDFS/3.0.0.3.0/package/scripts/namenode.py  |  57 ---------
 .../3.0.0.3.0/package/scripts/nfsgateway.py     |  58 ---------
 .../HDFS/3.0.0.3.0/package/scripts/snamenode.py |  60 ----------
 .../3.0.0.3.0/package/scripts/zkfc_slave.py     |  43 -------
 .../package/scripts/hive_metastore.py           |  52 ---------
 .../0.12.0.2.0/package/scripts/hive_server.py   |  61 ----------
 .../package/scripts/hive_server_interactive.py  |  61 ----------
 .../package/scripts/webhcat_server.py           |  67 -----------
 .../2.1.0.3.0/package/scripts/hive_metastore.py |  52 ---------
 .../2.1.0.3.0/package/scripts/hive_server.py    |  61 ----------
 .../package/scripts/hive_server_interactive.py  |  61 ----------
 .../2.1.0.3.0/package/scripts/webhcat_server.py |  67 -----------
 .../package/scripts/kerberos_client.py          |  21 ----
 .../0.5.0.2.2/package/scripts/knox_gateway.py   |  61 ----------
 .../4.0.0.2.0/package/scripts/oozie_server.py   |  63 ----------
 .../STORM/0.9.1/package/scripts/drpc_server.py  |  52 ---------
 .../STORM/0.9.1/package/scripts/nimbus.py       |  45 -------
 .../STORM/0.9.1/package/scripts/pacemaker.py    |  52 ---------
 .../STORM/0.9.1/package/scripts/ui_server.py    |  53 ---------
 .../scripts/application_timeline_server.py      |  61 ----------
 .../2.1.0.2.0/package/scripts/historyserver.py  |  56 ---------
 .../2.1.0.2.0/package/scripts/nodemanager.py    |  60 ----------
 .../package/scripts/resourcemanager.py          |  60 ----------
 .../scripts/application_timeline_server.py      |  61 ----------
 .../3.0.0.3.0/package/scripts/historyserver.py  |  56 ---------
 .../3.0.0.3.0/package/scripts/nodemanager.py    |  60 ----------
 .../package/scripts/resourcemanager.py          |  60 ----------
 .../3.4.5/package/scripts/zookeeper_server.py   |  51 --------
 .../KERBEROS/package/scripts/kerberos_client.py |  21 ----
 .../server/agent/HeartbeatProcessorTest.java    |   7 --
 .../server/agent/TestHeartbeatHandler.java      |  13 ---
 .../stacks/2.0.6/HBASE/test_hbase_master.py     | 102 ----------------
 .../2.0.6/HBASE/test_hbase_regionserver.py      | 104 -----------------
 .../python/stacks/2.0.6/HDFS/test_datanode.py   | 111 ------------------
 .../stacks/2.0.6/HDFS/test_hdfs_client.py       | 100 ----------------
 .../stacks/2.0.6/HDFS/test_journalnode.py       | 114 ------------------
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 114 ------------------
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py | 116 ------------------
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  | 117 +------------------
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  | 102 +---------------
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 112 ------------------
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    | 116 ------------------
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     | 113 ------------------
 .../stacks/2.0.6/YARN/test_historyserver.py     | 106 -----------------
 .../stacks/2.0.6/YARN/test_nodemanager.py       | 109 -----------------
 .../stacks/2.0.6/YARN/test_resourcemanager.py   | 108 -----------------
 .../2.0.6/ZOOKEEPER/test_zookeeper_server.py    | 103 ----------------
 .../stacks/2.1/FALCON/test_falcon_client.py     |  24 ----
 .../stacks/2.1/FALCON/test_falcon_server.py     | 109 -----------------
 .../stacks/2.1/HIVE/test_hive_metastore.py      | 113 ------------------
 .../stacks/2.1/STORM/test_storm_drpc_server.py  | 104 -----------------
 .../stacks/2.1/STORM/test_storm_nimbus.py       | 103 ----------------
 .../stacks/2.1/STORM/test_storm_ui_server.py    |  82 -------------
 .../stacks/2.1/YARN/test_apptimelineserver.py   | 110 -----------------
 .../python/stacks/2.2/KNOX/test_knox_gateway.py | 102 ----------------
 .../stacks/2.5/ATLAS/test_atlas_server.py       |  42 -------
 81 files changed, 13 insertions(+), 5211 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
index 75880c6..1eda5c2 100644
--- a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
+++ b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
@@ -498,16 +498,14 @@ class ActionQueue(threading.Thread):
 
   def execute_status_command_and_security_status(self, command):
     component_status_result = self.customServiceOrchestrator.requestComponentStatus(command)
-    component_security_status_result = self.customServiceOrchestrator.requestComponentSecurityState(command)
-
-    return command, component_status_result, component_security_status_result
+    return command, component_status_result
 
   def process_status_command_result(self, result):
     '''
     Executes commands of type STATUS_COMMAND
     '''
     try:
-      command, component_status_result, component_security_status_result = result
+      command, component_status_result = result
       cluster = command['clusterName']
       service = command['serviceName']
       component = command['componentName']
@@ -548,9 +546,6 @@ class ActionQueue(threading.Thread):
       if self.controller.recovery_manager.enabled():
         result['sendExecCmdDet'] = str(request_execution_cmd)
 
-      # Add security state to the result
-      result['securityState'] = component_security_status_result
-
       if component_extra is not None and len(component_extra) != 0:
         if component_extra.has_key('alerts'):
           result['alerts'] = component_extra['alerts']

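For orientation, here is a rough sketch (not code from the repository) of the agent-side flow once this hunk is applied: the status executor returns a two-element tuple, and the result processor no longer attaches a securityState field to the component report.

    def execute_status_command_and_security_status(orchestrator, command):
        # Despite the legacy name, only the component status is requested now;
        # the separate SECURITY_STATUS command round-trip is gone.
        return command, orchestrator.requestComponentStatus(command)

    def process_status_command_result(result):
        command, component_status_result = result  # two-element tuple
        report = {'clusterName': command['clusterName'],
                  'serviceName': command['serviceName'],
                  'componentName': command['componentName']}
        # No report['securityState'] is set anymore.
        return report
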
http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index a67e16e..8b8a8f9 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -47,7 +47,6 @@ class CustomServiceOrchestrator():
   SCRIPT_TYPE_PYTHON = "PYTHON"
   COMMAND_TYPE = "commandType"
   COMMAND_NAME_STATUS = "STATUS"
-  COMMAND_NAME_SECURITY_STATUS = "SECURITY_STATUS"
   CUSTOM_ACTION_COMMAND = 'ACTIONEXECUTE'
   CUSTOM_COMMAND_COMMAND = 'CUSTOM_COMMAND'
 
@@ -63,7 +62,7 @@ class CustomServiceOrchestrator():
   AMBARI_SERVER_PORT = "ambari_server_port"
   AMBARI_SERVER_USE_SSL = "ambari_server_use_ssl"
 
-  FREQUENT_COMMANDS = [COMMAND_NAME_SECURITY_STATUS, COMMAND_NAME_STATUS]
+  FREQUENT_COMMANDS = [COMMAND_NAME_STATUS]
   DONT_DEBUG_FAILURES_FOR_COMMANDS = FREQUENT_COMMANDS
   REFLECTIVELY_RUN_COMMANDS = FREQUENT_COMMANDS # -- commands which run a lot and often (this increases their speed)
   DONT_BACKUP_LOGS_FOR_COMMANDS = FREQUENT_COMMANDS
@@ -467,36 +466,6 @@ class CustomServiceOrchestrator():
                           override_output_files=override_output_files)
     return res
 
-  def requestComponentSecurityState(self, command):
-    """
-     Determines the current security state of the component
-     A command will be issued to trigger the security_status check and the result of this check will
-     returned to the caller. If the component lifecycle script has no security_status method the
-     check will return non zero exit code and "UNKNOWN" will be returned.
-    """
-    override_output_files=True # by default, we override status command output
-    if logger.level == logging.DEBUG:
-      override_output_files = False
-    security_check_res = self.runCommand(command, self.status_commands_stdout,
-                                         self.status_commands_stderr, self.COMMAND_NAME_SECURITY_STATUS,
-                                         override_output_files=override_output_files)
-    result = 'UNKNOWN'
-
-    if security_check_res is None:
-      logger.warn("The return value of the security_status check was empty, the security status is unknown")
-    elif 'exitcode' not in security_check_res:
-      logger.warn("Missing 'exitcode' value from the security_status check result, the security status is unknown")
-    elif security_check_res['exitcode'] != 0:
-      logger.debug("The 'exitcode' value from the security_status check result indicated the check routine failed to properly execute, the security status is unknown")
-    elif 'structuredOut' not in security_check_res:
-      logger.warn("Missing 'structuredOut' value from the security_status check result, the security status is unknown")
-    elif 'securityState' not in security_check_res['structuredOut']:
-      logger.warn("Missing 'securityState' value from the security_status check structuredOut data set, the security status is unknown")
-    else:
-      result = security_check_res['structuredOut']['securityState']
-
-    return result
-
   def resolve_script_path(self, base_dir, script):
     """
     Encapsulates logic of script location determination.

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
index ab46f96..faa9b81 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
@@ -988,12 +988,11 @@ class TestActionQueue(TestCase):
 
     dummy_controller.recovery_manager = RecoveryManager(tempfile.mktemp())
 
-    result = (self.status_command, {'exitcode': 0 }, 'UNKNOWN')
+    result = (self.status_command, {'exitcode': 0 })
 
     actionQueue.process_status_command_result(result)
     report = actionQueue.result()
-    expected = {'dummy report': '',
-                'securityState' : 'UNKNOWN'}
+    expected = {'dummy report': ''}
 
     self.assertEqual(len(report['componentStatus']), 1)
     self.assertEqual(report['componentStatus'][0], expected)
@@ -1019,12 +1018,11 @@ class TestActionQueue(TestCase):
 
     dummy_controller.recovery_manager = RecoveryManager(tempfile.mktemp(), True, False)
 
-    result = (self.status_command, {'exitcode': 0 }, 'UNKNOWN')
+    result = (self.status_command, {'exitcode': 0 })
 
     actionQueue.process_status_command_result(result)
     report = actionQueue.result()
     expected = {'dummy report': '',
-                'securityState' : 'UNKNOWN',
                 'sendExecCmdDet': 'True'}
 
     self.assertEqual(len(report['componentStatus']), 1)
@@ -1033,12 +1031,11 @@ class TestActionQueue(TestCase):
     requires_recovery_mock.return_value = True
     command_exists_mock.return_value = True
     
-    result = (self.status_command, {'exitcode': 0 }, 'UNKNOWN')
+    result = (self.status_command, {'exitcode': 0 })
 
     actionQueue.process_status_command_result(result)
     report = actionQueue.result()
     expected = {'dummy report': '',
-                'securityState' : 'UNKNOWN',
                 'sendExecCmdDet': 'False'}
 
     self.assertEqual(len(report['componentStatus']), 1)
@@ -1062,7 +1059,7 @@ class TestActionQueue(TestCase):
       'structuredOut': {'alerts': [ {'name': 'flume_alert'} ] }
     }
     
-    result = (self.status_command_for_alerts, command_return_value, command_return_value)
+    result = (self.status_command_for_alerts, command_return_value)
     
     build_mock.return_value = {'somestatusresult': 'aresult'}
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
index 8e5e9a3..c54ffca 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
@@ -569,57 +569,6 @@ class TestCustomServiceOrchestrator(TestCase):
     status = orchestrator.requestComponentStatus(status_command)
     self.assertEqual(runCommand_mock.return_value, status)
 
-  @patch.object(CustomServiceOrchestrator, "runCommand")
-  @patch.object(FileCache, "__init__")
-  def test_requestComponentSecurityState(self, FileCache_mock, runCommand_mock):
-    FileCache_mock.return_value = None
-    status_command = {
-      "serviceName" : 'HDFS',
-      "commandType" : "STATUS_COMMAND",
-      "clusterName" : "",
-      "componentName" : "DATANODE",
-      'configurations':{}
-    }
-    dummy_controller = MagicMock()
-    orchestrator = CustomServiceOrchestrator(self.config, dummy_controller)
-    # Test securityState
-    runCommand_mock.return_value = {
-      'exitcode' : 0,
-      'structuredOut' : {'securityState': 'UNSECURED'}
-    }
-
-    status = orchestrator.requestComponentSecurityState(status_command)
-    self.assertEqual('UNSECURED', status)
-
-    # Test case where exit code indicates failure
-    runCommand_mock.return_value = {
-      "exitcode" : 1
-    }
-    status = orchestrator.requestComponentSecurityState(status_command)
-    self.assertEqual('UNKNOWN', status)
-
-  @patch.object(FileCache, "__init__")
-  def test_requestComponentSecurityState_realFailure(self, FileCache_mock):
-    '''
-    Tests the case where the CustomServiceOrchestrator attempts to call a service's security_status
-    method, but fails to do so because the script or method was not found.
-    :param FileCache_mock:
-    :return:
-    '''
-    FileCache_mock.return_value = None
-    status_command = {
-      "serviceName" : 'BOGUS_SERVICE',
-      "commandType" : "STATUS_COMMAND",
-      "clusterName" : "",
-      "componentName" : "DATANODE",
-      'configurations':{}
-    }
-    dummy_controller = MagicMock()
-    orchestrator = CustomServiceOrchestrator(self.config, dummy_controller)
-
-    status = orchestrator.requestComponentSecurityState(status_command)
-    self.assertEqual('UNKNOWN', status)
-
 
   @patch.object(CustomServiceOrchestrator, "get_py_executor")
   @patch.object(CustomServiceOrchestrator, "dump_command_to_json")

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index d0e5678..a08feab 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -864,22 +864,6 @@ class Script(object):
     """
     self.fail_with_error('configure method isn\'t implemented')
 
-  def security_status(self, env):
-    """
-    To be overridden by subclasses to provide the current security state of the component.
-    Implementations are required to set the "securityState" property of the structured out data set
-    to one of the following values:
-
-      UNSECURED        - If the component is not configured for any security protocol such as
-                         Kerberos
-      SECURED_KERBEROS - If the component is configured for Kerberos
-      UNKNOWN          - If the security state cannot be determined
-      ERROR            - If the component is supposed to be secured, but there are issues with the
-                         configuration.  For example, if the component is configured for Kerberos
-                         but the configured principal and keytab file fail to kinit
-    """
-    self.put_structured_out({"securityState": "UNKNOWN"})
-
   def generate_configs_get_template_file_content(self, filename, dicts):
     config = self.get_config()
     content = ''

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/java/org/apache/ambari/server/agent/ComponentStatus.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ComponentStatus.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ComponentStatus.java
index 5591ae8..68e1734 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ComponentStatus.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ComponentStatus.java
@@ -28,12 +28,6 @@ public class ComponentStatus {
   private String msg;
   private String status;
 
-  /**
-   * A String declaring the component's security state
-   *
-   * @see org.apache.ambari.server.state.SecurityState
-   */
-  private String securityState;
   private String sendExecCmdDet = "False";
 
   private String serviceName;
@@ -74,26 +68,6 @@ public class ComponentStatus {
     this.status = status;
   }
 
-  /**
-   * Gets the relevant component's security state.
-   *
-   * @return a String declaring this component's security state
-   * @see org.apache.ambari.server.state.SecurityState
-   */
-  public String getSecurityState() {
-    return securityState;
-  }
-
-  /**
-   * Sets the relevant component's security state.
-   *
-   * @param securityState a String declaring this component's security state
-   * @see org.apache.ambari.server.state.SecurityState
-   */
-  public void setSecurityState(String securityState) {
-    this.securityState = securityState;
-  }
-
   public String getStackVersion() {
     return stackVersion;
   }
@@ -158,7 +132,7 @@ public class ComponentStatus {
   @Override
   public String toString() {
     return "ComponentStatus [componentName=" + componentName + ", msg=" + msg
-        + ", status=" + status + ", securityState=" + securityState
+        + ", status=" + status
         + ", serviceName=" + serviceName + ", clusterName=" + clusterName
         + ", stackVersion=" + stackVersion + ", configurationTags="
         + configurationTags + ", extra=" + extra + "]";

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
index 8cd2804..c1028dc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
@@ -56,7 +56,6 @@ import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostHealthStatus;
 import org.apache.ambari.server.state.MaintenanceState;
-import org.apache.ambari.server.state.SecurityState;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
@@ -617,25 +616,6 @@ public class HeartbeatProcessor extends AbstractService{
                 }
               }
 
-              SecurityState prevSecurityState = scHost.getSecurityState();
-              SecurityState currentSecurityState = SecurityState.valueOf(status.getSecurityState());
-              if((prevSecurityState != currentSecurityState)) {
-                if(prevSecurityState.isEndpoint()) {
-                  scHost.setSecurityState(currentSecurityState);
-                  LOG.info(String.format("Security of service component %s of service %s of cluster %s " +
-                          "has changed from %s to %s on host %s",
-                      componentName, status.getServiceName(), status.getClusterName(), prevSecurityState,
-                      currentSecurityState, hostname));
-                }
-                else {
-                  LOG.debug(String.format("Security of service component %s of service %s of cluster %s " +
-                          "has changed from %s to %s on host %s but will be ignored since %s is a " +
-                          "transitional state",
-                      componentName, status.getServiceName(), status.getClusterName(),
-                      prevSecurityState, currentSecurityState, hostname, prevSecurityState));
-                }
-              }
-
               if (null != status.getStackVersion() && !status.getStackVersion().isEmpty()) {
                 scHost.setStackVersion(gson.fromJson(status.getStackVersion(), StackId.class));
               }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
index 01fbce2..445c996 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
@@ -119,56 +119,6 @@ class AccumuloScript(Script):
 
     # some accumulo components depend on the client, so update that too
     stack_select.select("accumulo-client", params.version)
-
-
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-
-    props_value_check = {}
-    props_empty_check = ['general.kerberos.keytab',
-                         'general.kerberos.principal']
-    props_read_check = ['general.kerberos.keytab']
-    accumulo_site_expectations = build_expectations('accumulo-site',
-      props_value_check, props_empty_check, props_read_check)
-
-    accumulo_expectations = {}
-    accumulo_expectations.update(accumulo_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.conf_dir,
-      {'accumulo-site.xml': FILE_TYPE_XML})
-
-    result_issues = validate_security_config_properties(security_params, accumulo_expectations)
-    if not result_issues:  # If all validations passed successfully
-      try:
-        # Double check the dict before calling execute
-        if ( 'accumulo-site' not in security_params
-             or 'general.kerberos.keytab' not in security_params['accumulo-site']
-             or 'general.kerberos.principal' not in security_params['accumulo-site']):
-          self.put_structured_out({"securityState": "UNSECURED"})
-          self.put_structured_out(
-            {"securityIssuesFound": "Keytab file or principal are not set property."})
-          return
-
-        cached_kinit_executor(status_params.kinit_path_local,
-          status_params.accumulo_user,
-          security_params['accumulo-site']['general.kerberos.keytab'],
-          security_params['accumulo-site']['general.kerberos.principal'],
-          status_params.hostname,
-          status_params.tmp_dir,
-          30)
-
-        self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-      except Exception as e:
-        self.put_structured_out({"securityState": "ERROR"})
-        self.put_structured_out({"securityStateErrorInfo": str(e)})
-    else:
-      issues = []
-      for cf in result_issues:
-        issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-      self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-      self.put_structured_out({"securityState": "UNSECURED"})
       
   def get_log_folder(self):
     import params

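The security_status implementation removed above (and the analogous methods deleted from the other service scripts in this commit) is the apparent source of the KDC log growth named in the commit title: the security_status checks issued alongside the regular status commands periodically re-ran kinit through cached_kinit_executor, which shows up as recurring traffic in /var/log/krb5kdc.log. A minimal sketch of that call pattern follows; the keytab, principal, host, and import path are illustrative assumptions, not values taken from this commit.

    # Sketch only: the per-check kinit that the removed security_status
    # methods performed. All literal values below are hypothetical examples.
    from resource_management.libraries.functions.security_commons import cached_kinit_executor

    cached_kinit_executor("/usr/bin/kinit",                                # kinit_path_local
                          "accumulo",                                      # service user
                          "/etc/security/keytabs/accumulo.service.keytab", # keytab (hypothetical)
                          "accumulo/_HOST@EXAMPLE.COM",                    # principal (hypothetical)
                          "host1.example.com",                             # hostname
                          "/var/lib/ambari-agent/tmp")                     # tmp_dir
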
http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py
index 7073de6..fc2576d 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py
@@ -77,71 +77,7 @@ class AmsCollector(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class AmsCollectorDefault(AmsCollector):
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    props_value_check = {"hbase.security.authentication": "kerberos",
-                         "hbase.security.authorization": "true"}
-
-    props_empty_check = ["hbase.zookeeper.property.authProvider.1",
-                         "hbase.master.keytab.file",
-                         "hbase.master.kerberos.principal",
-                         "hbase.regionserver.keytab.file",
-                         "hbase.regionserver.kerberos.principal"
-                         ]
-    props_read_check = ['hbase.master.keytab.file', 'hbase.regionserver.keytab.file']
-    ams_hbase_site_expectations = build_expectations('hbase-site', props_value_check,
-                                                     props_empty_check,
-                                                     props_read_check)
-
-    expectations = {}
-    expectations.update(ams_hbase_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.ams_hbase_conf_dir,
-                                                 {'hbase-site.xml': FILE_TYPE_XML})
-
-    # In case of blueprint deployment security_status might be called before AMS collector is installed.
-    if ('hbase-site' not in security_params or 'hbase.cluster.distributed' not in security_params['hbase-site']) :
-      self.put_structured_out({"securityState": "UNKNOWN"})
-      return
-
-    is_hbase_distributed = security_params['hbase-site']['hbase.cluster.distributed']
-    # for embedded mode, when HBase is backed by file, security state is SECURED_KERBEROS by definition when cluster is secured
-    if status_params.security_enabled and not is_hbase_distributed:
-      self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-      return
-
-    result_issues = validate_security_config_properties(security_params, expectations)
-
-    if not result_issues:  # If all validations passed successfully
-      try:
-        # Double check the dict before calling execute
-        if ('hbase-site' not in security_params or
-                'hbase.master.keytab.file' not in security_params['hbase-site'] or
-                'hbase.master.kerberos.principal' not in security_params['hbase-site']):
-          self.put_structured_out({"securityState": "UNSECURED"})
-          self.put_structured_out(
-            {"securityIssuesFound": "Keytab file or principal are not set property."})
-          return
-
-        cached_kinit_executor(status_params.kinit_path_local,
-                              status_params.hbase_user,
-                              security_params['hbase-site']['hbase.master.keytab.file'],
-                              security_params['hbase-site']['hbase.master.kerberos.principal'],
-                              status_params.hostname,
-                              status_params.tmp_dir)
-        self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-      except Exception as e:
-        self.put_structured_out({"securityState": "ERROR"})
-        self.put_structured_out({"securityStateErrorInfo": str(e)})
-    else:
-      issues = []
-      for cf in result_issues:
-        issues.append("Configuration file %s did not pass the validation. Reason: %s" % (
-          cf, result_issues[cf]))
-      self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-      self.put_structured_out({"securityState": "UNSECURED"})
+  pass
 
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
index 3c62243..1ef77cf 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
@@ -168,84 +168,6 @@ class MetadataServer(Script):
     env.set_params(status_params)
     check_process_status(status_params.pid_file)
 
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-
-    file_name_key = 'applicaton'
-    props_value_check = {'atlas.authentication.method': 'kerberos',
-                         'atlas.http.authentication.enabled': 'true',
-                         'atlas.http.authentication.type': 'kerberos'}
-    props_empty_check = ['atlas.authentication.principal',
-                         'atlas.authentication.keytab',
-                         'atlas.http.authentication.kerberos.principal',
-                         'atlas.http.authentication.kerberos.keytab']
-    props_read_check = ['atlas.authentication.keytab',
-                        'atlas.http.authentication.kerberos.keytab']
-
-    if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, status_params.version_for_stack_feature_checks):
-      file_name_key = 'atlas-application'
-      props_value_check = {'atlas.authentication.method.kerberos': 'true',
-                           'atlas.solr.kerberos.enable': 'true'}
-      props_empty_check = ['atlas.authentication.principal',
-                           'atlas.authentication.keytab',
-                           'atlas.authentication.method.kerberos.principal',
-                           'atlas.authentication.method.kerberos.keytab']
-      props_read_check = ['atlas.authentication.keytab',
-                          'atlas.authentication.method.kerberos.keytab']
-
-    atlas_site_expectations = build_expectations(file_name_key,
-                                                 props_value_check,
-                                                 props_empty_check,
-                                                 props_read_check)
-
-    atlas_expectations = {}
-    atlas_expectations.update(atlas_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.conf_dir,
-                                                 {status_params.conf_file: FILE_TYPE_PROPERTIES})
-    result_issues = validate_security_config_properties(security_params, atlas_expectations)
-
-    if not result_issues:  # If all validations passed successfully
-      try:
-        # Double check the dict before calling execute
-        if ( file_name_key not in security_params
-             or 'atlas.authentication.keytab' not in security_params[file_name_key]
-             or 'atlas.authentication.principal' not in security_params[file_name_key]):
-          self.put_structured_out({"securityState": "UNSECURED"})
-          self.put_structured_out(
-            {"securityIssuesFound": "Atlas service keytab file or principal are not set property."})
-          return
-
-        if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, status_params.version_for_stack_feature_checks):
-          if ( file_name_key not in security_params
-               or 'atlas.authentication.method.kerberos.keytab' not in security_params[file_name_key]
-               or 'atlas.authentication.method.kerberos.principal' not in security_params[file_name_key]):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Method Authentication keytab file or principal are not set property."})
-            return
-        else:
-          if ( file_name_key not in security_params
-               or 'atlas.http.authentication.kerberos.keytab' not in security_params[file_name_key]
-               or 'atlas.http.authentication.kerberos.principal' not in security_params[file_name_key]):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "HTTP Authentication keytab file or principal are not set property."})
-            return
-
-        self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-      except Exception as e:
-        self.put_structured_out({"securityState": "ERROR"})
-        self.put_structured_out({"securityStateErrorInfo": str(e)})
-    else:
-      issues = []
-      for cf in result_issues:
-        issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-      self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-      self.put_structured_out({"securityState": "UNSECURED"})
-
   def get_log_folder(self):
     import params
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
index 6d1dbc5..b0f517b 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
@@ -59,16 +59,6 @@ class FalconClientLinux(FalconClient):
     conf_select.select(params.stack_name, "falcon", params.version)
     stack_select.select("falcon-client", params.version)
 
-  def security_status(self, env):
-    import status_params
-    env.set_params(status_params)
-
-    if status_params.security_enabled:
-      self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-
-
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class FalconClientWindows(FalconClient):
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
index c4960a7..23f9ef8 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
@@ -89,65 +89,6 @@ class FalconServerLinux(FalconServer):
 
     falcon_server_upgrade.pre_start_restore()
 
-  def security_status(self, env):
-    import status_params
-    env.set_params(status_params)
-    if status_params.security_enabled:
-      props_value_check = {"*.falcon.authentication.type": "kerberos",
-                           "*.falcon.http.authentication.type": "kerberos"}
-      props_empty_check = ["*.falcon.service.authentication.kerberos.principal",
-                           "*.falcon.service.authentication.kerberos.keytab",
-                           "*.falcon.http.authentication.kerberos.principal",
-                           "*.falcon.http.authentication.kerberos.keytab"]
-      props_read_check = ["*.falcon.service.authentication.kerberos.keytab",
-                          "*.falcon.http.authentication.kerberos.keytab"]
-      falcon_startup_props = build_expectations('startup', props_value_check, props_empty_check,
-                                                  props_read_check)
-
-      falcon_expectations ={}
-      falcon_expectations.update(falcon_startup_props)
-
-      security_params = get_params_from_filesystem('/etc/falcon/conf',
-                                                   {'startup.properties': FILE_TYPE_PROPERTIES})
-      result_issues = validate_security_config_properties(security_params, falcon_expectations)
-      if not result_issues: # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ( 'startup' not in security_params
-               or '*.falcon.service.authentication.kerberos.keytab' not in security_params['startup']
-               or '*.falcon.service.authentication.kerberos.principal' not in security_params['startup']) \
-            or '*.falcon.http.authentication.kerberos.keytab' not in security_params['startup'] \
-            or '*.falcon.http.authentication.kerberos.principal' not in security_params['startup']:
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set property."})
-            return
-
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.falcon_user,
-                                security_params['startup']['*.falcon.service.authentication.kerberos.keytab'],
-                                security_params['startup']['*.falcon.service.authentication.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.falcon_user,
-                                security_params['startup']['*.falcon.http.authentication.kerberos.keytab'],
-                                security_params['startup']['*.falcon.http.authentication.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-
   def get_log_folder(self):
     import params
     return params.falcon_log_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
index d2c8089..83af3aa 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
@@ -98,55 +98,6 @@ class HbaseMasterDefault(HbaseMaster):
 
     check_process_status(status_params.hbase_master_pid_file)
 
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    if status_params.security_enabled:
-      props_value_check = {"hbase.security.authentication" : "kerberos",
-                           "hbase.security.authorization": "true"}
-      props_empty_check = ['hbase.master.keytab.file',
-                           'hbase.master.kerberos.principal']
-      props_read_check = ['hbase.master.keytab.file']
-      hbase_site_expectations = build_expectations('hbase-site', props_value_check, props_empty_check,
-                                                  props_read_check)
-
-      hbase_expectations = {}
-      hbase_expectations.update(hbase_site_expectations)
-
-      security_params = get_params_from_filesystem(status_params.hbase_conf_dir,
-                                                   {'hbase-site.xml': FILE_TYPE_XML})
-      result_issues = validate_security_config_properties(security_params, hbase_expectations)
-      if not result_issues:  # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ( 'hbase-site' not in security_params
-               or 'hbase.master.keytab.file' not in security_params['hbase-site']
-               or 'hbase.master.kerberos.principal' not in security_params['hbase-site']):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set property."})
-            return
-
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.hbase_user,
-                                security_params['hbase-site']['hbase.master.keytab.file'],
-                                security_params['hbase-site']['hbase.master.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-      
   def get_log_folder(self):
     import params
     return params.log_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
index 226e7fd5..75910b1 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
@@ -109,55 +109,6 @@ class HbaseRegionServerDefault(HbaseRegionServer):
 
     check_process_status(status_params.regionserver_pid_file)
 
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    if status_params.security_enabled:
-      props_value_check = {"hbase.security.authentication" : "kerberos",
-                           "hbase.security.authorization": "true"}
-      props_empty_check = ['hbase.regionserver.keytab.file',
-                           'hbase.regionserver.kerberos.principal']
-      props_read_check = ['hbase.regionserver.keytab.file']
-      hbase_site_expectations = build_expectations('hbase-site', props_value_check, props_empty_check,
-                                                   props_read_check)
-
-      hbase_expectations = {}
-      hbase_expectations.update(hbase_site_expectations)
-
-      security_params = get_params_from_filesystem(status_params.hbase_conf_dir,
-                                                   {'hbase-site.xml': FILE_TYPE_XML})
-      result_issues = validate_security_config_properties(security_params, hbase_expectations)
-      if not result_issues:  # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ( 'hbase-site' not in security_params
-               or 'hbase.regionserver.keytab.file' not in security_params['hbase-site']
-               or 'hbase.regionserver.kerberos.principal' not in security_params['hbase-site']):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set property."})
-            return
-
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.hbase_user,
-                                security_params['hbase-site']['hbase.regionserver.keytab.file'],
-                                security_params['hbase-site']['hbase.regionserver.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-
   def get_log_folder(self):
     import params
     return params.log_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
index 77820cc..8a85d6e 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
@@ -71,10 +71,6 @@ class PhoenixQueryServer(Script):
     import status_params
     env.set_params(status_params)
     phoenix_service('status')
-
-
-  def security_status(self, env):
-    self.put_structured_out({"securityState": "UNSECURED"})
     
   def get_log_folder(self):
     import params
@@ -89,4 +85,4 @@ class PhoenixQueryServer(Script):
     return [status_params.phoenix_pid_file]
 
 if __name__ == "__main__":
-  PhoenixQueryServer().execute()
\ No newline at end of file
+  PhoenixQueryServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
index cd52885..da03cce 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
@@ -143,64 +143,6 @@ class DataNodeDefault(DataNode):
     hdfs_binary = self.get_hdfs_binary()
     # ensure the DataNode has started and rejoined the cluster
     datanode_upgrade.post_upgrade_check(hdfs_binary)
-
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    props_value_check = {"hadoop.security.authentication": "kerberos",
-                         "hadoop.security.authorization": "true"}
-    props_empty_check = ["hadoop.security.auth_to_local"]
-    props_read_check = None
-    core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
-                                                props_read_check)
-    props_value_check = None
-    props_empty_check = ['dfs.datanode.keytab.file',
-                         'dfs.datanode.kerberos.principal']
-    props_read_check = ['dfs.datanode.keytab.file']
-    hdfs_site_expectations = build_expectations('hdfs-site', props_value_check, props_empty_check,
-                                                props_read_check)
-
-    hdfs_expectations = {}
-    hdfs_expectations.update(core_site_expectations)
-    hdfs_expectations.update(hdfs_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
-                                                 {'core-site.xml': FILE_TYPE_XML,
-                                                  'hdfs-site.xml': FILE_TYPE_XML})
-
-    if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
-        security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
-      result_issues = validate_security_config_properties(security_params, hdfs_expectations)
-      if not result_issues:  # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ('hdfs-site' not in security_params or
-                  'dfs.datanode.keytab.file' not in security_params['hdfs-site'] or
-                  'dfs.datanode.kerberos.principal' not in security_params['hdfs-site']):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set property."})
-            return
-
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.hdfs_user,
-                                security_params['hdfs-site']['dfs.datanode.keytab.file'],
-                                security_params['hdfs-site']['dfs.datanode.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
       
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
index 4dabdbc..51acc9e 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
@@ -66,51 +66,6 @@ class HdfsClientDefault(HdfsClient):
       conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hadoop-client", params.version)
 
-  def security_status(self, env):
-    import status_params
-    env.set_params(status_params)
-
-    props_value_check = {"hadoop.security.authentication": "kerberos",
-                         "hadoop.security.authorization": "true"}
-    props_empty_check = ["hadoop.security.auth_to_local"]
-    props_read_check = None
-    core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
-                                                props_read_check)
-    hdfs_expectations ={}
-    hdfs_expectations.update(core_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
-                                                   {'core-site.xml': FILE_TYPE_XML})
-
-    if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
-        security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
-      result_issues = validate_security_config_properties(security_params, hdfs_expectations)
-      if not result_issues: # If all validations passed successfully
-        if status_params.hdfs_user_principal or status_params.hdfs_user_keytab:
-          try:
-            cached_kinit_executor(status_params.kinit_path_local,
-                       status_params.hdfs_user,
-                       status_params.hdfs_user_keytab,
-                       status_params.hdfs_user_principal,
-                       status_params.hostname,
-                       status_params.tmp_dir)
-            self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-          except Exception as e:
-            self.put_structured_out({"securityState": "ERROR"})
-            self.put_structured_out({"securityStateErrorInfo": str(e)})
-        else:
-          self.put_structured_out({"securityIssuesFound": "hdfs principal and/or keytab file is not specified"})
-          self.put_structured_out({"securityState": "UNSECURED"})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class HdfsClientWindows(HdfsClient):
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
index 9448fa6..7fd8d70 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
@@ -105,63 +105,6 @@ class JournalNodeDefault(JournalNode):
     env.set_params(status_params)
     check_process_status(status_params.journalnode_pid_file)
 
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    props_value_check = {"hadoop.security.authentication": "kerberos",
-                         "hadoop.security.authorization": "true"}
-    props_empty_check = ["hadoop.security.auth_to_local"]
-    props_read_check = None
-    core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
-                                                props_read_check)
-
-    props_value_check = None
-    props_empty_check = ['dfs.journalnode.keytab.file',
-                         'dfs.journalnode.kerberos.principal']
-    props_read_check = ['dfs.journalnode.keytab.file']
-    hdfs_site_expectations = build_expectations('hdfs-site', props_value_check, props_empty_check,
-                                                props_read_check)
-
-    hdfs_expectations = {}
-    hdfs_expectations.update(hdfs_site_expectations)
-    hdfs_expectations.update(core_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
-                                                 {'core-site.xml': FILE_TYPE_XML})
-    if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
-        security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
-      result_issues = validate_security_config_properties(security_params, hdfs_expectations)
-      if not result_issues:  # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ('hdfs-site' not in security_params or
-                  'dfs.journalnode.kerberos.keytab.file' not in security_params['hdfs-site'] or
-                  'dfs.journalnode.kerberos.principal' not in security_params['hdfs-site']):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set property."})
-            return
-
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.hdfs_user,
-                                security_params['hdfs-site']['dfs.journalnode.kerberos.keytab.file'],
-                                security_params['hdfs-site']['dfs.journalnode.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-      
   def get_log_folder(self):
     import params
     return params.hdfs_log_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
index 1347f37..65cd378 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
@@ -216,63 +216,6 @@ class NameNodeDefault(NameNode):
             try_sleep=10
     )
 
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    props_value_check = {"hadoop.security.authentication": "kerberos",
-                         "hadoop.security.authorization": "true"}
-    props_empty_check = ["hadoop.security.auth_to_local"]
-    props_read_check = None
-    core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
-                                                props_read_check)
-    props_value_check = None
-    props_empty_check = ['dfs.namenode.kerberos.internal.spnego.principal',
-                         'dfs.namenode.keytab.file',
-                         'dfs.namenode.kerberos.principal']
-    props_read_check = ['dfs.namenode.keytab.file']
-    hdfs_site_expectations = build_expectations('hdfs-site', props_value_check, props_empty_check,
-                                                props_read_check)
-
-    hdfs_expectations = {}
-    hdfs_expectations.update(core_site_expectations)
-    hdfs_expectations.update(hdfs_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
-                                                 {'core-site.xml': FILE_TYPE_XML,
-                                                  'hdfs-site.xml': FILE_TYPE_XML})
-    if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
-        security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
-      result_issues = validate_security_config_properties(security_params, hdfs_expectations)
-      if not result_issues:  # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ( 'hdfs-site' not in security_params
-               or 'dfs.namenode.keytab.file' not in security_params['hdfs-site']
-               or 'dfs.namenode.kerberos.principal' not in security_params['hdfs-site']):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set property."})
-            return
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.hdfs_user,
-                                security_params['hdfs-site']['dfs.namenode.keytab.file'],
-                                security_params['hdfs-site']['dfs.namenode.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-
   def rebalancehdfs(self, env):
     import params
     env.set_params(params)

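The removed methods hand status_params.hostname and status_params.tmp_dir to cached_kinit_executor, which suggests the kinit result is cached on disk and only refreshed after some expiry window; the exact policy is not visible in this diff, so the sketch below is a hedged guess at that kind of time-based cache with a hypothetical five-minute window, not Ambari's implementation.

# Hedged sketch of a time-based kinit cache; the expiry value, cache file
# layout, and function name are assumptions, not Ambari's helper.
import json
import os
import subprocess
import time

CACHE_EXPIRY_SECONDS = 5 * 60  # hypothetical refresh window


def cached_kinit(kinit_path, keytab, principal, tmp_dir):
    cache_file = os.path.join(tmp_dir,
                              "kinit_cache_%s.json" % principal.replace("/", "_"))
    now = time.time()
    if os.path.exists(cache_file):
        with open(cache_file) as f:
            last_run = json.load(f).get("last_successful_kinit", 0)
        if now - last_run < CACHE_EXPIRY_SECONDS:
            return  # recent successful kinit; skip the KDC round trip
    subprocess.check_call([kinit_path, "-kt", keytab, principal])
    with open(cache_file, "w") as f:
        json.dump({"last_successful_kinit": now}, f)

Even with such a cache, every secured component on every host contributes its own periodic ticket requests, so removing the check from the status path eliminates that traffic at the source.
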
http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
index 7ba1f96..fa451f4 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
@@ -76,64 +76,6 @@ class NFSGateway(Script):
     env.set_params(status_params)
 
     check_process_status(status_params.nfsgateway_pid_file)
-
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    props_value_check = {"hadoop.security.authentication": "kerberos",
-                         "hadoop.security.authorization": "true"}
-    props_empty_check = ["hadoop.security.auth_to_local"]
-    props_read_check = None
-    core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
-                                                props_read_check)
-    props_value_check = None
-    props_empty_check = ['nfs.keytab.file',
-                         'nfs.kerberos.principal']
-    props_read_check = ['nfs.keytab.file']
-    hdfs_site_expectations = build_expectations('hdfs-site', props_value_check, props_empty_check,
-                                                props_read_check)
-
-    hdfs_expectations = {}
-    hdfs_expectations.update(core_site_expectations)
-    hdfs_expectations.update(hdfs_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
-                                                 {'core-site.xml': FILE_TYPE_XML,
-                                                  'hdfs-site.xml': FILE_TYPE_XML})
-    if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
-        security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
-      result_issues = validate_security_config_properties(security_params, hdfs_expectations)
-      if not result_issues:  # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ('hdfs-site' not in security_params or
-                'nfs.keytab.file' not in security_params['hdfs-site'] or
-                'nfs.kerberos.principal' not in security_params['hdfs-site']):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set property."})
-            return
-
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.hdfs_user,
-                                security_params['hdfs-site']['nfs.keytab.file'],
-                                security_params['hdfs-site'][
-                                  'nfs.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
       
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
index 0f1f438..1408468 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
@@ -74,66 +74,6 @@ class SNameNodeDefault(SNameNode):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hadoop-hdfs-secondarynamenode", params.version)
-
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    props_value_check = {"hadoop.security.authentication": "kerberos",
-                         "hadoop.security.authorization": "true"}
-    props_empty_check = ["hadoop.security.auth_to_local"]
-    props_read_check = None
-    core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
-                                                props_read_check)
-    props_value_check = None
-    props_empty_check = ['dfs.secondary.namenode.kerberos.internal.spnego.principal',
-                         'dfs.secondary.namenode.keytab.file',
-                         'dfs.secondary.namenode.kerberos.principal']
-    props_read_check = ['dfs.secondary.namenode.keytab.file']
-    hdfs_site_expectations = build_expectations('hdfs-site', props_value_check, props_empty_check,
-                                                props_read_check)
-
-    hdfs_expectations = {}
-    hdfs_expectations.update(core_site_expectations)
-    hdfs_expectations.update(hdfs_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
-                                                 {'core-site.xml': FILE_TYPE_XML,
-                                                  'hdfs-site.xml': FILE_TYPE_XML})
-
-    if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
-        security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
-      result_issues = validate_security_config_properties(security_params, hdfs_expectations)
-      if not result_issues:  # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ('hdfs-site' not in security_params or
-                  'dfs.secondary.namenode.keytab.file' not in security_params['hdfs-site'] or
-                  'dfs.secondary.namenode.kerberos.principal' not in security_params['hdfs-site']):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set property."})
-            return
-
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.hdfs_user,
-                                security_params['hdfs-site']['dfs.secondary.namenode.keytab.file'],
-                                security_params['hdfs-site'][
-                                  'dfs.secondary.namenode.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
       
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
index be0d2ed..ca5f605 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
@@ -119,49 +119,6 @@ class ZkfcSlaveDefault(ZkfcSlave):
     env.set_params(status_params)
     check_process_status(status_params.zkfc_pid_file)
 
-  def security_status(self, env):
-    import status_params
-    env.set_params(status_params)
-    props_value_check = {"hadoop.security.authentication": "kerberos",
-                         "hadoop.security.authorization": "true"}
-    props_empty_check = ["hadoop.security.auth_to_local"]
-    props_read_check = None
-    core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
-                                                props_read_check)
-    hdfs_expectations = {}
-    hdfs_expectations.update(core_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
-                                                   {'core-site.xml': FILE_TYPE_XML})
-    result_issues = validate_security_config_properties(security_params, hdfs_expectations)
-    if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
-        security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
-      if not result_issues:  # If all validations passed successfully
-        if status_params.hdfs_user_principal or status_params.hdfs_user_keytab:
-          try:
-            cached_kinit_executor(status_params.kinit_path_local,
-                                  status_params.hdfs_user,
-                                  status_params.hdfs_user_keytab,
-                                  status_params.hdfs_user_principal,
-                                  status_params.hostname,
-                                  status_params.tmp_dir)
-            self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-          except Exception as e:
-            self.put_structured_out({"securityState": "ERROR"})
-            self.put_structured_out({"securityStateErrorInfo": str(e)})
-        else:
-          self.put_structured_out(
-            {"securityIssuesFound": "hdfs principal and/or keytab file is not specified"})
-          self.put_structured_out({"securityState": "UNSECURED"})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-
   def disable_security(self, env):
     import params
 

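Before any kinit attempt, each removed method builds a set of expectations (required property values, properties that must be non-empty, keytab paths that must be readable) via build_expectations and checks the on-disk configuration with validate_security_config_properties. The helper below is an illustrative, stdlib-only compression of that idea, not the security_commons functions used in the hunks.

# Illustrative restatement of the props_value_check / props_empty_check /
# props_read_check idea from the removed security_status() methods.
import os


def validate_expectations(config, value_check=None, empty_check=None, read_check=None):
    """Return a list of human-readable issues for one parsed *-site.xml dict."""
    issues = []
    for prop, expected in (value_check or {}).items():
        if config.get(prop, "").lower() != expected.lower():
            issues.append("%s should be set to %s" % (prop, expected))
    for prop in (empty_check or []):
        if not config.get(prop):
            issues.append("%s must not be empty" % prop)
    for prop in (read_check or []):
        path = config.get(prop, "")
        if path and not os.access(path, os.R_OK):
            issues.append("%s points to an unreadable file: %s" % (prop, path))
    return issues

In the removed code, any reported issue is written to securityIssuesFound and the state falls back to UNSECURED without contacting the KDC; kinit only runs when every expectation passes.
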
http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
index 130c021..78a8f4b 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
@@ -98,64 +98,6 @@ class DataNodeDefault(DataNode):
     # ensure the DataNode has started and rejoined the cluster
     datanode_upgrade.post_upgrade_check(hdfs_binary)
 
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    props_value_check = {"hadoop.security.authentication": "kerberos",
-                         "hadoop.security.authorization": "true"}
-    props_empty_check = ["hadoop.security.auth_to_local"]
-    props_read_check = None
-    core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
-                                                props_read_check)
-    props_value_check = None
-    props_empty_check = ['dfs.datanode.keytab.file',
-                         'dfs.datanode.kerberos.principal']
-    props_read_check = ['dfs.datanode.keytab.file']
-    hdfs_site_expectations = build_expectations('hdfs-site', props_value_check, props_empty_check,
-                                                props_read_check)
-
-    hdfs_expectations = {}
-    hdfs_expectations.update(core_site_expectations)
-    hdfs_expectations.update(hdfs_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
-                                                 {'core-site.xml': FILE_TYPE_XML,
-                                                  'hdfs-site.xml': FILE_TYPE_XML})
-
-    if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
-        security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
-      result_issues = validate_security_config_properties(security_params, hdfs_expectations)
-      if not result_issues:  # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ('hdfs-site' not in security_params or
-                  'dfs.datanode.keytab.file' not in security_params['hdfs-site'] or
-                  'dfs.datanode.kerberos.principal' not in security_params['hdfs-site']):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set property."})
-            return
-
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.hdfs_user,
-                                security_params['hdfs-site']['dfs.datanode.keytab.file'],
-                                security_params['hdfs-site']['dfs.datanode.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-      
   def get_log_folder(self):
     import params
     return params.hdfs_log_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
index 4dabdbc..51acc9e 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
@@ -66,51 +66,6 @@ class HdfsClientDefault(HdfsClient):
       conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hadoop-client", params.version)
 
-  def security_status(self, env):
-    import status_params
-    env.set_params(status_params)
-
-    props_value_check = {"hadoop.security.authentication": "kerberos",
-                         "hadoop.security.authorization": "true"}
-    props_empty_check = ["hadoop.security.auth_to_local"]
-    props_read_check = None
-    core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
-                                                props_read_check)
-    hdfs_expectations ={}
-    hdfs_expectations.update(core_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
-                                                   {'core-site.xml': FILE_TYPE_XML})
-
-    if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
-        security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
-      result_issues = validate_security_config_properties(security_params, hdfs_expectations)
-      if not result_issues: # If all validations passed successfully
-        if status_params.hdfs_user_principal or status_params.hdfs_user_keytab:
-          try:
-            cached_kinit_executor(status_params.kinit_path_local,
-                       status_params.hdfs_user,
-                       status_params.hdfs_user_keytab,
-                       status_params.hdfs_user_principal,
-                       status_params.hostname,
-                       status_params.tmp_dir)
-            self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-          except Exception as e:
-            self.put_structured_out({"securityState": "ERROR"})
-            self.put_structured_out({"securityStateErrorInfo": str(e)})
-        else:
-          self.put_structured_out({"securityIssuesFound": "hdfs principal and/or keytab file is not specified"})
-          self.put_structured_out({"securityState": "UNSECURED"})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class HdfsClientWindows(HdfsClient):
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/b299641a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
index 9448fa6..7fd8d70 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
@@ -105,63 +105,6 @@ class JournalNodeDefault(JournalNode):
     env.set_params(status_params)
     check_process_status(status_params.journalnode_pid_file)
 
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    props_value_check = {"hadoop.security.authentication": "kerberos",
-                         "hadoop.security.authorization": "true"}
-    props_empty_check = ["hadoop.security.auth_to_local"]
-    props_read_check = None
-    core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
-                                                props_read_check)
-
-    props_value_check = None
-    props_empty_check = ['dfs.journalnode.keytab.file',
-                         'dfs.journalnode.kerberos.principal']
-    props_read_check = ['dfs.journalnode.keytab.file']
-    hdfs_site_expectations = build_expectations('hdfs-site', props_value_check, props_empty_check,
-                                                props_read_check)
-
-    hdfs_expectations = {}
-    hdfs_expectations.update(hdfs_site_expectations)
-    hdfs_expectations.update(core_site_expectations)
-
-    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
-                                                 {'core-site.xml': FILE_TYPE_XML})
-    if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
-        security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
-      result_issues = validate_security_config_properties(security_params, hdfs_expectations)
-      if not result_issues:  # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ('hdfs-site' not in security_params or
-                  'dfs.journalnode.kerberos.keytab.file' not in security_params['hdfs-site'] or
-                  'dfs.journalnode.kerberos.principal' not in security_params['hdfs-site']):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set property."})
-            return
-
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.hdfs_user,
-                                security_params['hdfs-site']['dfs.journalnode.kerberos.keytab.file'],
-                                security_params['hdfs-site']['dfs.journalnode.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-      
   def get_log_folder(self):
     import params
     return params.hdfs_log_dir