Posted to commits@ambari.apache.org by yu...@apache.org on 2015/01/22 23:16:41 UTC

[01/14] ambari git commit: AMBARI-9222. Kerberos wizard: Property description tweaks on configure Kerberos page. (jaimin)

Repository: ambari
Updated Branches:
  refs/heads/2.0-preview [created] be939d32e
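
For context, site_properties.js holds the catalog of stack configuration
properties that Ambari Web renders on its configuration pages; in the portion
of the hunk visible here, the entries are unchanged apart from re-indentation
(four spaces to two). Each entry follows the same shape. A minimal sketch of
one entry, adapted from the dfs.namenode.name.dir definition below (the key
meanings in the comments are inferred from the surrounding entries, not stated
by this commit):

    // Illustrative only; key interpretations are inferred, not authoritative.
    var exampleProperty = {
      "id": "site property",                 // common marker used by every entry in this file
      "name": "dfs.namenode.name.dir",       // the Hadoop configuration key
      "displayName": "NameNode directories", // label shown in the Ambari UI
      "displayType": "directories",          // appears to select the input widget (int, checkbox, directories, ...)
      "isOverridable": false,                // appears to control whether config groups may override the value
      "serviceName": "HDFS",                 // the service the property belongs to
      "filename": "hdfs-site.xml",           // the *-site file the value is written to
      "category": "NAMENODE",                // section of the configuration page
      "index": 1                             // appears to order properties within the category
    };

Keys such as "unit", "defaultDirectory", "defaultValue", and "isVisible" show
up on some entries below but are optional.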


http://git-wip-us.apache.org/repos/asf/ambari/blob/ae82067d/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index 74912c3..992b278 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -20,4494 +20,4504 @@ var App = require('app');
 
 var hdp2properties = [
   //***************************************** HDP stack **************************************
-  /**********************************************HDFS***************************************/
-    {
-      "id": "site property",
-      "name": "dfs.namenode.checkpoint.dir",
-      "displayName": "SecondaryNameNode Checkpoint directories",
-      "defaultDirectory": "/hadoop/hdfs/namesecondary",
-      "displayType": "directories",
-      "isOverridable": false,
-      "serviceName": "HDFS",
-      "filename": "hdfs-site.xml",
-      "category": "SECONDARY_NAMENODE",
-      "index": 1
-    },
-    {
-      "id": "site property",
-      "name": "dfs.namenode.checkpoint.period",
-      "displayName": "HDFS Maximum Checkpoint Delay",
-      "displayType": "int",
-      "unit": "seconds",
-      "category": "General",
-      "serviceName": "HDFS",
-      "filename": "hdfs-site.xml",
-      "index": 3
-    },
-    {
-      "id": "site property",
-      "name": "dfs.namenode.name.dir",
-      "displayName": "NameNode directories",
-      "defaultDirectory": "/hadoop/hdfs/namenode",
-      "displayType": "directories",
-      "isOverridable": false,
-      "serviceName": "HDFS",
-      "filename": "hdfs-site.xml",
-      "category": "NAMENODE",
-      "index": 1
-    },
-    {
-      "id": "site property",
-      "name": "dfs.webhdfs.enabled",
-      "displayName": "WebHDFS enabled",
-      "displayType": "checkbox",
-      "isOverridable": false,
-      "category": "General",
-      "serviceName": "HDFS",
-      "filename": "hdfs-site.xml",
-      "index": 0
-    },
-    {
-      "id": "site property",
-      "name": "dfs.datanode.failed.volumes.tolerated",
-      "displayName": "DataNode volumes failure toleration",
-      "displayType": "int",
-      "category": "DATANODE",
-      "serviceName": "HDFS",
-      "filename": "hdfs-site.xml",
-      "index": 3
-    },
-    {
-      "id": "site property",
-      "name": "dfs.datanode.data.dir.mount.file",
-      "displayName": "File that stores mount point for each data dir",
-      "description": "File path that contains the last known mount point for each data dir. This file is used to avoid creating a DFS data dir on the root drive (and filling it up) if a path was previously mounted on a drive.",
-      "defaultValue": "/etc/hadoop/conf/dfs_data_dir_mount.hist",
-      "displayType": "directory",
-      "isVisible": true,
-      "category": "DATANODE",
-      "serviceName": "HDFS",
-      "filename": "hadoop-env.xml",
-      "index": 4
-    },
-    {
-      "id": "site property",
-      "name": "dfs.datanode.data.dir",
-      "displayName": "DataNode directories",
-      "defaultDirectory": "/hadoop/hdfs/data",
-      "displayType": "directories",
-      "category": "DATANODE",
-      "serviceName": "HDFS",
-      "filename": "hdfs-site.xml",
-      "index": 1
-    },
-    {
-      "id": "site property",
-      "name": "dfs.datanode.data.dir.perm",
-      "displayName": "DataNode directories permission",
-      "displayType": "int",
-      "category": "DATANODE",
-      "serviceName": "HDFS",
-      "filename": "hdfs-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "dfs.replication",
-      "displayName": "Block replication",
-      "displayType": "int",
-      "category": "General",
-      "serviceName": "HDFS",
-      "filename": "hdfs-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "dfs.datanode.du.reserved",
-      "displayName": "Reserved space for HDFS",
-      "displayType": "int",
-      "unit": "bytes",
-      "category": "General",
-      "serviceName": "HDFS",
-      "filename": "hdfs-site.xml",
-      "index": 2
-    },
-    {
-      "id": "site property",
-      "name": "dfs.client.read.shortcircuit",
-      "displayName": "HDFS Short-circuit read",
-      "displayType": "checkbox",
-      "category": "Advanced hdfs-site",
-      "serviceName": "HDFS",
-      "filename": "hdfs-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "apache_artifacts_download_url",
-      "displayName": "apache_artifacts_download_url",
-      "description": "",
-      "isRequired": false,
-      "isRequiredByAgent": false,
-      "isVisible": false,
-      "category": "Advanced hdfs-site",
-      "serviceName": "HDFS"
-    },
+/**********************************************HDFS***************************************/
+  {
+    "id": "site property",
+    "name": "dfs.namenode.checkpoint.dir",
+    "displayName": "SecondaryNameNode Checkpoint directories",
+    "defaultDirectory": "/hadoop/hdfs/namesecondary",
+    "displayType": "directories",
+    "isOverridable": false,
+    "serviceName": "HDFS",
+    "filename": "hdfs-site.xml",
+    "category": "SECONDARY_NAMENODE",
+    "index": 1
+  },
+  {
+    "id": "site property",
+    "name": "dfs.namenode.checkpoint.period",
+    "displayName": "HDFS Maximum Checkpoint Delay",
+    "displayType": "int",
+    "unit": "seconds",
+    "category": "General",
+    "serviceName": "HDFS",
+    "filename": "hdfs-site.xml",
+    "index": 3
+  },
+  {
+    "id": "site property",
+    "name": "dfs.namenode.name.dir",
+    "displayName": "NameNode directories",
+    "defaultDirectory": "/hadoop/hdfs/namenode",
+    "displayType": "directories",
+    "isOverridable": false,
+    "serviceName": "HDFS",
+    "filename": "hdfs-site.xml",
+    "category": "NAMENODE",
+    "index": 1
+  },
+  {
+    "id": "site property",
+    "name": "dfs.webhdfs.enabled",
+    "displayName": "WebHDFS enabled",
+    "displayType": "checkbox",
+    "isOverridable": false,
+    "category": "General",
+    "serviceName": "HDFS",
+    "filename": "hdfs-site.xml",
+    "index": 0
+  },
+  {
+    "id": "site property",
+    "name": "dfs.datanode.failed.volumes.tolerated",
+    "displayName": "DataNode volumes failure toleration",
+    "displayType": "int",
+    "category": "DATANODE",
+    "serviceName": "HDFS",
+    "filename": "hdfs-site.xml",
+    "index": 3
+  },
+  {
+    "id": "site property",
+    "name": "dfs.datanode.data.dir.mount.file",
+    "displayName": "File that stores mount point for each data dir",
+    "description": "File path that contains the last known mount point for each data dir. This file is used to avoid creating a DFS data dir on the root drive (and filling it up) if a path was previously mounted on a drive.",
+    "defaultValue": "/etc/hadoop/conf/dfs_data_dir_mount.hist",
+    "displayType": "directory",
+    "isVisible": true,
+    "category": "DATANODE",
+    "serviceName": "HDFS",
+    "filename": "hadoop-env.xml",
+    "index": 4
+  },
+  {
+    "id": "site property",
+    "name": "dfs.datanode.data.dir",
+    "displayName": "DataNode directories",
+    "defaultDirectory": "/hadoop/hdfs/data",
+    "displayType": "directories",
+    "category": "DATANODE",
+    "serviceName": "HDFS",
+    "filename": "hdfs-site.xml",
+    "index": 1
+  },
+  {
+    "id": "site property",
+    "name": "dfs.datanode.data.dir.perm",
+    "displayName": "DataNode directories permission",
+    "displayType": "int",
+    "category": "DATANODE",
+    "serviceName": "HDFS",
+    "filename": "hdfs-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "dfs.replication",
+    "displayName": "Block replication",
+    "displayType": "int",
+    "category": "General",
+    "serviceName": "HDFS",
+    "filename": "hdfs-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "dfs.datanode.du.reserved",
+    "displayName": "Reserved space for HDFS",
+    "displayType": "int",
+    "unit": "bytes",
+    "category": "General",
+    "serviceName": "HDFS",
+    "filename": "hdfs-site.xml",
+    "index": 2
+  },
+  {
+    "id": "site property",
+    "name": "dfs.client.read.shortcircuit",
+    "displayName": "HDFS Short-circuit read",
+    "displayType": "checkbox",
+    "category": "Advanced hdfs-site",
+    "serviceName": "HDFS",
+    "filename": "hdfs-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "apache_artifacts_download_url",
+    "displayName": "apache_artifacts_download_url",
+    "description": "",
+    "isRequired": false,
+    "isRequiredByAgent": false,
+    "isVisible": false,
+    "category": "Advanced hdfs-site",
+    "serviceName": "HDFS"
+  },
 
-  /**********************************************YARN***************************************/
-    {
-      "id": "site property",
-      "name": "yarn.acl.enable",
-      "displayName": "yarn.acl.enable",
-      "displayType": "checkbox",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "RESOURCEMANAGER"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.admin.acl",
-      "displayName": "yarn.admin.acl",
-      "isRequired": false,
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "RESOURCEMANAGER"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.log-aggregation-enable",
-      "displayName": "yarn.log-aggregation-enable",
-      "displayType": "checkbox",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "RESOURCEMANAGER"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.resourcemanager.scheduler.class",
-      "displayName": "yarn.resourcemanager.scheduler.class",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "CapacityScheduler"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.scheduler.minimum-allocation-mb",
-      "displayName": "yarn.scheduler.minimum-allocation-mb",
-      "displayType": "int",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "CapacityScheduler"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.scheduler.maximum-allocation-mb",
-      "displayName": "yarn.scheduler.maximum-allocation-mb",
-      "displayType": "int",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "CapacityScheduler"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.nodemanager.resource.memory-mb",
-      "displayName": "yarn.nodemanager.resource.memory-mb",
-      "displayType": "int",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "NODEMANAGER"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.nodemanager.vmem-pmem-ratio",
-      "displayName": "yarn.nodemanager.vmem-pmem-ratio",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "NODEMANAGER"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.nodemanager.linux-container-executor.group",
-      "displayName": "yarn.nodemanager.linux-container-executor.group",
-      "serviceName": "YARN",
-      "category": "NODEMANAGER",
-      "filename": "yarn-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.nodemanager.log-dirs",
-      "displayName": "yarn.nodemanager.log-dirs",
-      "defaultDirectory": "/hadoop/yarn/log",
-      "displayType": "directories",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "NODEMANAGER"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.nodemanager.local-dirs",
-      "displayName": "yarn.nodemanager.local-dirs",
-      "defaultDirectory": "/hadoop/yarn/local",
-      "displayType": "directories",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "NODEMANAGER"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.nodemanager.remote-app-log-dir",
-      "displayName": "yarn.nodemanager.remote-app-log-dir",
-      "displayType": "directory",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "NODEMANAGER"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.nodemanager.remote-app-log-dir-suffix",
-      "displayName": "yarn.nodemanager.remote-app-log-dir-suffix",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "NODEMANAGER"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.nodemanager.aux-services",
-      "displayName": "yarn.nodemanager.aux-services",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "NODEMANAGER"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.nodemanager.log.retain-second",
-      "displayName": "yarn.nodemanager.log.retain-second",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml",
-      "category": "NODEMANAGER"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.log.server.url",
-      "displayName": "yarn.log.server.url",
-      "category": "Advanced yarn-site",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.timeline-service.enabled",
-      "displayName": "yarn.timeline-service.enabled",
-      "category": "APP_TIMELINE_SERVER",
-      "displayType": "checkbox",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.timeline-service.leveldb-timeline-store.path",
-      "displayName": "yarn.timeline-service.leveldb-timeline-store.path",
-      "defaultDirectory": "/hadoop/yarn/timeline",
-      "category": "APP_TIMELINE_SERVER",
-      "displayType": "directory",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.timeline-service.leveldb-timeline-store.ttl-interval-ms",
-      "displayName": "yarn.timeline-service.leveldb-timeline-store.ttl-interval-ms",
-      "displayType": "int",
-      "category": "APP_TIMELINE_SERVER",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.timeline-service.store-class",
-      "displayName": "yarn.timeline-service.store-class",
-      "category": "APP_TIMELINE_SERVER",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.timeline-service.ttl-enable",
-      "displayName": "yarn.timeline-service.ttl-enable",
-      "displayType": "checkbox",
-      "category": "APP_TIMELINE_SERVER",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.timeline-service.ttl-ms",
-      "displayName": "yarn.timeline-service.ttl-ms",
-      "displayType": "int",
-      "category": "APP_TIMELINE_SERVER",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.timeline-service.generic-application-history.store-class",
-      "displayName": "yarn.timeline-service.generic-application-history.store-class",
-      "category": "APP_TIMELINE_SERVER",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.timeline-service.webapp.address",
-      "displayName": "yarn.timeline-service.webapp.address",
-      "displayType": "string",
-      "category": "APP_TIMELINE_SERVER",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.timeline-service.webapp.https.address",
-      "displayName": "yarn.timeline-service.webapp.https.address",
-      "displayType": "string",
-      "category": "APP_TIMELINE_SERVER",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.timeline-service.address",
-      "displayName": "yarn.timeline-service.address",
-      "displayType": "string",
-      "category": "APP_TIMELINE_SERVER",
-      "serviceName": "YARN",
-      "filename": "yarn-site.xml"
-    },
-  /**********************************************MAPREDUCE2***************************************/
-    {
-      "id": "site property",
-      "name": "mapreduce.map.memory.mb",
-      "displayName": "Default virtual memory for a job's map-task",
-      "displayType": "int",
-      "unit": "MB",
-      "category": "General",
-      "serviceName": "MAPREDUCE2",
-      "filename": "mapred-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "mapreduce.reduce.memory.mb",
-      "displayName": "Default virtual memory for a job's reduce-task",
-      "displayType": "int",
-      "unit": "MB",
-      "category": "General",
-      "serviceName": "MAPREDUCE2",
-      "filename": "mapred-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "mapreduce.task.io.sort.mb",
-      "displayName": "Map-side sort buffer memory",
-      "displayType": "int",
-      "unit": "MB",
-      "category": "General",
-      "serviceName": "MAPREDUCE2",
-      "filename": "mapred-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "hadoop.security.auth_to_local",
-      "displayName": "hadoop.security.auth_to_local",
-      "displayType": "multiLine",
-      "serviceName": "HDFS",
-      "filename": "core-site.xml",
-      "category": "Advanced core-site"
-    },
-    {
-      "id": "site property",
-      "name": "yarn.app.mapreduce.am.resource.mb",
-      "displayName": "yarn.app.mapreduce.am.resource.mb",
-      "displayType": "int",
-      "category": "Advanced mapred-site",
-      "serviceName": "MAPREDUCE2",
-      "filename": "mapred-site.xml"
-    },
+/**********************************************YARN***************************************/
+  {
+    "id": "site property",
+    "name": "yarn.acl.enable",
+    "displayName": "yarn.acl.enable",
+    "displayType": "checkbox",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "RESOURCEMANAGER"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.admin.acl",
+    "displayName": "yarn.admin.acl",
+    "isRequired": false,
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "RESOURCEMANAGER"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.log-aggregation-enable",
+    "displayName": "yarn.log-aggregation-enable",
+    "displayType": "checkbox",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "RESOURCEMANAGER"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.resourcemanager.scheduler.class",
+    "displayName": "yarn.resourcemanager.scheduler.class",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "CapacityScheduler"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.scheduler.minimum-allocation-mb",
+    "displayName": "yarn.scheduler.minimum-allocation-mb",
+    "displayType": "int",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "CapacityScheduler"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.scheduler.maximum-allocation-mb",
+    "displayName": "yarn.scheduler.maximum-allocation-mb",
+    "displayType": "int",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "CapacityScheduler"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.nodemanager.resource.memory-mb",
+    "displayName": "yarn.nodemanager.resource.memory-mb",
+    "displayType": "int",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "NODEMANAGER"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.nodemanager.vmem-pmem-ratio",
+    "displayName": "yarn.nodemanager.vmem-pmem-ratio",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "NODEMANAGER"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.nodemanager.linux-container-executor.group",
+    "displayName": "yarn.nodemanager.linux-container-executor.group",
+    "serviceName": "YARN",
+    "category": "NODEMANAGER",
+    "filename": "yarn-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.nodemanager.log-dirs",
+    "displayName": "yarn.nodemanager.log-dirs",
+    "defaultDirectory": "/hadoop/yarn/log",
+    "displayType": "directories",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "NODEMANAGER"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.nodemanager.local-dirs",
+    "displayName": "yarn.nodemanager.local-dirs",
+    "defaultDirectory": "/hadoop/yarn/local",
+    "displayType": "directories",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "NODEMANAGER"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.nodemanager.remote-app-log-dir",
+    "displayName": "yarn.nodemanager.remote-app-log-dir",
+    "displayType": "directory",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "NODEMANAGER"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.nodemanager.remote-app-log-dir-suffix",
+    "displayName": "yarn.nodemanager.remote-app-log-dir-suffix",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "NODEMANAGER"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.nodemanager.aux-services",
+    "displayName": "yarn.nodemanager.aux-services",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "NODEMANAGER"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.nodemanager.log.retain-second",
+    "displayName": "yarn.nodemanager.log.retain-second",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml",
+    "category": "NODEMANAGER"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.log.server.url",
+    "displayName": "yarn.log.server.url",
+    "category": "Advanced yarn-site",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.timeline-service.enabled",
+    "displayName": "yarn.timeline-service.enabled",
+    "category": "APP_TIMELINE_SERVER",
+    "displayType": "checkbox",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.timeline-service.leveldb-timeline-store.path",
+    "displayName": "yarn.timeline-service.leveldb-timeline-store.path",
+    "defaultDirectory": "/hadoop/yarn/timeline",
+    "category": "APP_TIMELINE_SERVER",
+    "displayType": "directory",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.timeline-service.leveldb-timeline-store.ttl-interval-ms",
+    "displayName": "yarn.timeline-service.leveldb-timeline-store.ttl-interval-ms",
+    "displayType": "int",
+    "category": "APP_TIMELINE_SERVER",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.timeline-service.store-class",
+    "displayName": "yarn.timeline-service.store-class",
+    "category": "APP_TIMELINE_SERVER",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.timeline-service.ttl-enable",
+    "displayName": "yarn.timeline-service.ttl-enable",
+    "displayType": "checkbox",
+    "category": "APP_TIMELINE_SERVER",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.timeline-service.ttl-ms",
+    "displayName": "yarn.timeline-service.ttl-ms",
+    "displayType": "int",
+    "category": "APP_TIMELINE_SERVER",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.timeline-service.generic-application-history.store-class",
+    "displayName": "yarn.timeline-service.generic-application-history.store-class",
+    "category": "APP_TIMELINE_SERVER",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.timeline-service.webapp.address",
+    "displayName": "yarn.timeline-service.webapp.address",
+    "displayType": "string",
+    "category": "APP_TIMELINE_SERVER",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.timeline-service.webapp.https.address",
+    "displayName": "yarn.timeline-service.webapp.https.address",
+    "displayType": "string",
+    "category": "APP_TIMELINE_SERVER",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.timeline-service.address",
+    "displayName": "yarn.timeline-service.address",
+    "displayType": "string",
+    "category": "APP_TIMELINE_SERVER",
+    "serviceName": "YARN",
+    "filename": "yarn-site.xml"
+  },
+/**********************************************MAPREDUCE2***************************************/
+  {
+    "id": "site property",
+    "name": "mapreduce.map.memory.mb",
+    "displayName": "Default virtual memory for a job's map-task",
+    "displayType": "int",
+    "unit": "MB",
+    "category": "General",
+    "serviceName": "MAPREDUCE2",
+    "filename": "mapred-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "mapreduce.reduce.memory.mb",
+    "displayName": "Default virtual memory for a job's reduce-task",
+    "displayType": "int",
+    "unit": "MB",
+    "category": "General",
+    "serviceName": "MAPREDUCE2",
+    "filename": "mapred-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "mapreduce.task.io.sort.mb",
+    "displayName": "Map-side sort buffer memory",
+    "displayType": "int",
+    "unit": "MB",
+    "category": "General",
+    "serviceName": "MAPREDUCE2",
+    "filename": "mapred-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "hadoop.security.auth_to_local",
+    "displayName": "hadoop.security.auth_to_local",
+    "displayType": "multiLine",
+    "serviceName": "HDFS",
+    "filename": "core-site.xml",
+    "category": "Advanced core-site"
+  },
+  {
+    "id": "site property",
+    "name": "yarn.app.mapreduce.am.resource.mb",
+    "displayName": "yarn.app.mapreduce.am.resource.mb",
+    "displayType": "int",
+    "category": "Advanced mapred-site",
+    "serviceName": "MAPREDUCE2",
+    "filename": "mapred-site.xml"
+  },
 
-  /**********************************************oozie-site***************************************/
-    {
-      "id": "site property",
-      "name": "oozie.db.schema.name",
-      "displayName": "Database Name",
-      "isOverridable": false,
-      "displayType": "host",
-      "isObserved": true,
-      "category": "OOZIE_SERVER",
-      "serviceName": "OOZIE",
-      "filename": "oozie-site.xml",
-      "index": 4
-    },
-    {
-      "id": "site property",
-      "name": "oozie.service.JPAService.jdbc.username",
-      "displayName": "Database Username",
-      "isOverridable": false,
-      "displayType": "user",
-      "category": "OOZIE_SERVER",
-      "serviceName": "OOZIE",
-      "filename": "oozie-site.xml",
-      "index": 5
-    },
-    {
-      "id": "site property",
-      "name": "oozie.service.JPAService.jdbc.password",
-      "displayName": "Database Password",
-      "isOverridable": false,
-      "displayType": "password",
-      "category": "OOZIE_SERVER",
-      "serviceName": "OOZIE",
-      "filename": "oozie-site.xml",
-      "index": 6
-    },
-    {
-      "id": "site property",
-      "name": "oozie.service.JPAService.jdbc.driver", // the default value of this property is overriden in code
-      "displayName": "JDBC Driver Class",
-      "isOverridable": false,
-      "category": "OOZIE_SERVER",
-      "serviceName": "OOZIE",
-      "filename": "oozie-site.xml",
-      "index": 7
-    },
-    {
-      "id": "site property",
-      "name": "oozie.service.JPAService.jdbc.url",
-      "displayName": "Database URL",
-      "isOverridable": false,
-      "displayType": "advanced",
-      "category": "OOZIE_SERVER",
-      "serviceName": "OOZIE",
-      "filename": "oozie-site.xml",
-      "index": 8
-    },
+/**********************************************oozie-site***************************************/
+  {
+    "id": "site property",
+    "name": "oozie.db.schema.name",
+    "displayName": "Database Name",
+    "isOverridable": false,
+    "displayType": "host",
+    "isObserved": true,
+    "category": "OOZIE_SERVER",
+    "serviceName": "OOZIE",
+    "filename": "oozie-site.xml",
+    "index": 4
+  },
+  {
+    "id": "site property",
+    "name": "oozie.service.JPAService.jdbc.username",
+    "displayName": "Database Username",
+    "isOverridable": false,
+    "displayType": "user",
+    "category": "OOZIE_SERVER",
+    "serviceName": "OOZIE",
+    "filename": "oozie-site.xml",
+    "index": 5
+  },
+  {
+    "id": "site property",
+    "name": "oozie.service.JPAService.jdbc.password",
+    "displayName": "Database Password",
+    "isOverridable": false,
+    "displayType": "password",
+    "category": "OOZIE_SERVER",
+    "serviceName": "OOZIE",
+    "filename": "oozie-site.xml",
+    "index": 6
+  },
+  {
+    "id": "site property",
+    "name": "oozie.service.JPAService.jdbc.driver", // the default value of this property is overriden in code
+    "displayName": "JDBC Driver Class",
+    "isOverridable": false,
+    "category": "OOZIE_SERVER",
+    "serviceName": "OOZIE",
+    "filename": "oozie-site.xml",
+    "index": 7
+  },
+  {
+    "id": "site property",
+    "name": "oozie.service.JPAService.jdbc.url",
+    "displayName": "Database URL",
+    "isOverridable": false,
+    "displayType": "advanced",
+    "category": "OOZIE_SERVER",
+    "serviceName": "OOZIE",
+    "filename": "oozie-site.xml",
+    "index": 8
+  },
 
-  /**********************************************hive-site***************************************/
-    {
-      "id": "site property",
-      "name": "javax.jdo.option.ConnectionDriverName",  // the default value is overwritten in code
-      "displayName": "JDBC Driver Class",
-      "isOverridable": false,
-      "category": "HIVE_METASTORE",
-      "serviceName": "HIVE",
-      "filename": "hive-site.xml",
-      "index": 7
-    },
-    {
-      "id": "site property",
-      "name": "hive.heapsize",
-      "displayName": "Hive heap size",
-      "displayType": "int",
-      "unit": "MB",
-      "isOverridable": false,
-      "serviceName": "HIVE",
-      "filename": "hive-site.xml",
-      "category": "General",
-      "index": 9
-    },
-    {
-      "id": "site property",
-      "name": "javax.jdo.option.ConnectionUserName",
-      "displayName": "Database Username",
-      "displayType": "user",
-      "isOverridable": false,
-      "category": "HIVE_METASTORE",
-      "serviceName": "HIVE",
-      "filename": "hive-site.xml",
-      "index": 5
-    },
-    {
-      "id": "site property",
-      "name": "javax.jdo.option.ConnectionPassword",
-      "displayName": "Database Password",
-      "displayType": "password",
-      "isOverridable": false,
-      "category": "HIVE_METASTORE",
-      "serviceName": "HIVE",
-      "filename": "hive-site.xml",
-      "index": 6
-    },
-    {
-      "id": "site property",
-      "name": "javax.jdo.option.ConnectionURL",
-      "displayName": "Database URL",
-      "displayType": "advanced",
-      "isOverridable": false,
-      "category": "HIVE_METASTORE",
-      "serviceName": "HIVE",
-      "filename": "hive-site.xml",
-      "index": 8
-    },
-    {
-      "id": "site property",
-      "name": "ambari.hive.db.schema.name",
-      "displayName": "Database Name",
-      "displayType": "host",
-      "isOverridable": false,
-      "isObserved": true,
-      "serviceName": "HIVE",
-      "filename": "hive-site.xml",
-      "category": "HIVE_METASTORE",
-      "index": 4
-    },
-    {
-      "id": "site property",
-      "name": "hive.server2.tez.default.queues",
-      "displayName": "hive.server2.tez.default.queues",
-      "isRequired": false,
-      "serviceName": "HIVE",
-      "filename": "hive-site.xml",
-      "category": "Advanced hive-site"
-    },
-    {
-      "id": "site property",
-      "name": "hive.server2.thrift.port",
-      "displayName": "Hive Server Port",
-      "description": "TCP port number to listen on, default 10000.",
-      "defaultValue": "10000",
-      "displayType": "int",
-      "isReconfigurable": true,
-      "isOverridable": false,
-      "isVisible": true,
-      "category": "Advanced hive-site",
-      "serviceName": "HIVE",
-      "filename": "hive-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "hive.server2.support.dynamic.service.discovery",
-      "displayName": "hive.server2.support.dynamic.service.discovery",
-      "defaultValue": true,
-      "displayType": "checkbox",
-      "category": "Advanced hive-site",
-      "serviceName": "HIVE",
-      "filename": "hive-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "hive.security.authorization.enabled",
-      "displayName": "hive.security.authorization.enabled",
-      "defaultValue": false,
-      "displayType": "checkbox",
-      "category": "Advanced hive-site",
-      "serviceName": "HIVE",
-      "filename": "hive-site.xml"
-    },
-  /**********************************************tez-site*****************************************/
-    {
-      "id": "site property",
-      "name": "tez.am.resource.memory.mb",
-      "displayName": "tez.am.resource.memory.mb",
-      "displayType": "int",
-      "category": "General",
-      "serviceName": "TEZ",
-      "filename": "tez-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "tez.am.java.opts",
-      "displayName": "tez.am.java.opts",
-      "category": "General",
-      "serviceName": "TEZ",
-      "filename": "tez-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "tez.am.grouping.split-waves",
-      "displayName": "tez.am.grouping.split-waves",
-      "displayType": "float",
-      "category": "General",
-      "serviceName": "TEZ",
-      "filename": "tez-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "tez.am.grouping.min-size",
-      "displayName": "tez.am.grouping.min-size",
-      "displayType": "int",
-      "category": "General",
-      "serviceName": "TEZ",
-      "filename": "tez-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "tez.am.grouping.max-size",
-      "displayName": "tez.am.grouping.max-size",
-      "displayType": "int",
-      "category": "General",
-      "serviceName": "TEZ",
-      "filename": "tez-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "tez.am.log.level",
-      "displayName": "tez.am.log.level",
-      "displayType": "string",
-      "category": "General",
-      "serviceName": "TEZ",
-      "filename": "tez-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "tez.runtime.intermediate-input.compress.codec",
-      "displayName": "tez.runtime.intermediate-input.compress.codec",
-      "displayType": "string",
-      "category": "General",
-      "serviceName": "TEZ",
-      "filename": "tez-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "tez.runtime.intermediate-input.is-compressed",
-      "displayName": "tez.runtime.intermediate-input.is-compressed",
-      "displayType": "checkbox",
-      "category": "General",
-      "serviceName": "TEZ",
-      "filename": "tez-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "tez.runtime.intermediate-output.compress.codec",
-      "displayName": "tez.runtime.intermediate-output.compress.codec",
-      "displayType": "string",
-      "category": "General",
-      "serviceName": "TEZ",
-      "filename": "tez-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "tez.runtime.intermediate-output.should-compress",
-      "displayName": "tez.runtime.intermediate-output.should-compress",
-      "displayType": "checkbox",
-      "category": "General",
-      "serviceName": "TEZ",
-      "filename": "tez-site.xml"
-    },
+/**********************************************hive-site***************************************/
+  {
+    "id": "site property",
+    "name": "javax.jdo.option.ConnectionDriverName",  // the default value is overwritten in code
+    "displayName": "JDBC Driver Class",
+    "isOverridable": false,
+    "category": "HIVE_METASTORE",
+    "serviceName": "HIVE",
+    "filename": "hive-site.xml",
+    "index": 7
+  },
+  {
+    "id": "site property",
+    "name": "hive.heapsize",
+    "displayName": "Hive heap size",
+    "displayType": "int",
+    "unit": "MB",
+    "isOverridable": false,
+    "serviceName": "HIVE",
+    "filename": "hive-site.xml",
+    "category": "General",
+    "index": 9
+  },
+  {
+    "id": "site property",
+    "name": "javax.jdo.option.ConnectionUserName",
+    "displayName": "Database Username",
+    "displayType": "user",
+    "isOverridable": false,
+    "category": "HIVE_METASTORE",
+    "serviceName": "HIVE",
+    "filename": "hive-site.xml",
+    "index": 5
+  },
+  {
+    "id": "site property",
+    "name": "javax.jdo.option.ConnectionPassword",
+    "displayName": "Database Password",
+    "displayType": "password",
+    "isOverridable": false,
+    "category": "HIVE_METASTORE",
+    "serviceName": "HIVE",
+    "filename": "hive-site.xml",
+    "index": 6
+  },
+  {
+    "id": "site property",
+    "name": "javax.jdo.option.ConnectionURL",
+    "displayName": "Database URL",
+    "displayType": "advanced",
+    "isOverridable": false,
+    "category": "HIVE_METASTORE",
+    "serviceName": "HIVE",
+    "filename": "hive-site.xml",
+    "index": 8
+  },
+  {
+    "id": "site property",
+    "name": "ambari.hive.db.schema.name",
+    "displayName": "Database Name",
+    "displayType": "host",
+    "isOverridable": false,
+    "isObserved": true,
+    "serviceName": "HIVE",
+    "filename": "hive-site.xml",
+    "category": "HIVE_METASTORE",
+    "index": 4
+  },
+  {
+    "id": "site property",
+    "name": "hive.server2.tez.default.queues",
+    "displayName": "hive.server2.tez.default.queues",
+    "isRequired": false,
+    "serviceName": "HIVE",
+    "filename": "hive-site.xml",
+    "category": "Advanced hive-site"
+  },
+  {
+    "id": "site property",
+    "name": "hive.server2.thrift.port",
+    "displayName": "Hive Server Port",
+    "description": "TCP port number to listen on, default 10000.",
+    "defaultValue": "10000",
+    "displayType": "int",
+    "isReconfigurable": true,
+    "isOverridable": false,
+    "isVisible": true,
+    "category": "Advanced hive-site",
+    "serviceName": "HIVE",
+    "filename": "hive-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "hive.server2.support.dynamic.service.discovery",
+    "displayName": "hive.server2.support.dynamic.service.discovery",
+    "defaultValue": true,
+    "displayType": "checkbox",
+    "category": "Advanced hive-site",
+    "serviceName": "HIVE",
+    "filename": "hive-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "hive.security.authorization.enabled",
+    "displayName": "hive.security.authorization.enabled",
+    "defaultValue": false,
+    "displayType": "checkbox",
+    "category": "Advanced hive-site",
+    "serviceName": "HIVE",
+    "filename": "hive-site.xml"
+  },
+/**********************************************tez-site*****************************************/
+  {
+    "id": "site property",
+    "name": "tez.am.resource.memory.mb",
+    "displayName": "tez.am.resource.memory.mb",
+    "displayType": "int",
+    "category": "General",
+    "serviceName": "TEZ",
+    "filename": "tez-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "tez.am.java.opts",
+    "displayName": "tez.am.java.opts",
+    "category": "General",
+    "serviceName": "TEZ",
+    "filename": "tez-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "tez.am.grouping.split-waves",
+    "displayName": "tez.am.grouping.split-waves",
+    "displayType": "float",
+    "category": "General",
+    "serviceName": "TEZ",
+    "filename": "tez-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "tez.am.grouping.min-size",
+    "displayName": "tez.am.grouping.min-size",
+    "displayType": "int",
+    "category": "General",
+    "serviceName": "TEZ",
+    "filename": "tez-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "tez.am.grouping.max-size",
+    "displayName": "tez.am.grouping.max-size",
+    "displayType": "int",
+    "category": "General",
+    "serviceName": "TEZ",
+    "filename": "tez-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "tez.am.log.level",
+    "displayName": "tez.am.log.level",
+    "displayType": "string",
+    "category": "General",
+    "serviceName": "TEZ",
+    "filename": "tez-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "tez.runtime.intermediate-input.compress.codec",
+    "displayName": "tez.runtime.intermediate-input.compress.codec",
+    "displayType": "string",
+    "category": "General",
+    "serviceName": "TEZ",
+    "filename": "tez-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "tez.runtime.intermediate-input.is-compressed",
+    "displayName": "tez.runtime.intermediate-input.is-compressed",
+    "displayType": "checkbox",
+    "category": "General",
+    "serviceName": "TEZ",
+    "filename": "tez-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "tez.runtime.intermediate-output.compress.codec",
+    "displayName": "tez.runtime.intermediate-output.compress.codec",
+    "displayType": "string",
+    "category": "General",
+    "serviceName": "TEZ",
+    "filename": "tez-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "tez.runtime.intermediate-output.should-compress",
+    "displayName": "tez.runtime.intermediate-output.should-compress",
+    "displayType": "checkbox",
+    "category": "General",
+    "serviceName": "TEZ",
+    "filename": "tez-site.xml"
+  },
 
-  /**********************************************hbase-site***************************************/
-    {
-      "id": "site property",
-      "name": "hbase.tmp.dir",
-      "displayName": "HBase local directory",
-      "defaultDirectory": "/hadoop/hbase",
-      "displayType": "directory",
-      "category": "Advanced hbase-site",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "hbase.master.port",
-      "displayName": "HBase Master Port",
-      "isReconfigurable": true,
-      "displayType": "int",
-      "isOverridable": false,
-      "isVisible": true,
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "category": "Advanced hbase-site"
-    },
-    {
-      "id": "site property",
-      "name": "hbase.regionserver.global.memstore.upperLimit",
-      "displayName": "hbase.regionserver.global.memstore.upperLimit",
-      "displayType": "float",
-      "category": "Advanced hbase-site",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "hbase.regionserver.global.memstore.lowerLimit",
-      "displayName": "hbase.regionserver.global.memstore.lowerLimit",
-      "displayType": "float",
-      "category": "Advanced hbase-site",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "hbase.hstore.blockingStoreFiles",
-      "displayName": "hstore blocking storefiles",
-      "displayType": "int",
-      "category": "Advanced hbase-site",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "hbase.hstore.compactionThreshold",
-      "displayName": "HBase HStore compaction threshold",
-      "displayType": "int",
-      "category": "General",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "index": 0
-    },
-    {
-      "id": "site property",
-      "name": "hfile.block.cache.size",
-      "displayName": "HFile block cache size ",
-      "displayType": "float",
-      "category": "General",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "index": 1
-    },
-    {
-      "id": "site property",
-      "name": "hbase.hregion.max.filesize",
-      "displayName": "Maximum HStoreFile Size",
-      "displayType": "int",
-      "unit": "bytes",
-      "category": "General",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "index": 2
-    },
-    {
-      "id": "site property",
-      "name": "hbase.regionserver.handler.count",
-      "displayName": "RegionServer Handler",
-      "displayType": "int",
-      "category": "HBASE_REGIONSERVER",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "index": 2
-    },
-    {
-      "id": "site property",
-      "name": "hbase.hregion.majorcompaction",
-      "displayName": "HBase Region Major Compaction",
-      "displayType": "int",
-      "unit": "ms",
-      "category": "HBASE_REGIONSERVER",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "index": 3
-    },
-    {
-      "id": "site property",
-      "name": "hbase.hregion.memstore.block.multiplier",
-      "displayName": "HBase Region Block Multiplier",
-      "displayType": "int",
-      "category": "HBASE_REGIONSERVER",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "index": 4
-    },
-    {
-      "id": "site property",
-      "name": "hbase.hregion.memstore.mslab.enabled",
-      "displayName": "hbase.hregion.memstore.mslab.enabled",
-      "displayType": "checkbox",
-      "category": "Advanced hbase-site",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "hbase.hregion.memstore.flush.size",
-      "displayName": "HBase Region Memstore Flush Size",
-      "displayType": "int",
-      "unit": "bytes",
-      "category": "HBASE_REGIONSERVER",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "index": 5
-    },
-    {
-      "id": "site property",
-      "name": "hbase.client.scanner.caching",
-      "displayName": "HBase Client Scanner Caching",
-      "displayType": "int",
-      "unit": "rows",
-      "category": "General",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "index": 3
-    },
-    {
-      "id": "site property",
-      "name": "zookeeper.session.timeout",
-      "displayName": "Zookeeper timeout for HBase Session",
-      "displayType": "int",
-      "unit": "ms",
-      "category": "General",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "index": 4
-    },
-    {
-      "id": "site property",
-      "name": "hbase.client.keyvalue.maxsize",
-      "displayName": "HBase Client Maximum key-value Size",
-      "displayType": "int",
-      "unit": "bytes",
-      "category": "General",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "index": 5
-    },
-    {
-      "id": "site property",
-      "name": "hbase.coprocessor.region.classes",
-      "displayName": "hbase.coprocessor.region.classes",
-      "category": "Advanced hbase-site",
-      "isRequired": false,
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "hbase.coprocessor.master.classes",
-      "displayName": "hbase.coprocessor.master.classes",
-      "category": "Advanced hbase-site",
-      "isRequired": false,
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "hbase.zookeeper.quorum",
-      "displayName": "hbase.zookeeper.quorum",
-      "displayType": "multiLine",
-      "serviceName": "HBASE",
-      "filename": "hbase-site.xml",
-      "category": "Advanced hbase-site"
-    },
+/**********************************************hbase-site***************************************/
+  {
+    "id": "site property",
+    "name": "hbase.tmp.dir",
+    "displayName": "HBase local directory",
+    "defaultDirectory": "/hadoop/hbase",
+    "displayType": "directory",
+    "category": "Advanced hbase-site",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "hbase.master.port",
+    "displayName": "HBase Master Port",
+    "isReconfigurable": true,
+    "displayType": "int",
+    "isOverridable": false,
+    "isVisible": true,
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "category": "Advanced hbase-site"
+  },
+  {
+    "id": "site property",
+    "name": "hbase.regionserver.global.memstore.upperLimit",
+    "displayName": "hbase.regionserver.global.memstore.upperLimit",
+    "displayType": "float",
+    "category": "Advanced hbase-site",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "hbase.regionserver.global.memstore.lowerLimit",
+    "displayName": "hbase.regionserver.global.memstore.lowerLimit",
+    "displayType": "float",
+    "category": "Advanced hbase-site",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "hbase.hstore.blockingStoreFiles",
+    "displayName": "hstore blocking storefiles",
+    "displayType": "int",
+    "category": "Advanced hbase-site",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "hbase.hstore.compactionThreshold",
+    "displayName": "HBase HStore compaction threshold",
+    "displayType": "int",
+    "category": "General",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "index": 0
+  },
+  {
+    "id": "site property",
+    "name": "hfile.block.cache.size",
+    "displayName": "HFile block cache size ",
+    "displayType": "float",
+    "category": "General",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "index": 1
+  },
+  {
+    "id": "site property",
+    "name": "hbase.hregion.max.filesize",
+    "displayName": "Maximum HStoreFile Size",
+    "displayType": "int",
+    "unit": "bytes",
+    "category": "General",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "index": 2
+  },
+  {
+    "id": "site property",
+    "name": "hbase.regionserver.handler.count",
+    "displayName": "RegionServer Handler",
+    "displayType": "int",
+    "category": "HBASE_REGIONSERVER",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "index": 2
+  },
+  {
+    "id": "site property",
+    "name": "hbase.hregion.majorcompaction",
+    "displayName": "HBase Region Major Compaction",
+    "displayType": "int",
+    "unit": "ms",
+    "category": "HBASE_REGIONSERVER",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "index": 3
+  },
+  {
+    "id": "site property",
+    "name": "hbase.hregion.memstore.block.multiplier",
+    "displayName": "HBase Region Block Multiplier",
+    "displayType": "int",
+    "category": "HBASE_REGIONSERVER",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "index": 4
+  },
+  {
+    "id": "site property",
+    "name": "hbase.hregion.memstore.mslab.enabled",
+    "displayName": "hbase.hregion.memstore.mslab.enabled",
+    "displayType": "checkbox",
+    "category": "Advanced hbase-site",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "hbase.hregion.memstore.flush.size",
+    "displayName": "HBase Region Memstore Flush Size",
+    "displayType": "int",
+    "unit": "bytes",
+    "category": "HBASE_REGIONSERVER",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "index": 5
+  },
+  {
+    "id": "site property",
+    "name": "hbase.client.scanner.caching",
+    "displayName": "HBase Client Scanner Caching",
+    "displayType": "int",
+    "unit": "rows",
+    "category": "General",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "index": 3
+  },
+  {
+    "id": "site property",
+    "name": "zookeeper.session.timeout",
+    "displayName": "Zookeeper timeout for HBase Session",
+    "displayType": "int",
+    "unit": "ms",
+    "category": "General",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "index": 4
+  },
+  {
+    "id": "site property",
+    "name": "hbase.client.keyvalue.maxsize",
+    "displayName": "HBase Client Maximum key-value Size",
+    "displayType": "int",
+    "unit": "bytes",
+    "category": "General",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "index": 5
+  },
+  {
+    "id": "site property",
+    "name": "hbase.coprocessor.region.classes",
+    "displayName": "hbase.coprocessor.region.classes",
+    "category": "Advanced hbase-site",
+    "isRequired": false,
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "hbase.coprocessor.master.classes",
+    "displayName": "hbase.coprocessor.master.classes",
+    "category": "Advanced hbase-site",
+    "isRequired": false,
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "hbase.zookeeper.quorum",
+    "displayName": "hbase.zookeeper.quorum",
+    "displayType": "multiLine",
+    "serviceName": "HBASE",
+    "filename": "hbase-site.xml",
+    "category": "Advanced hbase-site"
+  },
 
-  /**********************************************storm-site***************************************/
-    {
-      "id": "site property",
-      "name": "storm.zookeeper.root",
-      "displayName": "storm.zookeeper.root",
-      "displayType": "directory",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "storm.local.dir",
-      "displayName": "storm.local.dir",
-      "defaultDirectory": "/hadoop/storm",
-      "displayType": "directory",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "storm.zookeeper.servers",
-      "displayName": "storm.zookeeper.servers",
-      "displayType": "masterHosts",
-      "isOverridable": false,
-      "isReconfigurable": false,
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "storm.zookeeper.port",
-      "displayName": "storm.zookeeper.port",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "storm.zookeeper.session.timeout",
-      "displayName": "storm.zookeeper.session.timeout",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "storm.zookeeper.connection.timeout",
-      "displayName": "storm.zookeeper.connection.timeout",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "storm.zookeeper.retry.times",
-      "displayName": "storm.zookeeper.retry.times",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "storm.zookeeper.retry.interval",
-      "displayName": "storm.zookeeper.retry.interval",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General",
-      "unit": "ms"
-    },
-    {
-      "id": "site property",
-      "name": "storm.zookeeper.retry.intervalceiling.millis",
-      "displayName": "storm.zookeeper.retry.intervalceiling.millis",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General",
-      "unit": "ms"
-    },
-    {
-      "id": "site property",
-      "name": "storm.cluster.mode",
-      "displayName": "storm.cluster.mode",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "storm.local.mode.zmq",
-      "displayName": "storm.local.mode.zmq",
-      "displayType": "checkbox",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "storm.thrift.transport",
-      "displayName": "storm.thrift.transport",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "storm.messaging.transport",
-      "displayName": "storm.messaging.transport",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "General",
-      "displayName": "storm.messaging.netty.buffer_size",
-      "name": "storm.messaging.netty.buffer_size",
-      "displayType": "int",
-      "unit": "bytes",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "General",
-      "displayName": "storm.messaging.netty.max_retries",
-      "name": "storm.messaging.netty.max_retries",
-      "displayType": "int",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "General",
-      "displayName": "storm.messaging.netty.max_wait_ms",
-      "name": "storm.messaging.netty.max_wait_ms",
-      "displayType": "int",
-      "unit": "ms",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "General",
-      "displayName": "storm.messaging.netty.min_wait_ms",
-      "name": "storm.messaging.netty.min_wait_ms",
-      "displayType": "int",
-      "unit": "ms",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "General",
-      "displayName": "storm.messaging.netty.server_worker_threads",
-      "name": "storm.messaging.netty.server_worker_threads",
-      "displayType": "int",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "General",
-      "displayName": "storm.messaging.netty.client_worker_threads",
-      "name": "storm.messaging.netty.client_worker_threads",
-      "displayType": "int",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.host",
-      "displayName": "nimbus.host",
-      "displayType": "masterHost",
-      "isOverridable": false,
-      "isReconfigurable": false,
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.thrift.port",
-      "displayName": "nimbus.thrift.port",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.thrift.max_buffer_size",
-      "displayName": "nimbus.thrift.max_buffer_size",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS",
-      "unit": "bytes"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.childopts",
-      "displayName": "nimbus.childopts",
-      "displayType": "multiLine",
-      "isOverridable": false,
-      "serviceName": "STORM",
-      "category": "NIMBUS",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.task.timeout.secs",
-      "displayName": "nimbus.task.timeout.secs",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS",
-      "unit": "seconds"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.supervisor.timeout.secs",
-      "displayName": "nimbus.supervisor.timeout.secs",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS",
-      "unit": "seconds"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.monitor.freq.secs",
-      "displayName": "nimbus.monitor.freq.secs",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS",
-      "unit": "seconds"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.cleanup.inbox.freq.secs",
-      "displayName": "nimbus.cleanup.inbox.freq.secs",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS",
-      "unit": "seconds"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.inbox.jar.expiration.secs",
-      "displayName": "nimbus.inbox.jar.expiration.secs",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS",
-      "unit": "seconds"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.task.launch.secs",
-      "displayName": "nimbus.task.launch.secs",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS",
-      "unit": "seconds"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.reassign",
-      "displayName": "nimbus.reassign",
-      "displayType": "checkbox",
-      "isReconfigurable": true,
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.file.copy.expiration.secs",
-      "displayName": "nimbus.file.copy.expiration.secs",
-      "displayType": "int",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS",
-      "unit": "seconds"
-    },
-    {
-      "id": "site property",
-      "name": "nimbus.topology.validator",
-      "displayName": "nimbus.topology.validator",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "NIMBUS"
-    },
-    {
-      "id": "site property",
-      "name": "supervisor.slots.ports",
-      "displayName": "supervisor.slots.ports",
-      "displayType": "string",
-      "serviceName": "STORM",
-      "filename": "storm-site.xml",
-      "category": "SUPERVISOR"
-    },
-    {
-      "id": "site property",
-      "isOverrideable": false,
-      "serviceName": "STORM",
-      "category": "SUPERVISOR",
-      "displayName": "supervisor.childopts",
-      "name": "supervisor.childopts",
-      "displayType": "multiLine",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "SUPERVISOR",
-      "displayName": "supervisor.worker.start.timeout.secs",
-      "name": "supervisor.worker.start.timeout.secs",
-      "displayType": "int",
-      "unit": "seconds",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "SUPERVISOR",
-      "displayName": "supervisor.worker.timeout.secs",
-      "name": "supervisor.worker.timeout.secs",
-      "displayType": "int",
-      "unit": "seconds",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "SUPERVISOR",
-      "displayName": "supervisor.monitor.frequency.secs",
-      "name": "supervisor.monitor.frequency.secs",
-      "displayType": "int",
-      "unit": "seconds",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "SUPERVISOR",
-      "displayName": "supervisor.heartbeat.frequency.secs",
-      "name": "supervisor.heartbeat.frequency.secs",
-      "displayType": "int",
-      "unit": "seconds",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "DRPC_SERVER",
-      "displayName": "drpc.port",
-      "name": "drpc.port",
-      "displayType": "int",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "DRPC_SERVER",
-      "displayName": "drpc.worker.threads",
-      "name": "drpc.worker.threads",
-      "displayType": "int",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "DRPC_SERVER",
-      "displayName": "drpc.queue.size",
-      "name": "drpc.queue.size",
-      "displayType": "int",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "DRPC_SERVER",
-      "displayName": "drpc.invocations.port",
-      "name": "drpc.invocations.port",
-      "displayType": "int",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "DRPC_SERVER",
-      "displayName": "drpc.request.timeout.secs",
-      "name": "drpc.request.timeout.secs",
-      "displayType": "int",
-      "unit": "seconds",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "DRPC_SERVER",
-      "displayName": "drpc.childopts",
-      "name": "drpc.childopts",
-      "displayType": "string",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "STORM_UI_SERVER",
-      "displayName": "ui.port",
-      "name": "ui.port",
-      "displayType": "int",
-      "filename": "storm-site.xml"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "STORM_UI_SERVER",
-      "displayName": "ui.childopts",
-      "name": "ui.childopts",
-      "displayType": "string",
-      "filename": "storm-site.xml"
-    },
-    //@Todo: uncomment following properties when logviewer is treated as different section on storm service page
-    /*
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "LogviewerServer",
-      "displayName": "logviewer.port",
-      "name": "logviewer.port",
-      "displayType": "int"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "LogviewerServer",
-      "displayName": "logviewer.childopts",
-      "name": "logviewer.childopts",
-      "displayType": "string"
-    },
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "LogviewerServer",
-      "displayName": "logviewer.appender.name",
-      "name": "logviewer.appender.name",
-      "displayType": "string"
-    },
-    */
-    {
-      "id": "site property",
-      "serviceName": "STORM",
-      "category": "Advanced storm-site",
-      "displayName": "worker.childopts",
-      "name": "worker.childopts",
-      "displayType": "multiLine",
-      "filename": "storm-site.xml"
-    },
-  /*********************************************oozie-site for Falcon*****************************/
-    {
-      "id": "site property",
-      "isReconfigurable": true,
-      "serviceName": "FALCON",
-      "category": "Falcon - Oozie integration",
-      "displayName": "oozie.service.ELService.ext.functions.coord-job-submit-instances",
-      "name": "oozie.service.ELService.ext.functions.coord-job-submit-instances",
-      "displayType": "custom",
-      "filename": "oozie-site.xml"
-    },
-    {
-      "id": "site property",
-      "isReconfigurable": true,
-      "serviceName": "FALCON",
-      "category": "Falcon - Oozie integration",
-      "displayName": "oozie.service.ELService.ext.functions.coord-action-create-inst",
-      "name": "oozie.service.ELService.ext.functions.coord-action-create-inst",
-      "displayType": "custom",
-      "filename": "oozie-site.xml"
-    },
-    {
-      "id": "site property",
-      "isReconfigurable": true,
-      "serviceName": "FALCON",
-      "category": "Falcon - Oozie integration",
-      "displayName": "oozie.service.ELService.ext.functions.coord-action-create",
-      "name": "oozie.service.ELService.ext.functions.coord-action-create",
-      "displayType": "custom",
-      "filename": "oozie-site.xml"
-    },
-    {
-      "id": "site property",
-      "isReconfigurable": true,
-      "serviceName": "FALCON",
-      "category": "Falcon - Oozie integration",
-      "displayName": "oozie.service.ELService.ext.functions.coord-job-submit-data",
-      "name": "oozie.service.ELService.ext.functions.coord-job-submit-data",
-      "displayType": "custom",
-      "filename": "oozie-site.xml"
-    },
-    {
-      "id": "site property",
-      "isReconfigurable": true,
-      "serviceName": "FALCON",
-      "category": "Falcon - Oozie integration",
-      "displayName": "oozie.service.ELService.ext.functions.coord-action-start",
-      "name": "oozie.service.ELService.ext.functions.coord-action-start",
-      "displayType": "custom",
-      "filename": "oozie-site.xml"
-    },
-    {
-      "id": "site property",
-      "isReconfigurable": true,
-      "serviceName": "FALCON",
-      "category": "Falcon - Oozie integration",
-      "displayName": "oozie.service.ELService.ext.functions.coord-sla-submit",
-      "name": "oozie.service.ELService.ext.functions.coord-sla-submit",
-      "displayType": "custom",
-      "filename": "oozie-site.xml"
-    },
-    {
-      "id": "site property",
-      "isReconfigurable": true,
-      "serviceName": "FALCON",
-      "category": "Falcon - Oozie integration",
-      "displayName": "oozie.service.ELService.ext.functions.coord-sla-create",
-      "name": "oozie.service.ELService.ext.functions.coord-sla-create",
-      "displayType": "custom",
-      "filename": "oozie-site.xml"
-    },
+/**********************************************storm-site***************************************/
+  {
+    "id": "site property",
+    "name": "storm.zookeeper.root",
+    "displayName": "storm.zookeeper.root",
+    "displayType": "directory",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General"
+  },
+  {
+    "id": "site property",
+    "name": "storm.local.dir",
+    "displayName": "storm.local.dir",
+    "defaultDirectory": "/hadoop/storm",
+    "displayType": "directory",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General"
+  },
+  {
+    "id": "site property",
+    "name": "storm.zookeeper.servers",
+    "displayName": "storm.zookeeper.servers",
+    "displayType": "masterHosts",
+    "isOverridable": false,
+    "isReconfigurable": false,
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General"
+  },
+  {
+    "id": "site property",
+    "name": "storm.zookeeper.port",
+    "displayName": "storm.zookeeper.port",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General"
+  },
+  {
+    "id": "site property",
+    "name": "storm.zookeeper.session.timeout",
+    "displayName": "storm.zookeeper.session.timeout",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General"
+  },
+  {
+    "id": "site property",
+    "name": "storm.zookeeper.connection.timeout",
+    "displayName": "storm.zookeeper.connection.timeout",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General"
+  },
+  {
+    "id": "site property",
+    "name": "storm.zookeeper.retry.times",
+    "displayName": "storm.zookeeper.retry.times",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General"
+  },
+  {
+    "id": "site property",
+    "name": "storm.zookeeper.retry.interval",
+    "displayName": "storm.zookeeper.retry.interval",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General",
+    "unit": "ms"
+  },
+  {
+    "id": "site property",
+    "name": "storm.zookeeper.retry.intervalceiling.millis",
+    "displayName": "storm.zookeeper.retry.intervalceiling.millis",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General",
+    "unit": "ms"
+  },
+  {
+    "id": "site property",
+    "name": "storm.cluster.mode",
+    "displayName": "storm.cluster.mode",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General"
+  },
+  {
+    "id": "site property",
+    "name": "storm.local.mode.zmq",
+    "displayName": "storm.local.mode.zmq",
+    "displayType": "checkbox",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General"
+  },
+  {
+    "id": "site property",
+    "name": "storm.thrift.transport",
+    "displayName": "storm.thrift.transport",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General"
+  },
+  {
+    "id": "site property",
+    "name": "storm.messaging.transport",
+    "displayName": "storm.messaging.transport",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "General"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "General",
+    "displayName": "storm.messaging.netty.buffer_size",
+    "name": "storm.messaging.netty.buffer_size",
+    "displayType": "int",
+    "unit": "bytes",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "General",
+    "displayName": "storm.messaging.netty.max_retries",
+    "name": "storm.messaging.netty.max_retries",
+    "displayType": "int",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "General",
+    "displayName": "storm.messaging.netty.max_wait_ms",
+    "name": "storm.messaging.netty.max_wait_ms",
+    "displayType": "int",
+    "unit": "ms",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "General",
+    "displayName": "storm.messaging.netty.min_wait_ms",
+    "name": "storm.messaging.netty.min_wait_ms",
+    "displayType": "int",
+    "unit": "ms",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "General",
+    "displayName": "storm.messaging.netty.server_worker_threads",
+    "name": "storm.messaging.netty.server_worker_threads",
+    "displayType": "int",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "General",
+    "displayName": "storm.messaging.netty.client_worker_threads",
+    "name": "storm.messaging.netty.client_worker_threads",
+    "displayType": "int",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.host",
+    "displayName": "nimbus.host",
+    "displayType": "masterHost",
+    "isOverridable": false,
+    "isReconfigurable": false,
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.thrift.port",
+    "displayName": "nimbus.thrift.port",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.thrift.max_buffer_size",
+    "displayName": "nimbus.thrift.max_buffer_size",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS",
+    "unit": "bytes"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.childopts",
+    "displayName": "nimbus.childopts",
+    "displayType": "multiLine",
+    "isOverridable": false,
+    "serviceName": "STORM",
+    "category": "NIMBUS",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.task.timeout.secs",
+    "displayName": "nimbus.task.timeout.secs",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS",
+    "unit": "seconds"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.supervisor.timeout.secs",
+    "displayName": "nimbus.supervisor.timeout.secs",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS",
+    "unit": "seconds"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.monitor.freq.secs",
+    "displayName": "nimbus.monitor.freq.secs",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS",
+    "unit": "seconds"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.cleanup.inbox.freq.secs",
+    "displayName": "nimbus.cleanup.inbox.freq.secs",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS",
+    "unit": "seconds"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.inbox.jar.expiration.secs",
+    "displayName": "nimbus.inbox.jar.expiration.secs",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS",
+    "unit": "seconds"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.task.launch.secs",
+    "displayName": "nimbus.task.launch.secs",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS",
+    "unit": "seconds"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.reassign",
+    "displayName": "nimbus.reassign",
+    "displayType": "checkbox",
+    "isReconfigurable": true,
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.file.copy.expiration.secs",
+    "displayName": "nimbus.file.copy.expiration.secs",
+    "displayType": "int",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS",
+    "unit": "seconds"
+  },
+  {
+    "id": "site property",
+    "name": "nimbus.topology.validator",
+    "displayName": "nimbus.topology.validator",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "NIMBUS"
+  },
+  {
+    "id": "site property",
+    "name": "supervisor.slots.ports",
+    "displayName": "supervisor.slots.ports",
+    "displayType": "string",
+    "serviceName": "STORM",
+    "filename": "storm-site.xml",
+    "category": "SUPERVISOR"
+  },
+  {
+    "id": "site property",
+    "isOverrideable": false,
+    "serviceName": "STORM",
+    "category": "SUPERVISOR",
+    "displayName": "supervisor.childopts",
+    "name": "supervisor.childopts",
+    "displayType": "multiLine",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "SUPERVISOR",
+    "displayName": "supervisor.worker.start.timeout.secs",
+    "name": "supervisor.worker.start.timeout.secs",
+    "displayType": "int",
+    "unit": "seconds",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "SUPERVISOR",
+    "displayName": "supervisor.worker.timeout.secs",
+    "name": "supervisor.worker.timeout.secs",
+    "displayType": "int",
+    "unit": "seconds",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "SUPERVISOR",
+    "displayName": "supervisor.monitor.frequency.secs",
+    "name": "supervisor.monitor.frequency.secs",
+    "displayType": "int",
+    "unit": "seconds",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "SUPERVISOR",
+    "displayName": "supervisor.heartbeat.frequency.secs",
+    "name": "supervisor.heartbeat.frequency.secs",
+    "displayType": "int",
+    "unit": "seconds",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "DRPC_SERVER",
+    "displayName": "drpc.port",
+    "name": "drpc.port",
+    "displayType": "int",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "DRPC_SERVER",
+    "displayName": "drpc.worker.threads",
+    "name": "drpc.worker.threads",
+    "displayType": "int",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "DRPC_SERVER",
+    "displayName": "drpc.queue.size",
+    "name": "drpc.queue.size",
+    "displayType": "int",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "DRPC_SERVER",
+    "displayName": "drpc.invocations.port",
+    "name": "drpc.invocations.port",
+    "displayType": "int",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "DRPC_SERVER",
+    "displayName": "drpc.request.timeout.secs",
+    "name": "drpc.request.timeout.secs",
+    "displayType": "int",
+    "unit": "seconds",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "DRPC_SERVER",
+    "displayName": "drpc.childopts",
+    "name": "drpc.childopts",
+    "displayType": "string",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "STORM_UI_SERVER",
+    "displayName": "ui.port",
+    "name": "ui.port",
+    "displayType": "int",
+    "filename": "storm-site.xml"
+  },
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "STORM_UI_SERVER",
+    "displayName": "ui.childopts",
+    "name": "ui.childopts",
+    "displayType": "string",
+    "filename": "storm-site.xml"
+  },
+  //@Todo: uncomment following properties when logviewer is treated as different section on storm service page
+  /*
+   {
+   "id": "site property",
+   "serviceName": "STORM",
+   "category": "LogviewerServer",
+   "displayName": "logviewer.port",
+   "name": "logviewer.port",
+   "displayType": "int"
+   },
+   {
+   "id": "site property",
+   "serviceName": "STORM",
+   "category": "LogviewerServer",
+   "displayName": "logviewer.childopts",
+   "name": "logviewer.childopts",
+   "displayType": "string"
+   },
+   {
+   "id": "site property",
+   "serviceName": "STORM",
+   "category": "LogviewerServer",
+   "displayName": "logviewer.appender.name",
+   "name": "logviewer.appender.name",
+   "displayType": "string"
+   },
+   */
+  {
+    "id": "site property",
+    "serviceName": "STORM",
+    "category": "Advanced storm-site",
+    "displayName": "worker.childopts",
+    "name": "worker.childopts",
+    "displayType": "multiLine",
+    "filename": "storm-site.xml"
+  },
+/*********************************************oozie-site for Falcon*****************************/
+  {
+    "id": "site property",
+    "isReconfigurable": true,
+    "serviceName": "FALCON",
+    "category": "Falcon - Oozie integration",
+    "displayName": "oozie.service.ELService.ext.functions.coord-job-submit-instances",
+    "name": "oozie.service.ELService.ext.functions.coord-job-submit-instances",
+    "displayType": "custom",
+    "filename": "oozie-site.xml"
+  },
+  {
+    "id": "site property",
+    "isReconfigurable": true,
+    "serviceName": "FALCON",
+    "category": "Falcon - Oozie integration",
+    "displayName": "oozie.service.ELService.ext.functions.coord-action-create-inst",
+    "name": "oozie.service.ELService.ext.functions.coord-action-create-inst",
+    "displayType": "custom",
+    "filename": "oozie-site.xml"
+  },
+  {
+    "id": "site property",
+    "isReconfigurable": true,
+    "serviceName": "FALCON",
+    "category": "Falcon - Oozie integration",
+    "displayName": "oozie.service.ELService.ext.functions.coord-action-create",
+    "name": "oozie.service.ELService.ext.functions.coord-action-create",
+    "displayType": "custom",
+    "filename": "oozie-site.xml"
+  },
+  {
+    "id": "site property",
+    "isReconfigurable": true,
+    "serviceName": "FALCON",
+    "category": "Falcon - Oozie integration",
+    "displayName": "oozie.service.ELService.ext.functions.coord-job-submit-data",
+    "name": "oozie.service.ELService.ext.functions.coord-job-submit-data",
+    "displayType": "custom",
+    "filename": "oozie-site.xml"
+  },
+  {
+    "id": "site property",
+    "isReconfigurable": true,
+    "serviceName": "FALCON",
+    "category": "Falcon - Oozie integration",
+    "displayName": "oozie.service.ELService.ext.functions.coord-action-start",
+    "name": "oozie.service.ELService.ext.functions.coord-action-start",
+    "displayType": "custom",
+    "filename": "oozie-site.xml"
+  },
+  {
+    "id": "site property",
+    "isReconfigurable": true,
+    "serviceName": "FALCON",
+    "category": "Falcon - Oozie integration",
+    "displayName": "oozie.service.ELService.ext.functions.coord-sla-submit",
+    "name": "oozie.service.ELService.ext.functions.coord-sla-submit",
+    "displayType": "custom",
+    "filename": "oozie-site.xml"
+  },
+  {
+    "id": "site property",
+    "isReconfigurable": true,
+    "serviceName": "FALCON",
+    "category": "Falcon - Oozie integration",
+    "displayName": "oozie.service.ELService.ext.functions.coord-sla-create",
+    "name": "oozie.service.ELService.ext.functions.coord-sla-create",
+    "displayType": "custom",
+    "filename": "oozie-site.xml"
+  },
 
-    // Runtime properties
-    {
-      "id": "site property",
-      "name": "*.domain",
-      "displayName": "*.domain",
-      "category": "FalconRuntimeSite",
-      "serviceName": "FALCON",
-      "filename": "falcon-runtime.properties.xml"
+  // Runtime properties
+  {
+    "id": "site property",
+    "name": "*.domain",
+    "displayName": "*.domain",
+    "category": "FalconRuntimeSite",
+    "serviceName": "FALCON",
+    "filename": "falcon-runtime.properties.xml"
 
-    },
-    {
-      "id": "site property",
-      "name": "*.log.cleanup.frequency.minutes.retention",
-      "displayName": "*.log.cleanup.frequency.minutes.retention",
-      "category": "FalconRuntimeSite",
-      "serviceName": "FALCON",
-      "filename": "falcon-runtime.properties.xml"
-    },
-    {
-      "id": "site property",
-      "name": "*.log.cleanup.frequency.hours.retention",
-      "displayName": "*.log.cleanup.frequency.hours.retention",
-      "category": "FalconRuntimeSite",
-      "serviceName": "FALCON",
-      "filename": "falcon-runtime.properties.xml"
-    },
-    {
-      "id": "site property",
-      "name": "*.log.cleanup.frequency.days.retention",
-      "displayName": "*.log.cleanup.frequency.days.retention",
-      "category": "FalconRuntimeSite",
-      "serviceName": "FALCON",
-      "filename": "falcon-runtime.properties.xml"
-    },
-    {
-      "id": "site property",
-      "name": "*.log.cleanup.frequency.months.retention",
-      "displayName": "*.log.cleanup.frequency.months.retention",
-      "category": "FalconRuntimeSite",
-      "serviceName": "FALCON",
-      "filename": "falcon-runtime.properties.xml"
-    },
+  },
+  {
+    "id": "site property",
+    "name": "*.log.cleanup.frequency.minutes.retention",
+    "displayName": "*.log.cleanup.frequency.minutes.retention",
+    "category": "FalconRuntimeSite",
+    "serviceName": "FALCON",
+    "filename": "falcon-runtime.properties.xml"
+  },
+  {
+    "id": "site property",
+    "name": "*.log.cleanup.frequency.hours.retention",
+    "displayName": "*.log.cleanup.frequency.hours.retention",
+    "category": "FalconRuntimeSite",
+    "serviceName": "FALCON",
+    "filename": "falcon-runtime.properties.xml"
+  },
+  {
+    "id": "site property",
+    "name": "*.log.cleanup.frequency.days.retention",
+    "displayName": "*.log.cleanup.frequency.days.retention",
+    "category": "FalconRuntimeSite",
+    "serviceName": "FALCON",
+    "filename": "falcon-runtime.properties.xml"
+  },
+  {
+    "id": "site property",
+    "name": "*.log.cleanup.frequency.months.retention",
+    "displayName": "*.log.cleanup.frequency.months.retention",
+    "category": "FalconRuntimeSite",
+    "serviceName": "FALCON",
+    "filename": "falcon-runtime.properties.xml"
+  },
 
-    //  Startup properties
+  //  Startup properties
 
-    {
-      "id": "site property",
-      "name": "*.do

<TRUNCATED>

[12/14] ambari git commit: BUG-30575. MapReduce2 Service Check fails after enabling Kerberos with permission issue in local filesystem.

Posted by yu...@apache.org.
BUG-30575. MapReduce2 Service Check fails after enabling Kerberos with permission issue in local filesystem.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a03fb1be
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a03fb1be
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a03fb1be

Branch: refs/heads/2.0-preview
Commit: a03fb1be1ff3780213b56e551337796beb356c51
Parents: 786d780
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Thu Jan 22 11:50:00 2015 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Jan 22 11:50:00 2015 -0800

----------------------------------------------------------------------
 .../common-services/YARN/2.1.0.2.0/package/scripts/yarn.py      | 5 +++++
 .../src/test/python/stacks/2.0.6/YARN/test_nodemanager.py       | 1 +
 2 files changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a03fb1be/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
index cf0d211..50d4f11 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
@@ -70,6 +70,11 @@ def yarn(name = None):
               mode=0775
               )
 
+    if params.security_enabled:
+      smokeuser_directories = [os.path.join(dir, 'usercache', params.smokeuser)
+                               for dir in params.nm_local_dirs.split(',')]
+      for directory in smokeuser_directories:
+        Execute(format("chown -R {params.smokeuser} {directory}"))
   Directory([params.yarn_pid_dir_prefix, params.yarn_pid_dir, params.yarn_log_dir],
             owner=params.yarn_user,
             group=params.user_group,
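
The block added above resolves each NodeManager local dir (params.nm_local_dirs is a
comma-separated list) to its per-user usercache path and hands ownership to the
smoke-test user, so the Kerberized service check can write to the local filesystem.
A minimal standalone sketch of the same logic, using plain subprocess in place of
Ambari's Execute/format helpers, with sample values assumed for illustration:

    import os
    import subprocess

    nm_local_dirs = "/hadoop/yarn/local,/mnt/hadoop/yarn/local"  # sample yarn.nodemanager.local-dirs
    smokeuser = "ambari-qa"  # sample smoke-test user

    smokeuser_directories = [os.path.join(d, "usercache", smokeuser)
                             for d in nm_local_dirs.split(",")]
    for directory in smokeuser_directories:
        # equivalent of Execute(format("chown -R {params.smokeuser} {directory}"))
        subprocess.check_call(["chown", "-R", smokeuser, directory])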

http://git-wip-us.apache.org/repos/asf/ambari/blob/a03fb1be/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index 2854399..4a3f32d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -438,6 +438,7 @@ class TestNodeManager(RMFTestCase):
                               mode = 0775,
                               recursive_permission=True
                               )
+    self.assertResourceCalled('Execute', 'chown -R ambari-qa /hadoop/yarn/local/usercache/ambari-qa')
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
       owner = 'yarn',
       group = 'hadoop',


[03/14] ambari git commit: AMBARI-9209. Add the ability to append a random value to values in LDAP attributes when generating principals in Active Directory (rlevas)

Posted by yu...@apache.org.
AMBARI-9209. Add the ability to append a random value to values in LDAP attributes when generating principals in Active Directory (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9f291484
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9f291484
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9f291484

Branch: refs/heads/2.0-preview
Commit: 9f291484ae3764975e3f3f2c616288454bca41c5
Parents: ae82067
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Jan 21 11:20:30 2015 -0500
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Wed Jan 21 12:21:26 2015 -0800

----------------------------------------------------------------------
 .../kerberos/ADKerberosOperationHandler.java    | 161 +++++-----
 .../kerberos/DeconstructedPrincipal.java        | 201 +++++++++++++
 .../kerberos/KerberosOperationHandler.java      |  10 +-
 .../1.10.3-10/configuration/kerberos-env.xml    |  16 +-
 .../ADKerberosOperationHandlerTest.java         | 301 +++++++++++--------
 .../kerberos/DeconstructedPrincipalTest.java    | 113 +++++++
 6 files changed, 582 insertions(+), 220 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9f291484/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
index 20f7e60..b5de64f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
@@ -21,7 +21,7 @@ package org.apache.ambari.server.serveraction.kerberos;
 
 import com.google.common.reflect.TypeToken;
 import com.google.gson.Gson;
-import org.apache.ambari.server.utils.StageUtils;
+import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.velocity.VelocityContext;
@@ -42,8 +42,6 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 /**
 * Implementation of <code>KerberosOperationHandler</code> to create principals in Active Directory
@@ -52,15 +50,6 @@ public class ADKerberosOperationHandler extends KerberosOperationHandler {
 
   private static Log LOG = LogFactory.getLog(ADKerberosOperationHandler.class);
 
-  /**
-   * Regular expression to parse the different principal formats:
-   * primary/instance@REALM
-   * primary@REALM
-   * primary/instance
-   * primary
-   */
-  private static Pattern PATTERN_PRINCIPAL = Pattern.compile("^(([^ /@]+)(?:/([^ /@]+))?)(?:@(.+)?)?$");
-
   private static final String LDAP_CONTEXT_FACTORY_CLASS = "com.sun.jndi.ldap.LdapCtxFactory";
 
   public final static String KERBEROS_ENV_LDAP_URL = "ldap_url";
@@ -213,27 +202,14 @@ public class ADKerberosOperationHandler extends KerberosOperationHandler {
     if (principal == null) {
       throw new KerberosOperationException("principal is null");
     }
-    NamingEnumeration<SearchResult> searchResultEnum = null;
+
+    DeconstructedPrincipal deconstructPrincipal = deconstructPrincipal(principal);
+
     try {
-      searchResultEnum = ldapContext.search(
-          principalContainerDn,
-          "(userPrincipalName=" + principal + ")",
-          searchControls);
-      if (searchResultEnum.hasMore()) {
-        return true;
-      }
+      return (findPrincipalDN(deconstructPrincipal.getNormalizedPrincipal()) != null);
     } catch (NamingException ne) {
       throw new KerberosOperationException("can not check if principal exists: " + principal, ne);
-    } finally {
-      try {
-        if (searchResultEnum != null) {
-          searchResultEnum.close();
-        }
-      } catch (NamingException ne) {
-        // ignore, we can not do anything about it
-      }
     }
-    return false;
   }
 
   /**
@@ -262,31 +238,24 @@ public class ADKerberosOperationHandler extends KerberosOperationHandler {
     }
 
     // TODO: (rlevas) pass components and realm in separately (AMBARI-9122)
-    String realm = null;
-    String principal_primary = null;
-    String principal_instance = null;
-
-    Matcher matcher = PATTERN_PRINCIPAL.matcher(principal);
-    if (matcher.matches()) {
-      principal = matcher.group(1);
-      principal_primary = matcher.group(2);
-      principal_instance = matcher.group(3);
-      realm = matcher.group(4);
-    }
+    DeconstructedPrincipal deconstructedPrincipal = deconstructPrincipal(principal);
 
-    if ((realm == null) || realm.isEmpty()) {
-      realm = getDefaultRealm();
+    String realm = deconstructedPrincipal.getRealm();
+    if (realm == null) {
+      realm = "";
     }
 
     Map<String, Object> context = new HashMap<String, Object>();
-    context.put("principal", principal);
-    context.put("principal_primary", principal_primary);
-    context.put("principal_instance", principal_instance);
+    context.put("normalized_principal", deconstructedPrincipal.getNormalizedPrincipal());
+    context.put("principal_name", deconstructedPrincipal.getPrincipalName());
+    context.put("principal_primary", deconstructedPrincipal.getPrimary());
+    context.put("principal_instance", deconstructedPrincipal.getInstance());
     context.put("realm", realm);
-    context.put("realm_lowercase", (realm == null) ? null : realm.toLowerCase());
+    context.put("realm_lowercase", realm.toLowerCase());
     context.put("password", password);
     context.put("is_service", service);
     context.put("container_dn", this.principalContainerDn);
+    context.put("principal_digest", DigestUtils.sha1Hex(deconstructedPrincipal.getNormalizedPrincipal()));
 
     Map<String, Object> data = processCreateTemplate(context);
 
@@ -300,13 +269,11 @@ public class ADKerberosOperationHandler extends KerberosOperationHandler {
 
         if ("unicodePwd".equals(key)) {
           if (value instanceof String) {
-            Attribute passwordAttr = new BasicAttribute("unicodePwd");  // password
             try {
-              passwordAttr.add(((String) value).getBytes("UTF-16LE"));
+              attributes.put(new BasicAttribute("unicodePwd", String.format("\"%s\"", password).getBytes("UTF-16LE")));
             } catch (UnsupportedEncodingException ue) {
               throw new KerberosOperationException("Can not encode password with UTF-16LE", ue);
             }
-            attributes.put(passwordAttr);
           }
         } else {
           Attribute attribute = new BasicAttribute(key);
@@ -327,7 +294,7 @@ public class ADKerberosOperationHandler extends KerberosOperationHandler {
     }
 
     if (cn == null) {
-      cn = String.format("%s@%s", principal, realm);
+      cn = deconstructedPrincipal.getNormalizedPrincipal();
     }
     try {
       Name name = new CompositeName().add(String.format("cn=%s,%s", cn, principalContainerDn));
@@ -359,26 +326,27 @@ public class ADKerberosOperationHandler extends KerberosOperationHandler {
     if (password == null) {
       throw new KerberosOperationException("principal password is null");
     }
+
+    DeconstructedPrincipal deconstructPrincipal = deconstructPrincipal(principal);
+
     try {
-      if (!principalExists(principal)) {
-        throw new KerberosOperationException("principal not found : " + principal);
+      String dn = findPrincipalDN(deconstructPrincipal.getNormalizedPrincipal());
+
+      if (dn != null) {
+        ldapContext.modifyAttributes(dn,
+            new ModificationItem[]{
+                new ModificationItem(DirContext.REPLACE_ATTRIBUTE, new BasicAttribute("unicodePwd", String.format("\"%s\"", password).getBytes("UTF-16LE")))
+            }
+        );
+      } else {
+        throw new KerberosOperationException(String.format("Can not set password for principal %s: Not Found", principal));
       }
-    } catch (KerberosOperationException e) {
-      e.printStackTrace();
-    }
-    try {
-      ModificationItem[] mods = new ModificationItem[1];
-      String quotedPasswordVal = "\"" + password + "\"";
-      mods[0] = new ModificationItem(DirContext.REPLACE_ATTRIBUTE,
-          new BasicAttribute("UnicodePwd", quotedPasswordVal.getBytes("UTF-16LE")));
-      ldapContext.modifyAttributes(
-          new CompositeName().add("cn=" + principal + "," + principalContainerDn),
-          mods);
-    } catch (NamingException ne) {
-      throw new KerberosOperationException("Can not set password for principal : " + principal, ne);
-    } catch (UnsupportedEncodingException ue) {
-      throw new KerberosOperationException("Unsupported encoding UTF-16LE", ue);
+    } catch (NamingException e) {
+      throw new KerberosOperationException(String.format("Can not set password for principal %s: %s", principal, e.getMessage()), e);
+    } catch (UnsupportedEncodingException e) {
+      throw new KerberosOperationException("Unsupported encoding UTF-16LE", e);
     }
+
     return 0;
   }
 
@@ -399,18 +367,17 @@ public class ADKerberosOperationHandler extends KerberosOperationHandler {
     if (principal == null) {
       throw new KerberosOperationException("principal is null");
     }
+
+    DeconstructedPrincipal deconstructPrincipal = deconstructPrincipal(principal);
+
     try {
-      if (!principalExists(principal)) {
-        return false;
+      String dn = findPrincipalDN(deconstructPrincipal.getNormalizedPrincipal());
+
+      if (dn != null) {
+        ldapContext.destroySubcontext(dn);
       }
-    } catch (KerberosOperationException e) {
-      e.printStackTrace();
-    }
-    try {
-      Name name = new CompositeName().add("cn=" + principal + "," + principalContainerDn);
-      ldapContext.destroySubcontext(name);
-    } catch (NamingException ne) {
-      throw new KerberosOperationException("Can not remove principal: " + principal);
+    } catch (NamingException e) {
+      throw new KerberosOperationException(String.format("Can not remove principal %s: %s", principal, e.getMessage()), e);
     }
 
     return true;
@@ -531,14 +498,14 @@ public class ADKerberosOperationHandler extends KerberosOperationHandler {
     if ((createTemplate == null) || createTemplate.isEmpty()) {
       template = "{" +
           "\"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"]," +
-          "\"cn\": \"$principal\"," +
+          "\"cn\": \"$principal_name\"," +
           "#if( $is_service )" +
-          "  \"servicePrincipalName\": \"$principal\"," +
+          "  \"servicePrincipalName\": \"$principal_name\"," +
           "#end" +
-          "\"userPrincipalName\": \"$principal@$realm.toLowerCase()\"," +
-          "\"unicodePwd\": \"\\\"$password\\\"\"," +
+          "\"userPrincipalName\": \"$normalized_principal.toLowerCase()\"," +
+          "\"unicodePwd\": \"$password\"," +
           "\"accountExpires\": \"0\"," +
-          "\"userAccountControl\": \"512\"" +
+          "\"userAccountControl\": \"66048\"" +
           "}";
     } else {
       template = createTemplate;
@@ -566,4 +533,34 @@ public class ADKerberosOperationHandler extends KerberosOperationHandler {
     return data;
   }
 
+  private String findPrincipalDN(String normalizedPrincipal) throws NamingException, KerberosOperationException {
+    String dn = null;
+
+    if (normalizedPrincipal != null) {
+      NamingEnumeration<SearchResult> results = null;
+
+      try {
+        results = ldapContext.search(
+            principalContainerDn,
+            String.format("(userPrincipalName=%s)", normalizedPrincipal),
+            searchControls
+        );
+
+        if ((results != null) && results.hasMore()) {
+          SearchResult result = results.next();
+          dn = result.getNameInNamespace();
+        }
+      } finally {
+        try {
+          if (results != null) {
+            results.close();
+          }
+        } catch (NamingException ne) {
+          // ignore, we can not do anything about it
+        }
+      }
+    }
+
+    return dn;
+  }
 }
\ No newline at end of file
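
A note on the unicodePwd handling above: Active Directory expects the value of the
unicodePwd attribute to be the password wrapped in double quotes and encoded as
UTF-16LE, which is what String.format("\"%s\"", password).getBytes("UTF-16LE")
produces in the handler. A quick Python sketch of the same encoding rule, with a
sample password assumed for illustration:

    password = "S3cretPassw0rd"  # sample value only
    unicode_pwd = ('"%s"' % password).encode("utf-16-le")
    assert unicode_pwd[:2] == b'"\x00'  # 0x22 0x00: UTF-16LE of the opening quote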

http://git-wip-us.apache.org/repos/asf/ambari/blob/9f291484/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipal.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipal.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipal.java
new file mode 100644
index 0000000..f5d8156
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipal.java
@@ -0,0 +1,201 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import javax.annotation.Nullable;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * DeconstructedPrincipal manages the different parts of a principal and can be used to get a
+ * normalized principal value
+ * <p/>
+ * A "normalized" principal has the following forms:
+ * <ul>
+ * <li>primary/instance@realm</li>
+ * <li>primary@realm</li>
+ * </ul>
+ * <p/>
+ * This class will create a DeconstructedPrincipal from a String containing a principal using
+ * {@link DeconstructedPrincipal#valueOf(String, String)}
+ */
+class DeconstructedPrincipal {
+  /**
+   * Regular expression to parse the different principal formats:
+   * <ul>
+   * <li>primary/instance@REALM</li>
+   * <li>primary@REALM</li>
+   * <li>primary/instance</li>
+   * <li>primary</li>
+   * </ul>
+   */
+  private static Pattern PATTERN_PRINCIPAL = Pattern.compile("^([^ /@]+)(?:/([^ /@]+))?(?:@(.+)?)?$");
+
+  /**
+   * A String containing the "primary" component of a principal
+   */
+  private final String primary;
+
+  /**
+   * A String containing the "instance" component of a principal
+   */
+  private final String instance;
+
+  /**
+   * A String containing the "realm" component of a principal
+   */
+  private final String realm;
+
+  /**
+   * A String containing the principal name portion of the principal.
+   * The principal name is the combination of the primary and instance components.
+   * This value is generated using the primary, instance, and realm components.
+   */
+  private final String principalName;
+
+  /**
+   * A String containing the complete normalized principal
+   * The normalized principal is the combination of the primary, instance, and realm components.
+   * This value is generated using the primary, instance, and realm components.
+   */
+  private final String normalizedPrincipal;
+
+  /**
+   * Given a principal and a default realm, creates a new DeconstructedPrincipal
+   * <p/>
+   * If the supplied principal does not have a realm component, the default realm (supplied) will be
+   * used.
+   *
+   * @param principal    a String containing the principal to deconstruct
+   * @param defaultRealm a String containing the default realm
+   * @return a new DeconstructedPrincipal
+   */
+  public static DeconstructedPrincipal valueOf(String principal, @Nullable String defaultRealm) {
+    if (principal == null) {
+      throw new IllegalArgumentException("The principal may not be null");
+    }
+
+    Matcher matcher = PATTERN_PRINCIPAL.matcher(principal);
+
+    if (matcher.matches()) {
+      String primary = matcher.group(1);
+      String instance = matcher.group(2);
+      String realm = matcher.group(3);
+
+      if ((realm == null) || realm.isEmpty()) {
+        realm = defaultRealm;
+      }
+
+      return new DeconstructedPrincipal(primary, instance, realm);
+    } else {
+      throw new IllegalArgumentException(String.format("Invalid principal value: %s", principal));
+    }
+  }
+
+
+  /**
+   * Constructs a new DeconstructedPrincipal
+   *
+   * @param primary  a String containing the "primary" component of the principal
+   * @param instance a String containing the "instance" component of the principal
+   * @param realm    a String containing the "realm" component of the principal
+   */
+  protected DeconstructedPrincipal(String primary, String instance, String realm) {
+    this.primary = primary;
+    this.instance = instance;
+    this.realm = realm;
+
+    StringBuilder builder = new StringBuilder();
+
+    if (this.primary != null) {
+      builder.append(primary);
+    }
+
+    if (this.instance != null) {
+      builder.append('/');
+      builder.append(this.instance);
+    }
+
+    this.principalName = builder.toString();
+
+    if (this.realm != null) {
+      builder.append('@');
+      builder.append(this.realm);
+    }
+
+    this.normalizedPrincipal = builder.toString();
+  }
+
+  /**
+   * Gets the primary component of this DeconstructedPrincipal
+   *
+   * @return a String containing the "primary" component of this DeconstructedPrincipal
+   */
+  public String getPrimary() {
+    return primary;
+  }
+
+  /**
+   * Gets the instance component of this DeconstructedPrincipal
+   *
+   * @return a String containing the "instance" component of this DeconstructedPrincipal
+   */
+  public String getInstance() {
+    return instance;
+  }
+
+  /**
+   * Gets the realm component of this DeconstructedPrincipal
+   *
+   * @return a String containing the "realm" component of this DeconstructedPrincipal
+   */
+  public String getRealm() {
+    return realm;
+  }
+
+  /**
+   * Gets the constructed principal name for this DeconstructedPrincipal
+   * <p/>
+   * The principal name is the combination of the primary and instance components:
+   * <ul>
+   * <li>primary/instance</li>
+   * <li>primary</li>
+   * </ul>
+   *
+   * @return a String containing the "realm" component of this DeconstructedPrincipal
+   */
+  public String getPrincipalName() {
+    return principalName;
+  }
+
+  /**
+   * Gets the constructed normalized principal for this DeconstructedPrincipal
+   * <p/>
+   * The normalized principal is the combination of the primary, instance, and realm components:
+   * <ul>
+   * <li>primary/instance@realm</li>
+   * <li>primary@realm</li>
+   * </ul>
+   *
+   * @return a String containing the "realm" component of this DeconstructedPrincipal
+   */
+  public String getNormalizedPrincipal() {
+    return normalizedPrincipal;
+  }
+}
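
For reference, DeconstructedPrincipal accepts the four principal forms listed in its
javadoc and fills in the default realm when none is given. A small Python sketch of
the same parsing, with the regular expression above translated verbatim (the sample
principals are illustrative):

    import re

    PATTERN_PRINCIPAL = re.compile(r"^([^ /@]+)(?:/([^ /@]+))?(?:@(.+)?)?$")

    def deconstruct(principal, default_realm):
        m = PATTERN_PRINCIPAL.match(principal)
        if not m:
            raise ValueError("Invalid principal value: %s" % principal)
        primary, instance, realm = m.group(1), m.group(2), m.group(3) or default_realm
        principal_name = primary + ("/" + instance if instance else "")
        normalized = principal_name + ("@" + realm if realm else "")
        return primary, instance, realm, normalized

    deconstruct("nn/c6401.ambari.apache.org@EXAMPLE.COM", "EXAMPLE.COM")
    # -> ('nn', 'c6401.ambari.apache.org', 'EXAMPLE.COM', 'nn/c6401.ambari.apache.org@EXAMPLE.COM')
    deconstruct("ambari-qa", "EXAMPLE.COM")
    # -> ('ambari-qa', None, 'EXAMPLE.COM', 'ambari-qa@EXAMPLE.COM')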

http://git-wip-us.apache.org/repos/asf/ambari/blob/9f291484/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
index 7a9233b..a23aa81 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
@@ -63,7 +63,6 @@ public abstract class KerberosOperationHandler {
   private final static char[] SECURE_PASSWORD_CHARS =
       "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890?.!$%^*()-_+=~".toCharArray();
 
-
   /**
    * The default set of ciphers to use for creating keytab entries
    */
@@ -432,4 +431,13 @@ public abstract class KerberosOperationHandler {
       }
     }
   }
+
+  protected DeconstructedPrincipal deconstructPrincipal(String principal) throws KerberosOperationException {
+    try {
+      return DeconstructedPrincipal.valueOf(principal, getDefaultRealm());
+    } catch (IllegalArgumentException e) {
+      throw new KerberosOperationException(e.getMessage(), e);
+    }
+  }
+
 }
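
A sketch of the new helper's contract for subclasses (hypothetical caller; parsing
failures surface as KerberosOperationException, wrapping the IllegalArgumentException
thrown by DeconstructedPrincipal.valueOf):

    // Inside a KerberosOperationHandler subclass, after open() has set the default realm:
    DeconstructedPrincipal dp = deconstructPrincipal("hdfs@EXAMPLE.COM"); // parses cleanly
    deconstructPrincipal("/invalid"); // throws KerberosOperationException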

http://git-wip-us.apache.org/repos/asf/ambari/blob/9f291484/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
index 85ae018..d37e736 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
@@ -40,19 +40,23 @@
   <property require-input="true">
     <name>create_attributes_template</name>
     <description>
-      Customizable JSON document representing the LDAP attributes needed to create a new Kerberos entity in the KDC (Velocity template engine).
+      A Velocity template to use to generate a JSON-formatted document containing the set of
+      attribute names and values needed to create a new Kerberos identity in the relevant KDC.
+      Variables include:
+      principal_name, principal_primary, principal_instance, realm, realm_lowercase,
+      normalized_principal, principal_digest, password, is_service, container_dn
     </description>
     <value>
 {
   "objectClass": ["top", "person", "organizationalPerson", "user"],
-  "cn": "$principal",
+  "cn": "$principal_name",
   #if( $is_service )
-  "servicePrincipalName": "$principal",
+  "servicePrincipalName": "$principal_name",
   #end
-  "userPrincipalName": "$principal@$realm.toLowerCase()",
-  "unicodePwd": "\"$password\"",
+  "userPrincipalName": "$normalized_principal.toLowerCase()",
+  "unicodePwd": "$password",
   "accountExpires": "0",
-  "userAccountControl": "512"
+  "userAccountControl": "66048"
 }
     </value>
   </property>
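
As a sketch, rendering the template above for a hypothetical service principal
nn/c6501.ambari.apache.org@HDP01.LOCAL with password "secret" and is_service = true
would produce roughly:

    {
      "objectClass": ["top", "person", "organizationalPerson", "user"],
      "cn": "nn/c6501.ambari.apache.org",
      "servicePrincipalName": "nn/c6501.ambari.apache.org",
      "userPrincipalName": "nn/c6501.ambari.apache.org@hdp01.local",
      "unicodePwd": "secret",
      "accountExpires": "0",
      "userAccountControl": "66048"
    }

The handler then converts unicodePwd into the quoted, UTF-16LE-encoded form that
Active Directory expects, as the updated tests below verify.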

http://git-wip-us.apache.org/repos/asf/ambari/blob/9f291484/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
index 6a89dbb..8d2a3c4 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
@@ -19,6 +19,8 @@
 package org.apache.ambari.server.serveraction.kerberos;
 
 import junit.framework.Assert;
+import org.easymock.Capture;
+import org.easymock.CaptureType;
 import org.easymock.EasyMockSupport;
 import org.easymock.IAnswer;
 import org.junit.Ignore;
@@ -26,21 +28,22 @@ import org.junit.Test;
 
 import javax.naming.AuthenticationException;
 import javax.naming.CommunicationException;
+import javax.naming.Name;
 import javax.naming.NamingEnumeration;
+import javax.naming.directory.Attributes;
+import javax.naming.directory.DirContext;
 import javax.naming.directory.SearchControls;
 import javax.naming.directory.SearchResult;
 import javax.naming.ldap.Control;
 import javax.naming.ldap.LdapContext;
 
-import java.util.ArrayList;
-import java.util.Arrays;
+import java.nio.charset.Charset;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.*;
 
 public class ADKerberosOperationHandlerTest extends EasyMockSupport {
   private static final String DEFAULT_ADMIN_PRINCIPAL = "cluser_admin@HDP01.LOCAL";
@@ -235,32 +238,30 @@ public class ADKerberosOperationHandlerTest extends EasyMockSupport {
       }
     };
 
+    Capture<Name> capturedName = new Capture<Name>(CaptureType.ALL);
+    Capture<Attributes> capturedAttributes = new Capture<Attributes>(CaptureType.ALL);
+
     ADKerberosOperationHandler handler = createMockBuilder(ADKerberosOperationHandler.class)
         .addMockedMethod(ADKerberosOperationHandler.class.getDeclaredMethod("createInitialLdapContext", Properties.class, Control[].class))
         .addMockedMethod(ADKerberosOperationHandler.class.getDeclaredMethod("createSearchControls"))
         .createNiceMock();
 
+    NamingEnumeration<SearchResult> searchResult = createNiceMock(NamingEnumeration.class);
+    expect(searchResult.hasMore()).andReturn(false).once();
+
+    LdapContext ldapContext = createNiceMock(LdapContext.class);
+    expect(ldapContext.search(anyObject(String.class), anyObject(String.class), anyObject(SearchControls.class)))
+        .andReturn(searchResult)
+        .once();
+
+    expect(ldapContext.createSubcontext(capture(capturedName), capture(capturedAttributes)))
+        .andReturn(createNiceMock(DirContext.class))
+        .anyTimes();
+
     expect(handler.createInitialLdapContext(anyObject(Properties.class), anyObject(Control[].class)))
-        .andAnswer(new IAnswer<LdapContext>() {
-          @Override
-          public LdapContext answer() throws Throwable {
-            LdapContext ldapContext = createNiceMock(LdapContext.class);
-            expect(ldapContext.search(anyObject(String.class), anyObject(String.class), anyObject(SearchControls.class)))
-                .andAnswer(new IAnswer<NamingEnumeration<SearchResult>>() {
-                  @Override
-                  public NamingEnumeration<SearchResult> answer() throws Throwable {
-                    NamingEnumeration<SearchResult> result = createNiceMock(NamingEnumeration.class);
-                    expect(result.hasMore()).andReturn(false).once();
-                    replay(result);
-                    return result;
-                  }
-                })
-                .once();
-            replay(ldapContext);
-            return ldapContext;
-          }
-        })
+        .andReturn(ldapContext)
         .once();
+
     expect(handler.createSearchControls()).andAnswer(new IAnswer<SearchControls>() {
       @Override
       public SearchControls answer() throws Throwable {
@@ -273,45 +274,67 @@ public class ADKerberosOperationHandlerTest extends EasyMockSupport {
     replayAll();
 
     handler.open(kc, DEFAULT_REALM, kerberosEnvMap);
+    handler.createPrincipal("nn/c6501.ambari.apache.org", "secret", true);
+    handler.createPrincipal("hdfs@" + DEFAULT_REALM, "secret", false);
+    handler.close();
 
-    Map<String, Object> context = new HashMap<String, Object>();
-    context.put("principal", "nn/c6501.ambari.apache.org");
-    context.put("principal_primary", "nn");
-    context.put("principal_instance", "c6501.ambari.apache.org");
-    context.put("realm", "EXAMPLE.COM");
-    context.put("realm_lowercase", "example.com");
-    context.put("password", "secret");
-    context.put("is_service", true);
-    context.put("container_dn", "ou=cluster,DC=EXAMPLE,DC=COM");
-
-    Map<String, Object> data;
-
-    data = handler.processCreateTemplate(context);
-
-    Assert.assertNotNull(data);
-    Assert.assertEquals(7, data.size());
-    Assert.assertEquals(new ArrayList<String>(Arrays.asList("top", "person", "organizationalPerson", "user")), data.get("objectClass"));
-    Assert.assertEquals("nn/c6501.ambari.apache.org", data.get("cn"));
-    Assert.assertEquals("nn/c6501.ambari.apache.org", data.get("servicePrincipalName"));
-    Assert.assertEquals("nn/c6501.ambari.apache.org@example.com", data.get("userPrincipalName"));
-    Assert.assertEquals("\"secret\"", data.get("unicodePwd"));
-    Assert.assertEquals("0", data.get("accountExpires"));
-    Assert.assertEquals("512", data.get("userAccountControl"));
-
-
-    context.put("is_service", false);
-    data = handler.processCreateTemplate(context);
-
-    Assert.assertNotNull(data);
-    Assert.assertEquals(6, data.size());
-    Assert.assertEquals(new ArrayList<String>(Arrays.asList("top", "person", "organizationalPerson", "user")), data.get("objectClass"));
-    Assert.assertEquals("nn/c6501.ambari.apache.org", data.get("cn"));
-    Assert.assertEquals("nn/c6501.ambari.apache.org@example.com", data.get("userPrincipalName"));
-    Assert.assertEquals("\"secret\"", data.get("unicodePwd"));
-    Assert.assertEquals("0", data.get("accountExpires"));
-    Assert.assertEquals("512", data.get("userAccountControl"));
+    List<Attributes> attributesList = capturedAttributes.getValues();
+    Attributes attributes;
 
-    handler.close();
+    attributes = attributesList.get(0);
+    String[] objectClasses = new String[]{"top", "person", "organizationalPerson", "user"};
+
+    Assert.assertNotNull(attributes);
+    Assert.assertEquals(7, attributes.size());
+
+    Assert.assertNotNull(attributes.get("objectClass"));
+    Assert.assertEquals(objectClasses.length, attributes.get("objectClass").size());
+    for (int i = 0; i < objectClasses.length; i++) {
+      Assert.assertEquals(objectClasses[i], attributes.get("objectClass").get(i));
+    }
+
+    Assert.assertNotNull(attributes.get("cn"));
+    Assert.assertEquals("nn/c6501.ambari.apache.org", attributes.get("cn").get());
+
+    Assert.assertNotNull(attributes.get("servicePrincipalName"));
+    Assert.assertEquals("nn/c6501.ambari.apache.org", attributes.get("servicePrincipalName").get());
+
+    Assert.assertNotNull(attributes.get("userPrincipalName"));
+    Assert.assertEquals("nn/c6501.ambari.apache.org@hdp01.local", attributes.get("userPrincipalName").get());
+
+    Assert.assertNotNull(attributes.get("unicodePwd"));
+    Assert.assertEquals("\"secret\"", new String((byte[]) attributes.get("unicodePwd").get(), Charset.forName("UTF-16LE")));
+
+    Assert.assertNotNull(attributes.get("accountExpires"));
+    Assert.assertEquals("0", attributes.get("accountExpires").get());
+
+    Assert.assertNotNull(attributes.get("userAccountControl"));
+    Assert.assertEquals("66048", attributes.get("userAccountControl").get());
+
+    attributes = attributesList.get(1);
+    Assert.assertNotNull(attributes);
+    Assert.assertEquals(6, attributes.size());
+
+    Assert.assertNotNull(attributes.get("objectClass"));
+    Assert.assertEquals(objectClasses.length, attributes.get("objectClass").size());
+    for (int i = 0; i < objectClasses.length; i++) {
+      Assert.assertEquals(objectClasses[i], attributes.get("objectClass").get(i));
+    }
+
+    Assert.assertNotNull(attributes.get("cn"));
+    Assert.assertEquals("hdfs", attributes.get("cn").get());
+
+    Assert.assertNotNull(attributes.get("userPrincipalName"));
+    Assert.assertEquals("hdfs@hdp01.local", attributes.get("userPrincipalName").get());
+
+    Assert.assertNotNull(attributes.get("unicodePwd"));
+    Assert.assertEquals("\"secret\"", new String((byte[]) attributes.get("unicodePwd").get(), Charset.forName("UTF-16LE")));
+
+    Assert.assertNotNull(attributes.get("accountExpires"));
+    Assert.assertEquals("0", attributes.get("accountExpires").get());
+
+    Assert.assertNotNull(attributes.get("userAccountControl"));
+    Assert.assertEquals("66048", attributes.get("userAccountControl").get());
   }
 
   @Test
@@ -321,54 +344,52 @@ public class ADKerberosOperationHandlerTest extends EasyMockSupport {
       {
         put(ADKerberosOperationHandler.KERBEROS_ENV_LDAP_URL, DEFAULT_LDAP_URL);
         put(ADKerberosOperationHandler.KERBEROS_ENV_PRINCIPAL_CONTAINER_DN, DEFAULT_PRINCIPAL_CONTAINER_DN);
-        put(ADKerberosOperationHandler.KERBEROS_ENV_CREATE_ATTRIBUTES_TEMPLATE, "{" +
+        put(ADKerberosOperationHandler.KERBEROS_ENV_CREATE_ATTRIBUTES_TEMPLATE, "" +
+            "#set( $user = \"${principal_primary}-${principal_digest}\" )" +
+            "{" +
             "  \"objectClass\": [" +
             "    \"top\"," +
             "    \"person\"," +
             "    \"organizationalPerson\"," +
             "    \"user\"" +
             "  ]," +
-            "  \"cn\": \"$principal@$realm\"," +
-            "  \"dn\": \"$principal@$realm,$container_dn\"," +
-            "  \"distinguishedName\": \"$principal@$realm,$container_dn\"," +
-            "  \"sAMAccountName\": \"$principal\"," +
+            "  \"cn\": \"$user\"," +
+            "  \"sAMAccountName\": \"$user.substring(0,20)\"," +
             "  #if( $is_service )" +
-            "  \"servicePrincipalName\": \"$principal\"," +
+            "  \"servicePrincipalName\": \"$principal_name\"," +
             "  #end" +
-            "  \"userPrincipalName\": \"$principal@$realm.toLowerCase()\"," +
-            "  \"unicodePwd\": \"`$password`\"," +
+            "  \"userPrincipalName\": \"$normalized_principal.toLowerCase()\"," +
+            "  \"unicodePwd\": \"$password\"," +
             "  \"accountExpires\": \"0\"," +
             "  \"userAccountControl\": \"66048\"" +
             "}");
       }
     };
 
+    Capture<Name> capturedName = new Capture<Name>();
+    Capture<Attributes> capturedAttributes = new Capture<Attributes>();
+
     ADKerberosOperationHandler handler = createMockBuilder(ADKerberosOperationHandler.class)
         .addMockedMethod(ADKerberosOperationHandler.class.getDeclaredMethod("createInitialLdapContext", Properties.class, Control[].class))
         .addMockedMethod(ADKerberosOperationHandler.class.getDeclaredMethod("createSearchControls"))
         .createNiceMock();
 
+    NamingEnumeration<SearchResult> searchResult = createNiceMock(NamingEnumeration.class);
+    expect(searchResult.hasMore()).andReturn(false).once();
+
+    LdapContext ldapContext = createNiceMock(LdapContext.class);
+    expect(ldapContext.search(anyObject(String.class), anyObject(String.class), anyObject(SearchControls.class)))
+        .andReturn(searchResult)
+        .once();
+
+    expect(ldapContext.createSubcontext(capture(capturedName), capture(capturedAttributes)))
+        .andReturn(createNiceMock(DirContext.class))
+        .once();
+
     expect(handler.createInitialLdapContext(anyObject(Properties.class), anyObject(Control[].class)))
-        .andAnswer(new IAnswer<LdapContext>() {
-          @Override
-          public LdapContext answer() throws Throwable {
-            LdapContext ldapContext = createNiceMock(LdapContext.class);
-            expect(ldapContext.search(anyObject(String.class), anyObject(String.class), anyObject(SearchControls.class)))
-                .andAnswer(new IAnswer<NamingEnumeration<SearchResult>>() {
-                  @Override
-                  public NamingEnumeration<SearchResult> answer() throws Throwable {
-                    NamingEnumeration<SearchResult> result = createNiceMock(NamingEnumeration.class);
-                    expect(result.hasMore()).andReturn(false).once();
-                    replay(result);
-                    return result;
-                  }
-                })
-                .once();
-            replay(ldapContext);
-            return ldapContext;
-          }
-        })
+        .andReturn(ldapContext)
         .once();
+
     expect(handler.createSearchControls()).andAnswer(new IAnswer<SearchControls>() {
       @Override
       public SearchControls answer() throws Throwable {
@@ -381,34 +402,43 @@ public class ADKerberosOperationHandlerTest extends EasyMockSupport {
     replayAll();
 
     handler.open(kc, DEFAULT_REALM, kerberosEnvMap);
+    handler.createPrincipal("nn/c6501.ambari.apache.org", "secret", true);
+    handler.close();
 
+    Attributes attributes = capturedAttributes.getValue();
+    String[] objectClasses = new String[]{"top", "person", "organizationalPerson", "user"};
 
-    Map<String, Object> context = new HashMap<String, Object>();
-    context.put("principal", "nn/c6501.ambari.apache.org");
-    context.put("principal_primary", "nn");
-    context.put("principal_instance", "c6501.ambari.apache.org");
-    context.put("realm", "EXAMPLE.COM");
-    context.put("realm_lowercase", "example.com");
-    context.put("password", "secret");
-    context.put("is_service", true);
-    context.put("container_dn", "ou=cluster,DC=EXAMPLE,DC=COM");
-
-    Map<String, Object> data = handler.processCreateTemplate(context);
-
-    Assert.assertNotNull(data);
-    Assert.assertEquals(10, data.size());
-    Assert.assertEquals(new ArrayList<String>(Arrays.asList("top", "person", "organizationalPerson", "user")), data.get("objectClass"));
-    Assert.assertEquals("nn/c6501.ambari.apache.org@EXAMPLE.COM", data.get("cn"));
-    Assert.assertEquals("nn/c6501.ambari.apache.org", data.get("servicePrincipalName"));
-    Assert.assertEquals("nn/c6501.ambari.apache.org@example.com", data.get("userPrincipalName"));
-    Assert.assertEquals("nn/c6501.ambari.apache.org", data.get("sAMAccountName"));
-    Assert.assertEquals("nn/c6501.ambari.apache.org@EXAMPLE.COM,ou=cluster,DC=EXAMPLE,DC=COM", data.get("distinguishedName"));
-    Assert.assertEquals("nn/c6501.ambari.apache.org@EXAMPLE.COM,ou=cluster,DC=EXAMPLE,DC=COM", data.get("dn"));
-    Assert.assertEquals("`secret`", data.get("unicodePwd"));
-    Assert.assertEquals("0", data.get("accountExpires"));
-    Assert.assertEquals("66048", data.get("userAccountControl"));
+    Assert.assertNotNull(attributes);
+    Assert.assertEquals(8, attributes.size());
+
+    Assert.assertNotNull(attributes.get("objectClass"));
+    Assert.assertEquals(objectClasses.length, attributes.get("objectClass").size());
+    for (int i = 0; i < objectClasses.length; i++) {
+      Assert.assertEquals(objectClasses[i], attributes.get("objectClass").get(i));
+    }
+
+    Assert.assertNotNull(attributes.get("cn"));
+    Assert.assertEquals("nn-995e1580db28198e7fda1417ab5d894c877937d2", attributes.get("cn").get());
+
+    Assert.assertNotNull(attributes.get("servicePrincipalName"));
+    Assert.assertEquals("nn/c6501.ambari.apache.org", attributes.get("servicePrincipalName").get());
+
+    Assert.assertNotNull(attributes.get("userPrincipalName"));
+    Assert.assertEquals("nn/c6501.ambari.apache.org@hdp01.local", attributes.get("userPrincipalName").get());
+
+    Assert.assertNotNull(attributes.get("sAMAccountName"));
+    Assert.assertTrue(attributes.get("sAMAccountName").get().toString().length() <= 20);
+    Assert.assertEquals("nn-995e1580db28198e7", attributes.get("sAMAccountName").get());
+
+    Assert.assertNotNull(attributes.get("unicodePwd"));
+    Assert.assertEquals("\"secret\"", new String((byte[]) attributes.get("unicodePwd").get(), Charset.forName("UTF-16LE")));
+
+    Assert.assertNotNull(attributes.get("accountExpires"));
+    Assert.assertEquals("0", attributes.get("accountExpires").get());
+
+    Assert.assertNotNull(attributes.get("userAccountControl"));
+    Assert.assertEquals("66048", attributes.get("userAccountControl").get());
 
-    handler.close();
   }
 
   /**
@@ -458,31 +488,40 @@ public class ADKerberosOperationHandlerTest extends EasyMockSupport {
     // does the principal already exist?
     System.out.println("Principal exists: " + handler.principalExists("nn/c1508.ambari.apache.org"));
 
-    //create principal
-//    handler.createPrincipal("nn/c1508.ambari.apache.org@" + DEFAULT_REALM, handler.createSecurePassword(), true);
-
     handler.close();
 
-    kerberosEnvMap.put(ADKerberosOperationHandler.KERBEROS_ENV_CREATE_ATTRIBUTES_TEMPLATE, "{" +
-        "\"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"]," +
-        "\"distinguishedName\": \"CN=$principal@$realm,$container_dn\"," +
-        "#if( $is_service )" +
-        "\"servicePrincipalName\": \"$principal\"," +
-        "#end" +
-        "\"userPrincipalName\": \"$principal@$realm.toLowerCase()\"," +
-        "\"unicodePwd\": \"\\\"$password\\\"\"," +
-        "\"accountExpires\": \"0\"," +
-        "\"userAccountControl\": \"66048\"" +
-        "}");
+    kerberosEnvMap.put(ADKerberosOperationHandler.KERBEROS_ENV_CREATE_ATTRIBUTES_TEMPLATE,
+        "#set( $user = \"${principal_primary}-${principal_digest}\" )" +
+            "{" +
+            "  \"objectClass\": [" +
+            "    \"top\"," +
+            "    \"person\"," +
+            "    \"organizationalPerson\"," +
+            "    \"user\"" +
+            "  ]," +
+            "  \"cn\": \"$user\"," +
+            "  \"sAMAccountName\": \"$user.substring(0,20)\"," +
+            "  #if( $is_service )" +
+            "  \"servicePrincipalName\": \"$principal_name\"," +
+            "  #end" +
+            "  \"userPrincipalName\": \"$normalized_principal.toLowerCase()\"," +
+            "  \"unicodePwd\": \"$password\"," +
+            "  \"accountExpires\": \"0\"," +
+            "  \"userAccountControl\": \"66048\"" +
+            "}"
+    );
 
     handler.open(credentials, realm, kerberosEnvMap);
+
+    // remove the principal
+    handler.removePrincipal("abcdefg");
+    handler.removePrincipal("abcdefg/c1509.ambari.apache.org@" + DEFAULT_REALM);
+
     handler.createPrincipal("abcdefg/c1509.ambari.apache.org@" + DEFAULT_REALM, handler.createSecurePassword(), true);
+    handler.createPrincipal("abcdefg@" + DEFAULT_REALM, handler.createSecurePassword(), false);
 
     //update the password
-    handler.setPrincipalPassword("nn/c1508.ambari.apache.org", handler.createSecurePassword());
-
-    // remove the principal
-    // handler.removeServicePrincipal("nn/c1508.ambari.apache.org");
+    handler.setPrincipalPassword("abcdefg/c1509.ambari.apache.org@" + DEFAULT_REALM, handler.createSecurePassword());
 
     handler.close();
   }
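
The unicodePwd assertions above decode the captured bytes as UTF-16LE, reflecting
Active Directory's convention that a password is set as the double-quoted string
encoded in UTF-16LE. A minimal sketch of that encoding (variable names hypothetical):

    String password = "secret";
    // AD expects the quoted password as UTF-16LE bytes in the unicodePwd attribute.
    byte[] unicodePwd = ("\"" + password + "\"").getBytes(Charset.forName("UTF-16LE"));
    // new String(unicodePwd, Charset.forName("UTF-16LE")) yields "\"secret\"", as asserted.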

http://git-wip-us.apache.org/repos/asf/ambari/blob/9f291484/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipalTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipalTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipalTest.java
new file mode 100644
index 0000000..28dd08a
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/DeconstructedPrincipalTest.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class DeconstructedPrincipalTest {
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testNullPrincipal() throws Exception {
+    DeconstructedPrincipal.valueOf(null, null);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testEmptyPrincipal() throws Exception {
+    DeconstructedPrincipal.valueOf("", null);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testInvalidPrincipal() throws Exception {
+    DeconstructedPrincipal.valueOf("/invalid", null);
+  }
+
+  @Test
+  public void testPrimary() throws Exception {
+    DeconstructedPrincipal deconstructedPrincipal = DeconstructedPrincipal.valueOf("primary", "REALM");
+
+    assertNotNull(deconstructedPrincipal);
+    assertEquals("primary", deconstructedPrincipal.getPrimary());
+    assertNull(deconstructedPrincipal.getInstance());
+    assertEquals("REALM", deconstructedPrincipal.getRealm());
+    assertEquals("primary", deconstructedPrincipal.getPrincipalName());
+    assertEquals("primary@REALM", deconstructedPrincipal.getNormalizedPrincipal());
+  }
+
+  @Test
+  public void testPrimaryRealm() throws Exception {
+    DeconstructedPrincipal deconstructedPrincipal = DeconstructedPrincipal.valueOf("primary@MYREALM", "REALM");
+
+    assertNotNull(deconstructedPrincipal);
+    assertEquals("primary", deconstructedPrincipal.getPrimary());
+    assertNull(deconstructedPrincipal.getInstance());
+    assertEquals("MYREALM", deconstructedPrincipal.getRealm());
+    assertEquals("primary", deconstructedPrincipal.getPrincipalName());
+    assertEquals("primary@MYREALM", deconstructedPrincipal.getNormalizedPrincipal());
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testInstance() throws Exception {
+    DeconstructedPrincipal.valueOf("/instance", "REALM");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testInstanceRealm() throws Exception {
+    DeconstructedPrincipal.valueOf("/instance@MYREALM", "REALM");
+  }
+
+  @Test
+  public void testPrimaryInstance() throws Exception {
+    DeconstructedPrincipal deconstructedPrincipal = DeconstructedPrincipal.valueOf("primary/instance", "REALM");
+
+    assertNotNull(deconstructedPrincipal);
+    assertEquals("primary", deconstructedPrincipal.getPrimary());
+    assertEquals("instance", deconstructedPrincipal.getInstance());
+    assertEquals("instance", deconstructedPrincipal.getInstance());
+    assertEquals("REALM", deconstructedPrincipal.getRealm());
+    assertEquals("primary/instance", deconstructedPrincipal.getPrincipalName());
+    assertEquals("primary/instance@REALM", deconstructedPrincipal.getNormalizedPrincipal());
+  }
+
+  @Test
+  public void testPrimaryInstanceRealm() throws Exception {
+    DeconstructedPrincipal deconstructedPrincipal = DeconstructedPrincipal.valueOf("primary/instance@MYREALM", "REALM");
+
+    assertNotNull(deconstructedPrincipal);
+    assertEquals("primary", deconstructedPrincipal.getPrimary());
+    assertEquals("instance", deconstructedPrincipal.getInstance());
+    assertEquals("MYREALM", deconstructedPrincipal.getRealm());
+    assertEquals("primary/instance", deconstructedPrincipal.getPrincipalName());
+    assertEquals("primary/instance@MYREALM", deconstructedPrincipal.getNormalizedPrincipal());
+  }
+
+  @Test
+  public void testOddCharacters() throws Exception {
+    DeconstructedPrincipal deconstructedPrincipal = DeconstructedPrincipal.valueOf("p_ri.ma-ry/i.n_s-tance@M_Y-REALM.COM", "REALM");
+
+    assertNotNull(deconstructedPrincipal);
+    assertEquals("p_ri.ma-ry", deconstructedPrincipal.getPrimary());
+    assertEquals("i.n_s-tance", deconstructedPrincipal.getInstance());
+    assertEquals("M_Y-REALM.COM", deconstructedPrincipal.getRealm());
+    assertEquals("p_ri.ma-ry/i.n_s-tance", deconstructedPrincipal.getPrincipalName());
+    assertEquals("p_ri.ma-ry/i.n_s-tance@M_Y-REALM.COM", deconstructedPrincipal.getNormalizedPrincipal());
+  }
+
+}
\ No newline at end of file


[02/14] ambari git commit: AMBARI-9222. Kerberos wizard: Property description tweaks on configure Kerberos page. (jaimin)

Posted by yu...@apache.org.
AMBARI-9222. Kerberos wizard: Property description tweaks on configure Kerberos page. (jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ae82067d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ae82067d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ae82067d

Branch: refs/heads/2.0-preview
Commit: ae82067d3515fc456a68972de99530a33861b7dd
Parents: 0898c73
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Tue Jan 20 13:07:37 2015 -0800
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Tue Jan 20 13:52:07 2015 -0800

----------------------------------------------------------------------
 .../1.10.3-10/configuration/kerberos-env.xml    |    3 +-
 .../1.10.3-10/configuration/krb5-conf.xml       |   42 +-
 ambari-web/app/data/HDP2/site_properties.js     | 8954 +++++++++---------
 3 files changed, 4514 insertions(+), 4485 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ae82067d/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
index f44677f..85ae018 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
@@ -40,8 +40,7 @@
   <property require-input="true">
     <name>create_attributes_template</name>
     <description>
-      A Velocity template to use to generate a JSON-formatted document containing the set of
-      attribute names and values needed to create a new Kerberos identity in the relevant KDC.
+      Customizable JSON document representing the LDAP attributes needed to create a new Kerberos entity in the KDC (Velocity template engine).
     </description>
     <value>
 {

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae82067d/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
index c10ae73..38f6ab2 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
@@ -23,42 +23,65 @@
 <configuration>
   <property>
     <name>logging_default</name>
+    <description>
+      Default Kerberos library log location.
+    </description>
     <value>FILE:/var/log/krb5libs.log</value>
   </property>
   <property>
     <name>logging_kdc</name>
+    <description>
+      KDC log location.
+    </description>
     <value>FILE:/var/log/krb5kdc.log</value>
   </property>
   <property>
     <name>logging_admin_server</name>
+    <description>
+      Admin server log location.
+    </description>
     <value>FILE:/var/log/kadmind.log</value>
   </property>
 
   <property>
     <name>libdefaults_dns_lookup_realm</name>
+    <description>
+      If true, DNS TXT records will be used to determine the Kerberos realm of a host.
+    </description>
     <value>false</value>
   </property>
   <property>
     <name>libdefaults_dns_lookup_kdc</name>
+    <description>
+      If true, DNS SRV records will be used to locate the KDCs and other servers for the realm.
+    </description>
     <value>false</value>
   </property>
   <property>
     <name>libdefaults_ticket_lifetime</name>
+    <description>
+      Default lifetime of a ticket.
+    </description>
     <value>24h</value>
   </property>
   <property>
     <name>libdefaults_renew_lifetime</name>
+    <description>
+      Default renewable lifetime for initial tickets.
+    </description>
     <value>7d</value>
   </property>
   <property>
     <name>libdefaults_forwardable</name>
+    <description>
+      If true, initial tickets will be forwardable.
+    </description>
     <value>true</value>
   </property>
   <property>
     <name>libdefaults_default_tgs_enctypes</name>
     <description>
-      a space-delimited list of session key encryption types supported by the KDC or Active
-      Directory
+      The list of supported session key encryption types that should be returned by the KDC.
     </description>
     <value>
       aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 arcfour-hmac-md5
@@ -68,8 +91,7 @@
   <property>
     <name>libdefaults_default_tkt_enctypes</name>
     <description>
-      a space-delimited list of session key encryption types supported by the KDC or Active
-      Directory
+      The list of supported session key encryption types that should be requested by the client.
     </description>
     <value>
       aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 arcfour-hmac-md5
@@ -80,14 +102,14 @@
   <property require-input="true">
     <name>realm</name>
     <description>
-      The realm to use when creating service principals
+      The realm to use when creating service and Ambari principals, as defined in the realms section of your /etc/krb5.conf (e.g. EXAMPLE.COM).
     </description>
     <value/>
   </property>
   <property require-input="true">
     <name>domains</name>
     <description>
-      A comma-delimited list of domain names that the realm serves (optional)
+      A comma-separated list of domain names used to map server hostnames to the realm name (e.g. .example.com,example.com). This is optional.
     </description>
     <value/>
   </property>
@@ -101,16 +123,14 @@
   <property require-input="true">
     <name>kdc_host</name>
     <description>
-      The IP address or FQDN of the KDC or Active Directory server, optionally a port number may be
-      provided
+      The IP address or FQDN for the KDC host. Optionally a port number may be included.
     </description>
     <value/>
   </property>
   <property>
     <name>admin_server_host</name>
     <description>
-      The IP address or FQDN of the administrative Kerberos server, optionally a port number may be
-      provided
+      The IP address or FQDN for the KDC Kerberos administrative host. Optionally a port number may be included.
     </description>
     <value/>
   </property>
@@ -123,7 +143,7 @@
   </property>
   <property>
     <name>content</name>
-    <description>The jinja template for the krb5.conf file</description>
+    <description>Customizable krb5.conf template (Jinja template engine)</description>
     <value>
 [libdefaults]
   renew_lifetime = {{libdefaults_renew_lifetime}}


[08/14] ambari git commit: AMBARI-9248. Fix post-install hook for hdp-select (dlysnichenko)

Posted by yu...@apache.org.
AMBARI-9248. Fix post-install hook for hdp-select (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1154fe4b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1154fe4b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1154fe4b

Branch: refs/heads/2.0-preview
Commit: 1154fe4be38d9e1bdf4b311b423a80de31035617
Parents: 84bfe43
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Wed Jan 21 23:45:39 2015 +0200
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Wed Jan 21 14:16:29 2015 -0800

----------------------------------------------------------------------
 .../src/main/python/ambari_agent/HostCleanup.py   | 13 ++++++++++++-
 .../test/python/ambari_agent/TestHostCleanup.py   | 18 +++++++++++++++++-
 .../scripts/shared_initialization.py              |  8 ++++++--
 3 files changed, 35 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1154fe4b/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/HostCleanup.py b/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
index 7aeb70a..398502e 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
@@ -87,8 +87,10 @@ DIRNAME_PATTERNS = [
 REPOSITORY_BLACK_LIST = ["ambari.repo"]
 PACKAGES_BLACK_LIST = ["ambari-server", "ambari-agent"]
 
-
 class HostCleanup:
+
+  SELECT_ALL_PERFORMED_MARKER = "/var/lib/ambari-agent/data/hdp-select-set-all.performed"
+
   def resolve_ambari_config(self):
     try:
       config = AmbariConfig()
@@ -134,6 +136,8 @@ class HostCleanup:
       if packageList and not PACKAGE_SECTION in SKIP_LIST:
         logger.info("Deleting packages: " + str(packageList) + "\n")
         self.do_erase_packages(packageList)
+        # Removing packages means that we have to rerun hdp-select
+        self.do_remove_hdp_select_marker()
       if userList and not USER_SECTION in SKIP_LIST:
         logger.info("\n" + "Deleting users: " + str(userList))
         self.do_delete_users(userList)
@@ -260,6 +264,13 @@ class HostCleanup:
       self.do_erase_files_silent(remList)
 
 
+  def do_remove_hdp_select_marker(self):
+    """
+    Remove marker file for 'hdp-select set all' invocation
+    """
+    if os.path.isfile(self.SELECT_ALL_PERFORMED_MARKER):
+      os.unlink(self.SELECT_ALL_PERFORMED_MARKER)
+
 
   # Alternatives exist as a stack of symlinks under /var/lib/alternatives/$name
   # Script expects names of the alternatives as input

http://git-wip-us.apache.org/repos/asf/ambari/blob/1154fe4b/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py b/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
index 84b96cc..f43784c 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
@@ -368,7 +368,7 @@ class TestHostCleanup(TestCase):
 
   @patch.object(HostCleanup.HostCleanup, 'run_os_command')
   @patch.object(OSCheck, "get_os_type")
-  def test_do_earse_packages(self, get_os_type_method, run_os_command_method):
+  def test_do_erase_packages(self, get_os_type_method, run_os_command_method):
     out = StringIO.StringIO()
     sys.stdout = out
 
@@ -399,6 +399,22 @@ class TestHostCleanup(TestCase):
 
     sys.stdout = sys.__stdout__
 
+
+  @patch('os.path.isfile')
+  @patch('os.unlink')
+  def test_do_remove_hdp_select_marker(self, unlink_mock, isfile_mock):
+    out = StringIO.StringIO()
+    sys.stdout = out
+
+    isfile_mock.return_value = True
+
+    self.hostcleanup.do_remove_hdp_select_marker()
+
+    self.assertTrue(unlink_mock.called)
+
+    sys.stdout = sys.__stdout__
+
+
   @patch.object(HostCleanup.HostCleanup, 'get_files_in_dir')
   @patch.object(OSCheck, "get_os_type")
   def test_find_repo_files_for_repos(self, get_os_type_method,

http://git-wip-us.apache.org/repos/asf/ambari/blob/1154fe4b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index f256688..3b7bc54 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -20,10 +20,14 @@ import os
 from resource_management import *
 
 def setup_hdp_install_directory():
+  # This is the name of the marker file.
+  SELECT_ALL_PERFORMED_MARKER = "/var/lib/ambari-agent/data/hdp-select-set-all.performed"
   import params
   if params.hdp_stack_version != "" and compare_versions(params.stack_version_unformatted, '2.2') >= 0:
-    Execute(format('sudo /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^{stack_version_unformatted} | tail -1`'),
-            only_if=format('ls -d /usr/hdp/{stack_version_unformatted}*')
+    Execute(format('sudo touch {SELECT_ALL_PERFORMED_MARKER} ; ' +
+                   'sudo /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^{stack_version_unformatted} | tail -1`'),
+            only_if=format('ls -d /usr/hdp/{stack_version_unformatted}*'),   # If any HDP version is installed
+            not_if=format("test -f {SELECT_ALL_PERFORMED_MARKER}")           # Do that only once (otherwise we break rolling upgrade logic)
     )
 
 def setup_config():


[04/14] ambari git commit: AMBARI-9149. Test principal and keytab required for service check should be created as part of kerberos service check action (rlevas)

Posted by yu...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/339e8a76/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index 0b58fc0..c6c9574 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -70,6 +70,8 @@ import org.junit.Test;
 import javax.persistence.EntityManager;
 
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -169,13 +171,49 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     final Map<String, String> clusterEnvProperties = createNiceMock(Map.class);
     expect(clusterEnvProperties.get("security_enabled")).andReturn("true").once();
-    expect(clusterEnvProperties.get("kerberos_domain")).andReturn("FOOBAR.COM").once();
 
     final Config clusterEnvConfig = createNiceMock(Config.class);
     expect(clusterEnvConfig.getProperties()).andReturn(clusterEnvProperties).once();
 
+    final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
+    expect(clusterEnvProperties.get("ldap_url")).andReturn("").once();
+    expect(clusterEnvProperties.get("container_dn")).andReturn("").once();
+
+    final Config kerberosEnvConfig = createNiceMock(Config.class);
+    expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).once();
+
+    final Cluster cluster = createNiceMock(Cluster.class);
+    expect(cluster.getDesiredConfigByType("cluster-env")).andReturn(clusterEnvConfig).once();
+    expect(cluster.getDesiredConfigByType("kerberos-env")).andReturn(kerberosEnvConfig).once();
+
+    final KerberosDescriptor kerberosDescriptor = createNiceMock(KerberosDescriptor.class);
+
+    replayAll();
+    kerberosHelper.toggleKerberos(cluster, kerberosDescriptor, null);
+    verifyAll();
+  }
+
+  @Test(expected = AmbariException.class)
+  public void testMissingKerberosEnvConf() throws Exception {
+    KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
+
+    final Map<String, String> clusterEnvProperties = createNiceMock(Map.class);
+    expect(clusterEnvProperties.get("security_enabled")).andReturn("true").once();
+
+    final Config clusterEnvConfig = createNiceMock(Config.class);
+    expect(clusterEnvConfig.getProperties()).andReturn(clusterEnvProperties).once();
+
+    final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
+    expect(krb5ConfProperties.get("kdc_host")).andReturn("10.0.100.1").once();
+    expect(krb5ConfProperties.get("kadmin_host")).andReturn("10.0.100.1").once();
+    expect(krb5ConfProperties.get("realm")).andReturn("EXAMPLE.COM").once();
+
+    final Config krb5ConfConfig = createNiceMock(Config.class);
+    expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).once();
+
     final Cluster cluster = createNiceMock(Cluster.class);
     expect(cluster.getDesiredConfigByType("cluster-env")).andReturn(clusterEnvConfig).once();
+    expect(cluster.getDesiredConfigByType("krb5-conf")).andReturn(krb5ConfConfig).once();
 
     final KerberosDescriptor kerberosDescriptor = createNiceMock(KerberosDescriptor.class);
 
@@ -193,8 +231,7 @@ public class KerberosHelperTest extends EasyMockSupport {
   public void testEnableKerberosMissingCredentials() throws Exception {
     try {
       testEnableKerberos(null);
-    }
-    catch (IllegalArgumentException e) {
+    } catch (IllegalArgumentException e) {
       Assert.assertTrue(e.getMessage().startsWith("Missing KDC administrator credentials"));
       throw e;
     }
@@ -204,8 +241,32 @@ public class KerberosHelperTest extends EasyMockSupport {
   public void testEnableKerberosInvalidCredentials() throws Exception {
     try {
       testEnableKerberos(new KerberosCredential("invalid_principal", "password", "keytab"));
+    } catch (IllegalArgumentException e) {
+      Assert.assertTrue(e.getMessage().startsWith("Invalid KDC administrator credentials"));
+      throw e;
+    }
+  }
+
+  @Test
+  public void testEnsureIdentities() throws Exception {
+    testEnsureIdentities(new KerberosCredential("principal", "password", "keytab"));
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testEnsureIdentitiesMissingCredentials() throws Exception {
+    try {
+      testEnsureIdentities(null);
+    } catch (IllegalArgumentException e) {
+      Assert.assertTrue(e.getMessage().startsWith("Missing KDC administrator credentials"));
+      throw e;
     }
-    catch (IllegalArgumentException e) {
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testEnsureIdentitiesInvalidCredentials() throws Exception {
+    try {
+      testEnsureIdentities(new KerberosCredential("invalid_principal", "password", "keytab"));
+    } catch (IllegalArgumentException e) {
       Assert.assertTrue(e.getMessage().startsWith("Invalid KDC administrator credentials"));
       throw e;
     }
@@ -246,7 +307,6 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     final Map<String, String> clusterEnvProperties = createNiceMock(Map.class);
     expect(clusterEnvProperties.get("security_enabled")).andReturn("true").once();
-    expect(clusterEnvProperties.get("kerberos_domain")).andReturn("FOOBAR.COM").once();
 
     final Config clusterEnvConfig = createNiceMock(Config.class);
     expect(clusterEnvConfig.getProperties()).andReturn(clusterEnvProperties).once();
@@ -260,6 +320,7 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
     expect(krb5ConfProperties.get("kdc_type")).andReturn("mit-kdc").once();
+    expect(krb5ConfProperties.get("realm")).andReturn("FOOBAR.COM").once();
 
     final Config krb5ConfConfig = createNiceMock(Config.class);
     // TODO: (rlevas) Remove when AMBARI 9121 is complete
@@ -293,8 +354,8 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(cluster.getCurrentStackVersion())
         .andReturn(new StackId("HDP", "2.2"))
         .anyTimes();
-    expect(cluster.getSessionAttributes()).andReturn(new HashMap<String, Object>(){{
-      if(kerberosCredential != null) {
+    expect(cluster.getSessionAttributes()).andReturn(new HashMap<String, Object>() {{
+      if (kerberosCredential != null) {
         put("kerberos_admin/" + KerberosCredential.KEY_NAME_PRINCIPAL, kerberosCredential.getPrincipal());
         put("kerberos_admin/" + KerberosCredential.KEY_NAME_PASSWORD, kerberosCredential.getPassword());
         put("kerberos_admin/" + KerberosCredential.KEY_NAME_KEYTAB, kerberosCredential.getKeytab());
@@ -409,23 +470,28 @@ public class KerberosHelperTest extends EasyMockSupport {
     // This is a STRICT mock to help ensure that the end result is what we want.
     final RequestStageContainer requestStageContainer = createStrictMock(RequestStageContainer.class);
     // Create Principals Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
     expect(requestStageContainer.getId()).andReturn(1L).once();
     requestStageContainer.addStages(anyObject(List.class));
     expectLastCall().once();
     // Create Keytabs Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(0L).anyTimes();
     expect(requestStageContainer.getId()).andReturn(1L).once();
     requestStageContainer.addStages(anyObject(List.class));
     expectLastCall().once();
     // Distribute Keytabs Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(1L).anyTimes();
     expect(requestStageContainer.getId()).andReturn(1L).once();
     requestStageContainer.addStages(anyObject(List.class));
     expectLastCall().once();
     // Update Configs Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(2L).anyTimes();
     expect(requestStageContainer.getId()).andReturn(1L).once();
     requestStageContainer.addStages(anyObject(List.class));
     expectLastCall().once();
     // TODO: Add more of these when more stages are added.
     // Clean-up/Finalize Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(3L).anyTimes();
     expect(requestStageContainer.getId()).andReturn(1L).once();
     requestStageContainer.addStages(anyObject(List.class));
     expectLastCall().once();
@@ -439,4 +505,251 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     verifyAll();
   }
+
+  private void testEnsureIdentities(final KerberosCredential kerberosCredential) throws Exception {
+    KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
+
+    final ServiceComponentHost sch1 = createMock(ServiceComponentHost.class);
+    expect(sch1.getServiceName()).andReturn("SERVICE1").once();
+    expect(sch1.getServiceComponentName()).andReturn("COMPONENT1").once();
+    expect(sch1.getHostName()).andReturn("host1").once();
+
+    final ServiceComponentHost sch2 = createStrictMock(ServiceComponentHost.class);
+    expect(sch2.getServiceName()).andReturn("SERVICE2").once();
+
+    final ServiceComponentHost sch3 = createStrictMock(ServiceComponentHost.class);
+    expect(sch3.getServiceName()).andReturn("SERVICE3").once();
+    expect(sch3.getServiceComponentName()).andReturn("COMPONENT3").once();
+    expect(sch3.getHostName()).andReturn("host1").once();
+
+    final Host host = createNiceMock(Host.class);
+    expect(host.getHostName()).andReturn("host1").once();
+    expect(host.getState()).andReturn(HostState.HEALTHY).once();
+
+    final Service service1 = createStrictMock(Service.class);
+    expect(service1.getName()).andReturn("SERVICE1").anyTimes();
+    expect(service1.getServiceComponents())
+        .andReturn(Collections.<String, ServiceComponent>emptyMap())
+        .once();
+
+    final Service service2 = createStrictMock(Service.class);
+    expect(service2.getName()).andReturn("SERVICE2").anyTimes();
+    expect(service2.getServiceComponents())
+        .andReturn(Collections.<String, ServiceComponent>emptyMap())
+        .once();
+
+    final Map<String, String> clusterEnvProperties = createNiceMock(Map.class);
+    expect(clusterEnvProperties.get("security_enabled")).andReturn("true").once();
+
+    final Config clusterEnvConfig = createNiceMock(Config.class);
+    expect(clusterEnvConfig.getProperties()).andReturn(clusterEnvProperties).once();
+
+    final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
+    // TODO: (rlevas) Add when AMBARI 9121 is complete
+    // expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").once();
+
+    final Config kerberosEnvConfig = createNiceMock(Config.class);
+    expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).once();
+
+    final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
+    expect(krb5ConfProperties.get("kdc_type")).andReturn("mit-kdc").once();
+    expect(krb5ConfProperties.get("realm")).andReturn("FOOBAR.COM").once();
+
+    final Config krb5ConfConfig = createNiceMock(Config.class);
+    // TODO: (rlevas) Remove when AMBARI 9121 is complete
+    expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).once();
+
+    final MaintenanceStateHelper maintenanceStateHelper = injector.getInstance(MaintenanceStateHelper.class);
+    expect(maintenanceStateHelper.getEffectiveState(anyObject(ServiceComponentHost.class)))
+        .andReturn(MaintenanceState.OFF).anyTimes();
+
+    final Cluster cluster = createNiceMock(Cluster.class);
+    expect(cluster.getDesiredConfigByType("cluster-env")).andReturn(clusterEnvConfig).once();
+    expect(cluster.getDesiredConfigByType("krb5-conf")).andReturn(krb5ConfConfig).once();
+    expect(cluster.getDesiredConfigByType("kerberos-env")).andReturn(kerberosEnvConfig).once();
+    expect(cluster.getClusterName()).andReturn("c1").anyTimes();
+    expect(cluster.getServices())
+        .andReturn(new HashMap<String, Service>() {
+          {
+            put("SERVICE1", service1);
+            put("SERVICE2", service2);
+          }
+        })
+        .anyTimes();
+    expect(cluster.getServiceComponentHosts("host1"))
+        .andReturn(new ArrayList<ServiceComponentHost>() {
+          {
+            add(sch1);
+            add(sch2);
+            add(sch3);
+          }
+        })
+        .once();
+    expect(cluster.getCurrentStackVersion())
+        .andReturn(new StackId("HDP", "2.2"))
+        .anyTimes();
+    expect(cluster.getSessionAttributes()).andReturn(new HashMap<String, Object>() {{
+      if (kerberosCredential != null) {
+        put("kerberos_admin/" + KerberosCredential.KEY_NAME_PRINCIPAL, kerberosCredential.getPrincipal());
+        put("kerberos_admin/" + KerberosCredential.KEY_NAME_PASSWORD, kerberosCredential.getPassword());
+        put("kerberos_admin/" + KerberosCredential.KEY_NAME_KEYTAB, kerberosCredential.getKeytab());
+      }
+    }}).anyTimes();
+
+    final Clusters clusters = injector.getInstance(Clusters.class);
+    expect(clusters.getHostsForCluster("c1"))
+        .andReturn(new HashMap<String, Host>() {
+          {
+            put("host1", host);
+          }
+        })
+        .once();
+
+    final AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, "host1"))
+        .andReturn(Collections.<String, Map<String, String>>emptyMap())
+        .once();
+    expect(ambariManagementController.getRoleCommandOrder(cluster))
+        .andReturn(createNiceMock(RoleCommandOrder.class))
+        .once();
+
+    final ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
+    expect(configHelper.getEffectiveConfigProperties(anyObject(Cluster.class), anyObject(Map.class)))
+        .andReturn(new HashMap<String, Map<String, String>>() {
+          {
+            put("cluster-env", new HashMap<String, String>() {{
+              put("kerberos_domain", "FOOBAR.COM");
+            }});
+          }
+        })
+        .once();
+    expect(configHelper.getEffectiveConfigAttributes(anyObject(Cluster.class), anyObject(Map.class)))
+        .andReturn(Collections.<String, Map<String, Map<String, String>>>emptyMap())
+        .once();
+
+    final KerberosPrincipalDescriptor principalDescriptor1 = createMock(KerberosPrincipalDescriptor.class);
+    expect(principalDescriptor1.getValue()).andReturn("component1/_HOST@${realm}").anyTimes();
+    expect(principalDescriptor1.getType()).andReturn(KerberosPrincipalType.SERVICE).anyTimes();
+    expect(principalDescriptor1.getLocalUsername()).andReturn(null).anyTimes();
+    expect(principalDescriptor1.getConfiguration()).andReturn("service1-site/component1.kerberos.principal").anyTimes();
+
+    final KerberosPrincipalDescriptor principalDescriptor3 = createMock(KerberosPrincipalDescriptor.class);
+    expect(principalDescriptor3.getValue()).andReturn("component3/${host}@${realm}").anyTimes();
+    expect(principalDescriptor3.getType()).andReturn(KerberosPrincipalType.SERVICE).anyTimes();
+    expect(principalDescriptor3.getLocalUsername()).andReturn(null).anyTimes();
+    expect(principalDescriptor3.getConfiguration()).andReturn("service3-site/component3.kerberos.principal").anyTimes();
+
+    final KerberosKeytabDescriptor keytabDescriptor1 = createMock(KerberosKeytabDescriptor.class);
+    expect(keytabDescriptor1.getFile()).andReturn("${keytab_dir}/service1.keytab").once();
+    expect(keytabDescriptor1.getOwnerName()).andReturn("service1").once();
+    expect(keytabDescriptor1.getOwnerAccess()).andReturn("rw").once();
+    expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").once();
+    expect(keytabDescriptor1.getGroupAccess()).andReturn("").once();
+    expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").once();
+
+    final KerberosKeytabDescriptor keytabDescriptor3 = createMock(KerberosKeytabDescriptor.class);
+    expect(keytabDescriptor3.getFile()).andReturn("${keytab_dir}/service3.keytab").once();
+    expect(keytabDescriptor3.getOwnerName()).andReturn("service3").once();
+    expect(keytabDescriptor3.getOwnerAccess()).andReturn("rw").once();
+    expect(keytabDescriptor3.getGroupName()).andReturn("hadoop").once();
+    expect(keytabDescriptor3.getGroupAccess()).andReturn("").once();
+    expect(keytabDescriptor3.getConfiguration()).andReturn("service3-site/component3.keytab.file").once();
+
+    final KerberosIdentityDescriptor identityDescriptor1a = createMock(KerberosIdentityDescriptor.class);
+    expect(identityDescriptor1a.getName()).andReturn("identity1a").anyTimes();
+    expect(identityDescriptor1a.getPrincipalDescriptor()).andReturn(principalDescriptor1).anyTimes();
+    expect(identityDescriptor1a.getKeytabDescriptor()).andReturn(keytabDescriptor1).anyTimes();
+
+    final KerberosIdentityDescriptor identityDescriptor1b = createMock(KerberosIdentityDescriptor.class);
+    expect(identityDescriptor1b.getName()).andReturn("identity1b").anyTimes();
+
+    final KerberosIdentityDescriptor identityDescriptor3 = createMock(KerberosIdentityDescriptor.class);
+    expect(identityDescriptor3.getName()).andReturn("identity3").anyTimes();
+    expect(identityDescriptor3.getPrincipalDescriptor()).andReturn(principalDescriptor3).anyTimes();
+    expect(identityDescriptor3.getKeytabDescriptor()).andReturn(keytabDescriptor3).anyTimes();
+
+    final KerberosComponentDescriptor componentDescriptor1 = createStrictMock(KerberosComponentDescriptor.class);
+    expect(componentDescriptor1.getIdentities(true)).
+        andReturn(new ArrayList<KerberosIdentityDescriptor>() {{
+          add(identityDescriptor1a);
+          add(identityDescriptor1b);
+        }}).once();
+    expect(componentDescriptor1.getConfigurations(true)).andReturn(null).once();
+
+    final KerberosComponentDescriptor componentDescriptor3 = createStrictMock(KerberosComponentDescriptor.class);
+    expect(componentDescriptor3.getIdentities(true)).
+        andReturn(new ArrayList<KerberosIdentityDescriptor>() {{
+          add(identityDescriptor3);
+        }}).once();
+    expect(componentDescriptor3.getConfigurations(true)).andReturn(null).once();
+
+    final KerberosServiceDescriptor serviceDescriptor1 = createMock(KerberosServiceDescriptor.class);
+    expect(serviceDescriptor1.getIdentities(true)).andReturn(null).once();
+    expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).once();
+
+    final KerberosServiceDescriptor serviceDescriptor3 = createMock(KerberosServiceDescriptor.class);
+    expect(serviceDescriptor3.getIdentities(true)).andReturn(null).once();
+    expect(serviceDescriptor3.getComponent("COMPONENT3")).andReturn(componentDescriptor3).once();
+
+    final KerberosDescriptor kerberosDescriptor = createStrictMock(KerberosDescriptor.class);
+    expect(kerberosDescriptor.getProperties()).andReturn(null).once();
+    expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).once();
+    expect(kerberosDescriptor.getService("SERVICE3")).andReturn(serviceDescriptor3).once();
+
+    final StageFactory stageFactory = injector.getInstance(StageFactory.class);
+    expect(stageFactory.createNew(anyLong(), anyObject(String.class), anyObject(String.class),
+        anyLong(), anyObject(String.class), anyObject(String.class), anyObject(String.class),
+        anyObject(String.class)))
+        .andAnswer(new IAnswer<Stage>() {
+          @Override
+          public Stage answer() throws Throwable {
+            Stage stage = createNiceMock(Stage.class);
+
+            expect(stage.getHostRoleCommands())
+                .andReturn(Collections.<String, Map<String, HostRoleCommand>>emptyMap())
+                .anyTimes();
+            replay(stage);
+            return stage;
+          }
+        })
+        .anyTimes();
+
+    // This is a STRICT mock to help ensure that the end result is what we want.
+    final RequestStageContainer requestStageContainer = createStrictMock(RequestStageContainer.class);
+    // Create Principals Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
+    expect(requestStageContainer.getId()).andReturn(1L).once();
+    requestStageContainer.addStages(anyObject(List.class));
+    expectLastCall().once();
+    // Create Keytabs Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
+    expect(requestStageContainer.getId()).andReturn(1L).once();
+    requestStageContainer.addStages(anyObject(List.class));
+    expectLastCall().once();
+    // Distribute Keytabs Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
+    expect(requestStageContainer.getId()).andReturn(1L).once();
+    requestStageContainer.addStages(anyObject(List.class));
+    expectLastCall().once();
+    // Clean-up/Finalize Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
+    expect(requestStageContainer.getId()).andReturn(1L).once();
+    requestStageContainer.addStages(anyObject(List.class));
+    expectLastCall().once();
+
+    replayAll();
+
+    // Needed by infrastructure
+    injector.getInstance(AmbariMetaInfo.class).init();
+
+    Map<String, Collection<String>> serviceComponentFilter = new HashMap<String, Collection<String>>();
+    Collection<String> identityFilter = Arrays.asList("identity1a", "identity3");
+
+    serviceComponentFilter.put("SERVICE3", Collections.singleton("COMPONENT3"));
+    serviceComponentFilter.put("SERVICE1", null);
+
+    kerberosHelper.ensureIdentities(cluster, kerberosDescriptor, serviceComponentFilter, identityFilter, requestStageContainer);
+
+    verifyAll();
+  }
 }
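
For reference, the filters assembled at the end of this test follow a simple rule:
a service mapped to null means "all of its components", and the identity filter
further restricts the surviving identities by name. A minimal sketch of those
semantics (illustrative Python, not Ambari code; all names are hypothetical):

    # Hypothetical sketch of the service/component + identity filter semantics.
    def select_identities(identities, service_component_filter, identity_filter):
        selected = []
        for service, component, identity in identities:
            if service not in service_component_filter:
                continue
            components = service_component_filter[service]
            if components is not None and component not in components:
                continue
            if identity_filter and identity not in identity_filter:
                continue
            selected.append(identity)
        return selected

    identities = [("SERVICE1", "COMPONENT1", "identity1a"),
                  ("SERVICE1", "COMPONENT1", "identity1b"),
                  ("SERVICE2", "COMPONENT2", "identity2"),
                  ("SERVICE3", "COMPONENT3", "identity3")]
    # Mirrors the filters built above: SERVICE1 -> all components, SERVICE3 -> COMPONENT3.
    print select_identities(identities,
                            {"SERVICE3": set(["COMPONENT3"]), "SERVICE1": None},
                            ["identity1a", "identity3"])
    # -> ['identity1a', 'identity3']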


[06/14] ambari git commit: AMBARI-9228. Ambari Server setup to install and copy JCE policy file in-place (handle both Default / Custom JDK scenarios) (rlevas)

Posted by yu...@apache.org.
AMBARI-9228. Ambari Server setup to install and copy JCE policy file in-place (handle both Default / Custom JDK scenarios) (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/049b6924
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/049b6924
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/049b6924

Branch: refs/heads/2.0-preview
Commit: 049b692472151a4c3488afd2157dcd2ff509f265
Parents: 339e8a7
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Jan 21 15:32:27 2015 -0500
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Wed Jan 21 12:36:15 2015 -0800

----------------------------------------------------------------------
 ambari-server/sbin/ambari-server                |   6 +-
 ambari-server/src/main/python/ambari-server.py  |  74 ++++++++
 .../python/ambari_server/serverConfiguration.py |   2 +
 .../src/test/python/TestAmbariServer.py         | 138 ++++++++++++-
 4 files changed, 216 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
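
In outline, the new "ambari-server setup-jce <path>" action stages the given JCE
policy archive in the server resources directory, records its file name in
ambari.properties, and unzips it into the JDK's jre/lib/security directory. A
condensed sketch of that flow (simplified from the diff below; the property key
is assumed, and a plain dict stands in for the Properties object):

    # Condensed sketch of the setup-jce flow; error handling omitted.
    import os
    import subprocess

    RESOURCES_DIR = "/var/lib/ambari-server/resources"
    JDK_SECURITY_DIR = "jre/lib/security"

    def setup_jce_policy_sketch(path, java_home, properties):
        subprocess.call(["cp", path, RESOURCES_DIR])   # stage the archive
        jce_name = os.path.split(path)[1]
        properties["jce.name"] = jce_name              # property key assumed
        src = os.path.join(RESOURCES_DIR, jce_name)
        dst = os.path.join(java_home, JDK_SECURITY_DIR)
        # -o overwrite, -j junk paths, -q quiet: the same unzip flags as the diff
        subprocess.call(["unzip", "-o", "-j", "-q", src, "-d", dst])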


http://git-wip-us.apache.org/repos/asf/ambari/blob/049b6924/ambari-server/sbin/ambari-server
----------------------------------------------------------------------
diff --git a/ambari-server/sbin/ambari-server b/ambari-server/sbin/ambari-server
index c034f5d..168790b 100755
--- a/ambari-server/sbin/ambari-server
+++ b/ambari-server/sbin/ambari-server
@@ -111,6 +111,10 @@ case "$1" in
         echo -e "Setup ambari-server"
         $PYTHON /usr/sbin/ambari-server.py $@
         ;;
+  setup-jce)
+        echo -e "Updating jce policy"
+        $PYTHON /usr/sbin/ambari-server.py $@
+        ;;
   setup-ldap)
         echo -e "Setting up LDAP properties..."
         $PYTHON /usr/sbin/ambari-server.py $@
@@ -137,7 +141,7 @@ case "$1" in
         ;;
   *)
         echo "Usage: /usr/sbin/ambari-server
-        {start|stop|restart|setup|upgrade|status|upgradestack|setup-ldap|sync-ldap|setup-security|refresh-stack-hash|backup|restore} [options]
+        {start|stop|restart|setup|setup-jce|upgrade|status|upgradestack|setup-ldap|sync-ldap|setup-security|refresh-stack-hash|backup|restore} [options]
         Use usr/sbin/ambari-server <action> --help to get details on options available.
         Or, simply invoke ambari-server.py --help to print the options."
         exit 1

http://git-wip-us.apache.org/repos/asf/ambari/blob/049b6924/ambari-server/src/main/python/ambari-server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari-server.py b/ambari-server/src/main/python/ambari-server.py
index 7c08083..b75611f 100755
--- a/ambari-server/src/main/python/ambari-server.py
+++ b/ambari-server/src/main/python/ambari-server.py
@@ -81,6 +81,7 @@ UPGRADE_ACTION = "upgrade"
 UPGRADE_STACK_ACTION = "upgradestack"
 STATUS_ACTION = "status"
 SETUP_HTTPS_ACTION = "setup-https"
+SETUP_JCE_ACTION = "setup-jce"
 LDAP_SETUP_ACTION = "setup-ldap"
 LDAP_SYNC_ACTION = "sync-ldap"
 SETUP_GANGLIA_HTTPS_ACTION = "setup-ganglia-https"
@@ -403,6 +404,7 @@ DEFAULT_JDK16_LOCATION = "/usr/jdk64/jdk1.6.0_31"
 JDK_INDEX = 0
 JDK_VERSION_REs = ["(jdk.*)/jre", "Creating (jdk.*)/jre"]
 CUSTOM_JDK_NUMBER = "3"
+IS_CUSTOM_JDK = False
 JDK_MIN_FILESIZE = 5000
 CREATE_JDK_DIR_CMD = "/bin/mkdir -p " + configDefaults.JDK_INSTALL_DIR
 MAKE_FILE_EXECUTABLE_CMD = "chmod a+x {0}"
@@ -1560,6 +1562,7 @@ def install_jce_manualy(args):
 #
 def download_jdk(args):
   global JDK_INDEX
+  global IS_CUSTOM_JDK
   properties = get_ambari_properties()
   if properties == -1:
     err = "Error getting ambari properties"
@@ -1604,6 +1607,7 @@ Enter choice (""" + jdk_num + "):",
     )
 
     if jdk_num == CUSTOM_JDK_NUMBER:
+      IS_CUSTOM_JDK = True
       print_warning_msg("JDK must be installed on all hosts and JAVA_HOME must be valid on all hosts.")
       print_warning_msg(jcePolicyWarn)
       args.java_home = get_validated_string_input("Path to JAVA_HOME: ", None, None, None, False, False)
@@ -2025,6 +2029,62 @@ def verify_setup_allowed():
   return 0
 
 
+def unpack_jce_policy():
+  properties = get_ambari_properties()
+  jdk_path = properties.get_property(JAVA_HOME_PROPERTY)
+  jdk_security_path = jdk_path + os.sep + configDefaults.JDK_SECURITY_DIR
+
+  jce_name = properties.get_property(JCE_NAME_PROPERTY)
+  jce_zip_path = configDefaults.SERVER_RESOURCES_DIR + os.sep + jce_name
+  unpack_cmd = 'unzip -o -j -q {0} -d {1}'.format(jce_zip_path, jdk_security_path)
+
+  if os.path.exists(jdk_security_path) and os.path.exists(jce_zip_path) and validate_jdk(jdk_path):
+    try:
+      retcode, out, err = run_os_command(unpack_cmd)
+      if retcode != 0:
+        raise FatalException(retcode, err)
+    except Exception as e:
+      err = "Fail during the execution of '{0}'. {1}".format(unpack_cmd.format(jce_zip_path, jdk_security_path), e)
+      raise FatalException(1, err)
+  else:
+    err = "Can not execute {0}. The path {1}, {2} or {3} is invalid.".format(unpack_cmd, jdk_security_path, jce_zip_path, jdk_path)
+    raise FatalException(1, err)
+
+#
+# Setup the JCE policy for Ambari Server.
+#
+def setup_jce_policy(path):
+  if os.path.exists(path):
+    copy_cmd = 'cp {0} {1}'.format(path, configDefaults.SERVER_RESOURCES_DIR)
+    try:
+      retcode, out, err = run_os_command(copy_cmd)
+      if retcode != 0:
+        raise FatalException(retcode, err)
+    except Exception as e:
+      err = "Fail during the execution of '{0}'. {1}".format(copy_cmd.format(path, configDefaults.SERVER_RESOURCES_DIR), e)
+      raise FatalException(1, err)
+  else:
+    err = "Can not run 'setup-jce'. Invalid path {0}.".format(path)
+    raise FatalException(1, err)
+  conf_file = search_file(AMBARI_PROPERTIES_FILE, get_conf_dir())
+  properties = get_ambari_properties()
+  zip_name = os.path.split(path)[1]
+  properties.process_pair(JCE_NAME_PROPERTY, zip_name)
+  try:
+    properties.store(open(conf_file, "w"))
+  except Exception, e:
+    print_error_msg('Could not write ambari config file "%s": %s' % (conf_file, e))
+
+  print 'Installing JCE policy...'
+  try:
+    unpack_jce_policy()
+  except FatalException as e:
+    err = 'Installing JCE failed: {0}. Exiting.'.format(e)
+    raise FatalException(e.code, err)
+  print 'NOTE: Restart Ambari Server to apply changes' + \
+        ' ("ambari-server restart|stop|start")'
+
+
 #
 # Setup the Ambari Server.
 #
@@ -2083,6 +2143,15 @@ def setup(args):
     err = 'Downloading or installing JDK failed: {0}. Exiting.'.format(e)
     raise FatalException(e.code, err)
 
+  if not IS_CUSTOM_JDK: # For a default JDK, also install the JCE policy automatically
+    print 'Installing JCE policy...'
+    try:
+      unpack_jce_policy()
+    except FatalException as e:
+      err = 'Installing JCE failed: {0}. Exiting.'.format(e)
+      raise FatalException(e.code, err)
+
+
   print 'Completing setup...'
   retcode = configure_os_settings()
   if not retcode == 0:
@@ -4223,6 +4292,8 @@ def main():
     possible_args_numbers = [2,4] # OR
   elif action == BACKUP_ACTION or action == RESTORE_ACTION:
     possible_args_numbers = [1,2]
+  elif action == SETUP_JCE_ACTION:
+    possible_args_numbers = [2]
   else:
     possible_args_numbers = [1]
 
@@ -4244,6 +4315,9 @@ def main():
       start(options)
     elif action == STOP_ACTION:
       stop(options)
+    elif action == SETUP_JCE_ACTION:
+      path = args[1]
+      setup_jce_policy(path)
     elif action == RESET_ACTION:
       reset(options)
     elif action == STATUS_ACTION:
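
Taken together, main() accepts exactly two positional arguments for this action
(the action name and the archive path), so the intended invocation shape is
"ambari-server setup-jce /path/to/policy.zip". A self-contained sketch of that
dispatch (the stub stands in for the real setup_jce_policy above):

    # Simplified dispatch sketch; the real validation lives in main() above.
    def setup_jce_policy(path):       # stub for the function in the diff above
        print "would install JCE policy from %s" % path

    def dispatch_sketch(argv):
        if argv[0] == "setup-jce":
            if len(argv) != 2:        # the action name plus the archive path
                raise SystemExit("Usage: ambari-server setup-jce <path-to-archive>")
            setup_jce_policy(argv[1])

    dispatch_sketch(["setup-jce", "/path/to/policy.zip"])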

http://git-wip-us.apache.org/repos/asf/ambari/blob/049b6924/ambari-server/src/main/python/ambari_server/serverConfiguration.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/serverConfiguration.py b/ambari-server/src/main/python/ambari_server/serverConfiguration.py
index 44c30ce..6964719 100644
--- a/ambari-server/src/main/python/ambari_server/serverConfiguration.py
+++ b/ambari-server/src/main/python/ambari_server/serverConfiguration.py
@@ -156,6 +156,8 @@ class ServerConfigDefaults(object):
     self.JDK_INSTALL_DIR = ""
     self.JDK_SEARCH_PATTERN = ""
     self.JAVA_EXE_SUBPATH = ""
+    self.JDK_SECURITY_DIR = "jre/lib/security"
+    self.SERVER_RESOURCES_DIR = "/var/lib/ambari-server/resources"
 
     # Configuration defaults
     self.DEFAULT_CONF_DIR = ""
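
These two defaults combine with the java.home and jce.name values from
ambari.properties (property names assumed) to form the unzip source and
destination used by unpack_jce_policy, for example:

    # Example composition of the new defaults (illustrative values).
    import os
    java_home = "/usr/jdk64/jdk1.7.0_67"        # java.home
    jce_name = "UnlimitedJCEPolicyJDK7.zip"     # jce.name
    jce_zip_path = os.path.join("/var/lib/ambari-server/resources", jce_name)
    jdk_security_path = os.path.join(java_home, "jre/lib/security")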

http://git-wip-us.apache.org/repos/asf/ambari/blob/049b6924/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index 408c088..e0b390c 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -2418,6 +2418,119 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     result = _ambari_server_.find_jdk()
     self.assertEqual(result, "two")
 
+  @patch.object(_ambari_server_, "get_ambari_properties")
+  @patch("os.path.exists")
+  @patch.object(_ambari_server_, "run_os_command")
+  @patch.object(_ambari_server_, "validate_jdk")
+  def test_unpack_jce_policy(self, validate_jdk_mock, run_os_command_mock, exists_mock, get_ambari_properties_mock):
+    properties = MagicMock()
+    get_ambari_properties_mock.return_value = properties
+    exists_mock.return_value = True
+    run_os_command_mock.return_value = 0, "", ""
+    validate_jdk_mock.return_value = True
+
+    _ambari_server_.unpack_jce_policy()
+    self.assertTrue(run_os_command_mock.called)
+    self.assertTrue(validate_jdk_mock.called)
+
+    # Testing with bad jdk_security_path or jce_zip_path
+    exists_mock.return_value = False
+    try:
+      _ambari_server_.unpack_jce_policy()
+    except FatalException:
+      self.assertTrue(True)
+    exists_mock.return_value = True
+
+    # Testing with bad jdk path
+    validate_jdk_mock.return_value = False
+    try:
+      _ambari_server_.unpack_jce_policy()
+    except FatalException:
+      self.assertTrue(True)
+    validate_jdk_mock.return_value = True
+
+    # Testing with a non-zero return code from run_os_command
+    run_os_command_mock.return_value = 3, "", ""
+    try:
+      _ambari_server_.unpack_jce_policy()
+    except FatalException:
+      self.assertTrue(True)
+    run_os_command_mock.return_value = 0, "", ""
+
+    # Testing with an error produced by run_os_command
+    run_os_command_mock.reset_mock()
+    run_os_command_mock.side_effect = FatalException(1, "The command fails.")
+    try:
+      _ambari_server_.unpack_jce_policy()
+    except FatalException:
+      self.assertTrue(True)
+
+  @patch("os.path.exists")
+  @patch.object(_ambari_server_, "run_os_command")
+  @patch("os.path.split")
+  @patch.object(_ambari_server_, "unpack_jce_policy")
+  @patch.object(_ambari_server_, "get_ambari_properties")
+  @patch.object(_ambari_server_, "search_file")
+  @patch("__builtin__.open")
+  def test_setup_jce_policy(self, open_mock, search_file_mock, get_ambari_properties_mock, unpack_jce_policy_mock, split_mock, run_os_command_mock, exists_mock):
+    exists_mock.return_value = True
+    run_os_command_mock.return_value = 0, "", ""
+    properties = MagicMock()
+    unpack_jce_policy_mock.return_value = 0
+    get_ambari_properties_mock.return_value = properties
+    conf_file = 'etc/ambari-server/conf/ambari.properties'
+    search_file_mock.return_value = conf_file
+    split_mock.return_value = [_ambari_server_.configDefaults.SERVER_RESOURCES_DIR, 'UnlimitedJCEPolicyJDK7.zip']
+
+    path = '/path/to/JCEPolicy.zip'
+    copy_cmd = 'cp {0} {1}'.format(path, _ambari_server_.configDefaults.SERVER_RESOURCES_DIR)
+
+    _ambari_server_.setup_jce_policy(path)
+    run_os_command_mock.assert_called_with(copy_cmd)
+    self.assertTrue(unpack_jce_policy_mock.called)
+    self.assertTrue(get_ambari_properties_mock.called)
+    self.assertTrue(properties.store.called)
+
+    # Testing with bad path
+    exists_mock.return_value = False
+    try:
+      _ambari_server_.setup_jce_policy(path)
+    except FatalException:
+      self.assertTrue(True)
+    exists_mock.return_value = True
+
+    # Testing with a non-zero return code from run_os_command
+    run_os_command_mock.return_value = 2, "", "Fail"
+    try:
+      _ambari_server_.setup_jce_policy(path)
+    except FatalException:
+      self.assertTrue(True)
+    run_os_command_mock.return_value = 0, "", ""
+
+    # Testing with an error produced by run_os_command
+    run_os_command_mock.reset_mock()
+    run_os_command_mock.side_effect = FatalException(1, "The command fails.")
+    try:
+      _ambari_server_.setup_jce_policy(path)
+    except FatalException:
+      self.assertTrue(True)
+    run_os_command_mock.return_value = 0, "", ""
+
+    # Testing with an error produced by Properties.store function
+    properties.store.side_effect = Exception("Invalid file.")
+    try:
+      _ambari_server_.setup_jce_policy(path)
+    except Exception:
+      self.assertTrue(True)
+    properties.reset_mock()
+
+    # Testing with an error produced by unpack_jce_policy
+    unpack_jce_policy_mock.side_effect = FatalException(1, "Can not install JCE policy")
+    try:
+      _ambari_server_.setup_jce_policy(path)
+    except FatalException:
+      self.assertTrue(True)
+
   @patch("ambari_commons.firewall.run_os_command")
   @patch.object(OSCheck, "get_os_family")
   @patch.object(OSCheck, "get_os_type")
@@ -2446,7 +2559,8 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
   @patch.object(_ambari_server_, "extract_views")
   @patch.object(_ambari_server_, "adjust_directory_permissions")
   @patch.object(_ambari_server_, 'read_ambari_user')
-  def test_setup(self, read_ambari_user_mock, adjust_dirs_mock, extract_views_mock, proceedJDBCProperties_mock, is_server_runing_mock, is_root_mock, store_local_properties_mock,
+  @patch.object(_ambari_server_, "unpack_jce_policy")
+  def test_setup(self, unpack_jce_policy_mock, read_ambari_user_mock, adjust_dirs_mock, extract_views_mock, proceedJDBCProperties_mock, is_server_runing_mock, is_root_mock, store_local_properties_mock,
                  is_local_database_mock, store_remote_properties_mock,
                  setup_remote_db_mock, check_selinux_mock, check_jdbc_drivers_mock, check_ambari_user_mock,
                  check_postgre_up_mock, setup_db_mock, configure_postgres_mock,
@@ -2465,6 +2579,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
     run_os_command_mock.return_value = 3,"",""
     extract_views_mock.return_value = 0
+    unpack_jce_policy_mock.return_value = 0
 
     def reset_mocks():
       is_jdbc_user_changed_mock.reset_mock()
@@ -2574,6 +2689,19 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     self.assertFalse(check_selinux_mock.called)
     self.assertFalse(check_ambari_user_mock.called)
 
+
+    # Test that unpack_jce_policy is called
+    reset_mocks()
+    _ambari_server_.setup(args)
+    self.assertTrue(unpack_jce_policy_mock.called)
+
+    # Testing with an error produced by unpack_jce_policy
+    unpack_jce_policy_mock.side_effect = FatalException(1, "Can not install JCE policy")
+    try:
+      _ambari_server_.setup(args)
+    except FatalException:
+      self.assertTrue(True)
+
   @patch.object(_ambari_server_, "get_remote_script_line")
   @patch.object(_ambari_server_, "is_server_runing")
   @patch.object(_ambari_server_, "get_YN_input")
@@ -3958,7 +4086,8 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
   @patch.object(_ambari_server_, "configure_os_settings")
   @patch('__builtin__.raw_input')
   @patch.object(_ambari_server_, "check_selinux")
-  def test_setup_remote_db_wo_client(self, check_selinux_mock, raw_input, configure_os_settings_mock,
+  @patch.object(_ambari_server_, "unpack_jce_policy")
+  def test_setup_remote_db_wo_client(self, unpack_jce_policy_mock, check_selinux_mock, raw_input, configure_os_settings_mock,
                                      download_jdk_mock, check_ambari_user_mock, is_root_mock,
                                      check_jdbc_drivers_mock, is_local_db_mock,
                                      store_remote_properties_mock, get_db_cli_tool_mock, get_YN_input,
@@ -3982,6 +4111,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     download_jdk_mock.return_value = 0
     configure_os_settings_mock.return_value = 0
     verify_setup_allowed_method.return_value = 0
+    unpack_jce_policy_mock.return_value = 0
 
     try:
       _ambari_server_.setup(args)
@@ -5407,7 +5537,8 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
   @patch.object(_ambari_server_, "adjust_directory_permissions")
   @patch("sys.exit")
   @patch('__builtin__.raw_input')
-  def test_ambariServerSetupWithCustomDbName(self, raw_input, exit_mock, adjust_dirs_mock, extract_views_mock, store_password_file_mock,
+  @patch.object(_ambari_server_, "unpack_jce_policy")
+  def test_ambariServerSetupWithCustomDbName(self, unpack_jce_policy_mock, raw_input, exit_mock, adjust_dirs_mock, extract_views_mock, store_password_file_mock,
                                              get_is_secure_mock, setup_db_mock, is_root_mock, is_local_database_mock,
                                              check_selinux_mock, check_jdbc_drivers_mock, check_ambari_user_mock,
                                              check_postgre_up_mock, configure_postgres_mock,
@@ -5438,6 +5569,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     get_os_type_mock.return_value = ""
     get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
     run_os_command_mock.return_value = 3,"",""
+    unpack_jce_policy_mock.return_value = 0
 
     new_db = "newDBName"
     args.dbms = "postgres"


[14/14] ambari git commit: AMBARI-9261. Ensure enable/disable Kerberos logic should invoke only when state of security flag is changed (rlevas)

Posted by yu...@apache.org.
AMBARI-9261. Ensure enable/disable Kerberos logic should invoke only when state of security flag is changed (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/be939d32
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/be939d32
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/be939d32

Branch: refs/heads/2.0-preview
Commit: be939d32e161efecf27f0714a09367afbd29d08d
Parents: 790cf67
Author: Robert Levas <rl...@hortonworks.com>
Authored: Thu Jan 22 17:08:15 2015 -0500
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Jan 22 14:16:09 2015 -0800

----------------------------------------------------------------------
 .../AmbariManagementControllerImpl.java         | 62 +++++++++++++++++---
 .../AmbariManagementControllerImplTest.java     | 58 ++++++++++++++----
 .../AmbariManagementControllerTest.java         |  6 +-
 3 files changed, 102 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/be939d32/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index dd18e8d..7e4ce69 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -1196,7 +1196,51 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       cluster.setClusterName(request.getClusterName());
     }
 
-    // set or create configuration mapping (and optionally create the map of properties)
+    // ----------------------
+    // Check to see if the security state is being changed... if so, attempt to enable or disable
+    // Kerberos
+    boolean toggleKerberos = false;
+
+    String desiredSecurityState = null;
+    List<ConfigurationRequest> desiredConfig = request.getDesiredConfig();
+    if (desiredConfig != null) {
+      for (ConfigurationRequest configurationRequest : desiredConfig) {
+        if ("cluster-env".equals(configurationRequest.getType())) {
+          Map<String, String> properties = configurationRequest.getProperties();
+
+          if ((properties == null) || properties.isEmpty()) {
+            Config configClusterEnv = cluster.getConfig(configurationRequest.getType(), configurationRequest.getVersionTag());
+            if (configClusterEnv != null) {
+              properties = configClusterEnv.getProperties();
+            }
+          }
+
+          desiredSecurityState = (properties == null) ? null : properties.get("security_enabled");
+        }
+      }
+    }
+
+    if (desiredSecurityState != null) {
+      Config configClusterEnv = cluster.getDesiredConfigByType("cluster-env");
+      if (configClusterEnv == null) {
+        String message = "The 'cluster-env' configuration is not available";
+        LOG.error(message);
+        throw new AmbariException(message);
+      }
+
+      Map<String, String> clusterEnvProperties = configClusterEnv.getProperties();
+      if (clusterEnvProperties == null) {
+        String message = "The 'cluster-env' configuration properties are not available";
+        LOG.error(message);
+        throw new AmbariException(message);
+      }
+
+      toggleKerberos = !desiredSecurityState.equals(clusterEnvProperties.get("security_enabled"));
+    }
+    // ----------------------
+
+
+    // set or create configuration mapping (and optionally create the map of properties)
     if (null != request.getDesiredConfig()) {
       Set<Config> configs = new HashSet<Config>();
       String note = null;
@@ -1326,13 +1370,15 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     }
 
     RequestStageContainer requestStageContainer = null;
-    Map<String, Service> services = cluster.getServices();
-    if ((services != null) && services.containsKey("KERBEROS")) {
-      // Handle either adding or removing Kerberos from the cluster. This may generate multiple stages
-      // or not depending the current state of the cluster.  The main configuration used to determine
-      // whether Kerberos is to be added or removed is cluster-config/security_enabled.
-      requestStageContainer = kerberosHelper.toggleKerberos(cluster,
-          request.getKerberosDescriptor(), null);
+    if (toggleKerberos) {
+      Map<String, Service> services = cluster.getServices();
+      if ((services != null) && services.containsKey("KERBEROS")) {
+        // Handle either adding or removing Kerberos from the cluster. This may generate multiple stages
+        // or not depending the current state of the cluster.  The main configuration used to determine
+        // whether Kerberos is to be added or removed is cluster-config/security_enabled.
+        requestStageContainer = kerberosHelper.toggleKerberos(cluster,
+            request.getKerberosDescriptor(), null);
+      }
     }
 
     if (requestStageContainer != null) {
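
The net effect of the block above: a request triggers
kerberosHelper.toggleKerberos only when it actually changes the cluster-env
security_enabled flag. A minimal sketch of that guard (illustrative Python, not
the Java above):

    # Illustrative guard: toggle Kerberos only when the flag really changes.
    def should_toggle_kerberos(desired_cluster_env, current_cluster_env):
        desired = desired_cluster_env.get("security_enabled")
        if desired is None:          # the request does not touch the flag
            return False
        return desired != current_cluster_env.get("security_enabled")

    print should_toggle_kerberos({"security_enabled": "true"},
                                 {"security_enabled": "false"})      # True
    print should_toggle_kerberos({}, {"security_enabled": "false"})  # False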

http://git-wip-us.apache.org/repos/asf/ambari/blob/be939d32/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
index e713d7f..ab07df7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
@@ -45,6 +45,7 @@ import org.apache.ambari.server.security.ldap.LdapBatchDto;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.MaintenanceState;
 import org.apache.ambari.server.state.Service;
@@ -73,18 +74,7 @@ import java.util.TreeMap;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.createMockBuilder;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
+import static org.easymock.EasyMock.*;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
@@ -507,11 +497,12 @@ public class AmbariManagementControllerImplTest {
     // requests
     Set<ClusterRequest> setRequests = Collections.singleton(clusterRequest);
 
+    KerberosHelper kerberosHelper = createStrictMock(KerberosHelper.class);
     // expectations
     injector.injectMembers(capture(controllerCapture));
     expect(injector.getInstance(Gson.class)).andReturn(null);
     expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null);
-    expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class));
+    expect(injector.getInstance(KerberosHelper.class)).andReturn(kerberosHelper);
     expect(clusterRequest.getClusterName()).andReturn("clusterNew").times(4);
     expect(clusterRequest.getClusterId()).andReturn(1L).times(6);
     expect(clusters.getClusterById(1L)).andReturn(cluster).times(2);
@@ -536,6 +527,47 @@ public class AmbariManagementControllerImplTest {
   }
 
   /**
+   * Ensure that when the cluster is updated KerberosHelper.toggleKerberos is not invoked unless
+   * the security state is altered
+   */
+  @Test
+  public void testUpdateClustersToggleKerberosNotInvoked() throws Exception {
+    // member state mocks
+    Capture<AmbariManagementController> controllerCapture = new Capture<AmbariManagementController>();
+    Injector injector = createStrictMock(Injector.class);
+    Cluster cluster = createNiceMock(Cluster.class);
+    ActionManager actionManager = createNiceMock(ActionManager.class);
+    ClusterRequest clusterRequest = createNiceMock(ClusterRequest.class);
+
+    // requests
+    Set<ClusterRequest> setRequests = Collections.singleton(clusterRequest);
+
+    KerberosHelper kerberosHelper = createStrictMock(KerberosHelper.class);
+    // expectations
+    injector.injectMembers(capture(controllerCapture));
+    expect(injector.getInstance(Gson.class)).andReturn(null);
+    expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null);
+    expect(injector.getInstance(KerberosHelper.class)).andReturn(kerberosHelper);
+    expect(clusterRequest.getClusterId()).andReturn(1L).times(6);
+    expect(clusters.getClusterById(1L)).andReturn(cluster).times(2);
+    expect(cluster.getClusterName()).andReturn("cluster").times(2);
+
+    cluster.addSessionAttributes(anyObject(Map.class));
+    expectLastCall().once();
+
+    // replay mocks
+    replay(actionManager, cluster, clusters, injector, clusterRequest, sessionManager);
+
+    // test
+    AmbariManagementController controller = new AmbariManagementControllerImpl(actionManager, clusters, injector);
+    controller.updateClusters(setRequests, null);
+
+    // assert and verify
+    assertSame(controller, controllerCapture.getValue());
+    verify(actionManager, cluster, clusters, injector, clusterRequest, sessionManager);
+  }
+
+  /**
    * Ensure that RollbackException is thrown outside the updateClusters method
    * when a unique constraint violation occurs.
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/be939d32/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 5350662..2b4bbc6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -9366,7 +9366,7 @@ public class AmbariManagementControllerTest {
     injector.injectMembers(capture(controllerCapture));
     expect(injector.getInstance(Gson.class)).andReturn(null);
     expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(maintHelper);
-    expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class));
+    expect(injector.getInstance(KerberosHelper.class)).andReturn(createStrictMock(KerberosHelper.class));
 
     // getServices
     expect(clusters.getCluster("cluster1")).andReturn(cluster);
@@ -9410,7 +9410,7 @@ public class AmbariManagementControllerTest {
     injector.injectMembers(capture(controllerCapture));
     expect(injector.getInstance(Gson.class)).andReturn(null);
     expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(maintHelper);
-    expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class));
+    expect(injector.getInstance(KerberosHelper.class)).andReturn(createStrictMock(KerberosHelper.class));
 
     // getServices
     expect(clusters.getCluster("cluster1")).andReturn(cluster);
@@ -9469,7 +9469,7 @@ public class AmbariManagementControllerTest {
     injector.injectMembers(capture(controllerCapture));
     expect(injector.getInstance(Gson.class)).andReturn(null);
     expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(maintHelper);
-    expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class));
+    expect(injector.getInstance(KerberosHelper.class)).andReturn(createStrictMock(KerberosHelper.class));
 
     // getServices
     expect(clusters.getCluster("cluster1")).andReturn(cluster).times(4);
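
The createNiceMock-to-createStrictMock switch is what gives these tests teeth: a
strict mock fails on any unexpected call, so an unintended toggleKerberos
invocation now breaks the test instead of silently returning null. The analogous
check in the project's Python tests would be a sketch like:

    # Sketch: assert the helper was never called (Python analogue of a strict mock).
    from mock import MagicMock

    kerberos_helper = MagicMock()
    # ... exercise the code under test without changing security_enabled ...
    assert not kerberos_helper.toggleKerberos.called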


[11/14] ambari git commit: AMBARI-9253. Kerberos wizard->Start and Test services: all service check fails with customized smokeuser principal. (jaimin)

Posted by yu...@apache.org.
AMBARI-9253. Kerberos wizard->Start and Test services: all service check fails with customized smokeuser principal. (jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/786d7802
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/786d7802
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/786d7802

Branch: refs/heads/2.0-preview
Commit: 786d780293737c3b54a3655a122b5fb70fc1a3d8
Parents: 757256d
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Wed Jan 21 20:21:53 2015 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Jan 22 08:26:03 2015 -0800

----------------------------------------------------------------------
 .../resource_management/TestExecuteHadoopResource.py     |  5 +++--
 .../libraries/resources/execute_hadoop.py                |  2 +-
 .../HBASE/0.96.0.2.0/package/scripts/params.py           |  1 +
 .../HBASE/0.96.0.2.0/package/scripts/service_check.py    |  2 +-
 .../HIVE/0.12.0.2.0/package/files/templetonSmoke.sh      |  3 ++-
 .../0.12.0.2.0/package/scripts/hive_server_upgrade.py    |  2 +-
 .../HIVE/0.12.0.2.0/package/scripts/params.py            |  1 +
 .../HIVE/0.12.0.2.0/package/scripts/service_check.py     |  2 +-
 .../0.12.0.2.0/package/scripts/webhcat_service_check.py  | 10 ++++++++--
 .../KERBEROS/1.10.3-10/package/scripts/params.py         |  2 +-
 .../KNOX/0.5.0.2.2/package/scripts/params.py             |  1 +
 .../KNOX/0.5.0.2.2/package/scripts/service_check.py      |  2 +-
 .../4.0.0.2.0/package/alerts/alert_check_oozie_server.py |  2 +-
 .../OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh         |  3 ++-
 .../OOZIE/4.0.0.2.0/package/scripts/params.py            |  1 +
 .../OOZIE/4.0.0.2.0/package/scripts/service_check.py     |  2 +-
 .../PIG/0.12.0.2.0/package/scripts/params.py             |  1 +
 .../PIG/0.12.0.2.0/package/scripts/service_check.py      |  4 +++-
 .../SLIDER/0.60.0.2.2/package/scripts/params.py          |  1 +
 .../SLIDER/0.60.0.2.2/package/scripts/service_check.py   |  2 +-
 .../SQOOP/1.4.4.2.0/package/scripts/params.py            |  1 +
 .../SQOOP/1.4.4.2.0/package/scripts/service_check.py     |  2 +-
 .../2.1.0.2.0/package/scripts/mapred_service_check.py    |  2 +-
 .../YARN/2.1.0.2.0/package/scripts/params.py             |  1 +
 .../YARN/2.1.0.2.0/package/scripts/service_check.py      |  2 +-
 .../ZOOKEEPER/3.4.5.2.0/package/files/zkSmoke.sh         |  3 ++-
 .../ZOOKEEPER/3.4.5.2.0/package/scripts/params.py        |  1 +
 .../ZOOKEEPER/3.4.5.2.0/package/scripts/service_check.py | 11 +++++++++--
 .../3.4.5.2.0/package/scripts/zookeeper_service.py       |  2 +-
 .../stacks/2.0.6/HBASE/test_hbase_service_check.py       |  4 ++--
 .../python/stacks/2.0.6/HIVE/test_hive_service_check.py  |  6 +++---
 .../python/stacks/2.0.6/PIG/test_pig_service_check.py    |  2 ++
 .../test/python/stacks/2.0.6/SQOOP/test_service_check.py |  2 +-
 .../stacks/2.0.6/YARN/test_mapreduce2_service_check.py   |  2 +-
 .../python/stacks/2.0.6/YARN/test_yarn_service_check.py  |  2 +-
 .../stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py      |  2 +-
 .../2.0.6/ZOOKEEPER/test_zookeeper_service_check.py      |  6 +++---
 .../src/test/python/stacks/2.0.6/configs/secured.json    |  1 +
 .../src/test/python/stacks/2.1/configs/secured.json      |  1 +
 .../test/python/stacks/2.2/SLIDER/test_slider_client.py  |  2 +-
 .../src/test/python/stacks/2.2/configs/secured.json      |  1 +
 41 files changed, 69 insertions(+), 36 deletions(-)
----------------------------------------------------------------------
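
The common thread across these files: service checks previously handed the smoke
user's unix name to kinit, which only works when the principal's primary
component matches that name. With a customized principal, the checks must use
cluster-env/smokeuser_principal_name instead. A sketch of the distinction (the
principal, realm, and keytab path are illustrative values):

    smokeuser = "ambari-qa"                            # unix account
    smokeuser_principal = "ambari-qa-c1@EXAMPLE.COM"   # may differ from the unix name
    keytab = "/etc/security/keytabs/smokeuser.headless.keytab"

    # Before: fails once the principal has been customized
    old_cmd = "/usr/bin/kinit -kt %s %s" % (keytab, smokeuser)
    # After: authenticates as the principal actually stored in the keytab
    new_cmd = "/usr/bin/kinit -kt %s %s" % (keytab, smokeuser_principal)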


http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py b/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
index ae137a5..6ca6f36 100644
--- a/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
@@ -166,6 +166,7 @@ class TestExecuteHadoopResource(TestCase):
                     kinit_path_local="path",
                     conf_dir="conf_dir",
                     user="user",
+                    principal="principal",
                     tries=1,
                     keytab="keytab",
                     security_enabled=True,
@@ -175,9 +176,9 @@ class TestExecuteHadoopResource(TestCase):
       )
       self.assertEqual(execute_mock.call_count, 2)
       self.assertEqual(str(execute_mock.call_args_list[0][0][0]),
-                       "Execute['path -kt keytab user']")
+                       "Execute['path -kt keytab principal']")
       self.assertEqual(execute_mock.call_args_list[0][0][0].command,
-                       'path -kt keytab user')
+                       'path -kt keytab principal')
       self.assertEqual(execute_mock.call_args_list[0][0][0].arguments,
                        {'path': ['/bin'], 'user': 'user'})
       self.assertEqual(execute_mock.call_args_list[1][0][0].command,

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
index b4b0b52..73dd1a3 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
@@ -31,7 +31,7 @@ class ExecuteHadoop(Resource):
   try_sleep = ResourceArgument(default=0) # seconds
   user = ResourceArgument()
   logoutput = ResourceArgument()
-  principal = ResourceArgument(default=lambda obj: obj.user)
+  principal = ResourceArgument()
   bin_dir = ResourceArgument(default=[]) # appended to $PATH
   environment = ResourceArgument(default={})
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
index ace3901..e549b88 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
@@ -105,6 +105,7 @@ else:
   rs_hosts = default('/clusterHostInfo/hbase_rs_hosts', '/clusterHostInfo/all_hosts') 
 
 smoke_test_user = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 smokeuser_permissions = "RWXCA"
 service_check_data = functions.get_unique_id_and_date()
 user_group = config['configurations']['cluster-env']["user_group"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
index 15a306b..eea1cd1 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
@@ -29,7 +29,7 @@ class HbaseServiceCheck(Script):
     
     output_file = "/apps/hbase/data/ambarismoketest"
     test_cmd = format("fs -test -e {output_file}")
-    smokeuser_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smoke_test_user};") if params.security_enabled else ""
+    smokeuser_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};") if params.security_enabled else ""
     hbase_servicecheck_file = format("{exec_tmp_dir}/hbase-smoke.sh")
   
     File( format("{exec_tmp_dir}/hbaseSmokeVerify.sh"),

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
index 22202ee..34b78f5 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
@@ -25,10 +25,11 @@ export smoke_test_user=$2
 export smoke_user_keytab=$3
 export security_enabled=$4
 export kinit_path_local=$5
+export smokeuser_principal=$6
 export ttonurl="http://${ttonhost}:50111/templeton/v1"
 
 if [[ $security_enabled == "true" ]]; then
-  kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
+  kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smokeuser_principal}; "
 else
   kinitcmd=""
 fi

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
index 37fe7f1..4db3df6 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
@@ -41,7 +41,7 @@ def pre_upgrade_deregister():
   Logger.info('HiveServer2 executing "deregister" command in preparation for upgrade...')
 
   if params.security_enabled:
-    kinit_command=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
+    kinit_command=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal}; ")
     Execute(kinit_command,user=params.smokeuser)
 
   # calculate the current hive server version

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
index 00bada8..c59670b 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
@@ -148,6 +148,7 @@ smokeuser = config['configurations']['cluster-env']['smokeuser']
 smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
 smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 security_enabled = config['configurations']['cluster-env']['security_enabled']

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
index 09d31aa..9f822a9 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
@@ -34,7 +34,7 @@ class HiveServiceCheck(Script):
     port = int(format("{hive_server_port}"))
     print "Test connectivity to hive server"
     if params.security_enabled:
-      kinitcmd=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
+      kinitcmd=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal}; ")
     else:
       kinitcmd=None
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
index 8d15e47..e9d1f18 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
@@ -27,9 +27,15 @@ def webhcat_service_check():
        mode=0755
   )
 
+  if params.security_enabled:
+    smokeuser_keytab = params.smoke_user_keytab
+    smoke_user_principal = params.smokeuser_principal
+  else:
+    smokeuser_keytab = "no_keytab"
+    smoke_user_principal = "no_principal"
+
   cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
-               " {security_param} {kinit_path_local}",
-               smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
+               " {security_param} {kinit_path_local} {smoke_user_principal}")
 
   Execute(cmd,
           tries=3,
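
The "no_keytab"/"no_principal" placeholders keep the positional interface of templetonSmoke.sh stable whether or not security is enabled; the fully expanded commands (both taken verbatim from the updated Hive service-check tests further down) are:

  /tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit no_principal
  /tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit ambari-qa@EXAMPLE.COM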

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
index 58549bc..b3337cf 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
@@ -72,7 +72,7 @@ if config is not None:
     cluster_env = get_property_value(configurations, 'cluster-env')
 
     if cluster_env is not None:
-      smoke_test_principal = get_property_value(cluster_env, 'smokeuser', None, True, None)
+      smoke_test_principal = get_property_value(cluster_env, 'smokeuser_principal_name', None, True, None)
       smoke_test_keytab_file = get_property_value(cluster_env, 'smokeuser_keytab', None, True, None)
 
       default_group = get_property_value(cluster_env, 'user_group')

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
index 938d1d7..de3e4b8 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
@@ -138,6 +138,7 @@ users_ldif = config['configurations']['users-ldif']['content']
 java_home = config['hostLevelParams']['java_home']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 if security_enabled:

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/service_check.py
index 0349c24..e05262f 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/service_check.py
@@ -32,7 +32,7 @@ class KnoxServiceCheck(Script):
         python_executable = sys.executable
         validateStatusCmd = format("{python_executable} {validateKnoxFilePath} -p {knox_host_port} -n {knox_host_name}")
         if params.security_enabled:
-          kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")
+          kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
           smoke_cmd = format("{kinit_cmd} {validateStatusCmd}")
         else:
           smoke_cmd = validateStatusCmd

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
index 4e3e6ae..4efb999 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
@@ -66,7 +66,7 @@ def execute(parameters=None, host_name=None):
       else:
         return (RESULT_CODE_UNKNOWN, ['The Smokeuser keytab is required when security is enabled.'])
       kinit_path_local = get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-      kinitcmd = format("{kinit_path_local} -kt {smokeuser_keytab} {smokeuser}; ")
+      kinitcmd = format("{kinit_path_local} -kt {smokeuser_keytab} {smokeuser_principal}; ")
 
       Execute(kinitcmd,
               user=smokeuser,

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
index 30d878c..bdd4e26 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
@@ -65,6 +65,7 @@ export smoke_test_user=$5
 export security_enabled=$6
 export smoke_user_keytab=$7
 export kinit_path_local=$8
+export smokeuser_principal=$9
 
 export OOZIE_EXIT_CODE=0
 export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
@@ -95,7 +96,7 @@ sudo sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps
 sudo sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
 
 if [[ $security_enabled == "True" ]]; then
-  kinitcmd="${kinit_path_local} -kt ${smoke_user_keytab} ${smoke_test_user}; "
+  kinitcmd="${kinit_path_local} -kt ${smoke_user_keytab} ${smokeuser_principal}; "
 else 
   kinitcmd=""
 fi

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
index dc8ba0a..64e3f74 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
@@ -93,6 +93,7 @@ conf_dir = "/etc/oozie/conf"
 hive_conf_dir = "/etc/oozie/conf/action-conf/hive"
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 user_group = config['configurations']['cluster-env']['user_group']
 jdk_location = config['hostLevelParams']['jdk_location']
 check_db_connection_jar_name = "DBConnectionVerification.jar"

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
index 40f8b8d..cac6ede 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
@@ -43,7 +43,7 @@ def oozie_smoke_shell_file(
   os_family = System.get_instance().os_family
   
   if params.security_enabled:
-    sh_cmd = format("{tmp_dir}/{file_name} {os_family} {conf_dir} {oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local}")
+    sh_cmd = format("{tmp_dir}/{file_name} {os_family} {conf_dir} {oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local} {smokeuser_principal}")
   else:
     sh_cmd = format("{tmp_dir}/{file_name} {os_family} {conf_dir} {oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled}")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
index c31ac34..3d4acac 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
@@ -50,6 +50,7 @@ hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 user_group = config['configurations']['cluster-env']['user_group']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
index 8cbcdac..7137e60 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
@@ -42,6 +42,7 @@ class PigServiceCheck(Script):
       conf_dir = params.hadoop_conf_dir,
       # for kinit run
       keytab = params.smoke_user_keytab,
+      principal = params.smokeuser_principal,
       security_enabled = params.security_enabled,
       kinit_path_local = params.kinit_path_local,
       bin_dir = params.hadoop_bin_dir
@@ -75,6 +76,7 @@ class PigServiceCheck(Script):
         conf_dir = params.hadoop_conf_dir,
         # for kinit run
         keytab = params.smoke_user_keytab,
+        principal = params.smokeuser_principal,
         security_enabled = params.security_enabled,
         kinit_path_local = params.kinit_path_local,
         bin_dir = params.hadoop_bin_dir
@@ -84,7 +86,7 @@ class PigServiceCheck(Script):
       copy_tarballs_to_hdfs('tez', 'hadoop-client', params.smokeuser, params.hdfs_user, params.user_group)
 
       if params.security_enabled:
-        kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")
+        kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
         Execute(kinit_cmd,
                 user=params.smokeuser
         )

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
index 2ee07d7..fbb1973 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
@@ -42,6 +42,7 @@ else:
 slider_conf_dir = "/etc/slider/conf"
 hadoop_conf_dir = "/etc/hadoop/conf"
 smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
index bb54dc8..af085b8 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
@@ -28,7 +28,7 @@ class SliderServiceCheck(Script):
     env.set_params(params)
 
     smokeuser_kinit_cmd = format(
-      "{kinit_path_local} -kt {smokeuser_keytab} {smokeuser};") if params.security_enabled else ""
+      "{kinit_path_local} -kt {smokeuser_keytab} {smokeuser_principal};") if params.security_enabled else ""
 
     servicecheckcmd = format("{smokeuser_kinit_cmd} {slider_cmd} list")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
index 2972935..27a75e4 100644
--- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
@@ -47,6 +47,7 @@ else:
 zoo_conf_dir = "/etc/zookeeper"
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 user_group = config['configurations']['cluster-env']['user_group']
 sqoop_env_sh_template = config['configurations']['sqoop-env']['content']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py
index c18fbc3..140862c 100644
--- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py
@@ -31,7 +31,7 @@ class SqoopServiceCheck(Script):
     import params
     env.set_params(params)
     if params.security_enabled:
-      Execute(format("{kinit_path_local}  -kt {smoke_user_keytab} {smokeuser}"),
+      Execute(format("{kinit_path_local}  -kt {smoke_user_keytab} {smokeuser_principal}"),
               user = params.smokeuser,
       )
     Execute("sqoop version",

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
index a9f4367..4ee1839 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
@@ -36,7 +36,7 @@ class MapReduce2ServiceCheck(Script):
     run_wordcount_job = format("jar {jar_path} wordcount {input_file} {output_file}")
 
     if params.security_enabled:
-      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")
+      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
 
       Execute(kinit_cmd,
               user=params.smokeuser

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
index 8819dc0..53beb96 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
@@ -89,6 +89,7 @@ yarn_user = status_params.yarn_user
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 
 smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 yarn_executor_container_group = config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
index 7189664..42b113a 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
@@ -41,7 +41,7 @@ class ServiceCheck(Script):
     validateStatusCmd = format("{python_executable} {validateStatusFilePath} {component_type} -p {component_address} -s {hadoop_ssl_enabled}")
 
     if params.security_enabled:
-      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")
+      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
       smoke_cmd = format("{kinit_cmd} {validateStatusCmd}")
     else:
       smoke_cmd = validateStatusCmd

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/files/zkSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/files/zkSmoke.sh b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/files/zkSmoke.sh
index 04f5a35..39916d3 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/files/zkSmoke.sh
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/files/zkSmoke.sh
@@ -27,6 +27,7 @@ client_port=$4
 security_enabled=$5
 kinit_path_local=$6
 smoke_user_keytab=$7
+smokeuser_principal=$8
 export ZOOKEEPER_EXIT_CODE=0
 test_output_file=/tmp/zkSmoke.out
 errors_expr="ERROR|Exception"
@@ -35,7 +36,7 @@ zkhosts=` grep "^\s*server\.[[:digit:]]"  $conf_dir/zoo.cfg  | cut -f 2 -d '=' |
 zk_node1=`echo $zkhosts | tr ' ' '\n' | head -n 1`  
 echo "zk_node1=$zk_node1"
 if [[ $security_enabled == "True" ]]; then
-  kinitcmd="$kinit_path_local -kt $smoke_user_keytab $smoke_user"
+  kinitcmd="$kinit_path_local -kt $smoke_user_keytab $smokeuser_principal"
   sudo su $smoke_user -s /bin/bash - -c "$kinitcmd"
 fi
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params.py
index a180b0f..2484463 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params.py
@@ -88,6 +88,7 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 
 #log4j.properties

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/service_check.py
index a4efa41..338de32 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/service_check.py
@@ -31,9 +31,16 @@ class ZookeeperServiceCheck(Script):
          content=StaticFile('zkSmoke.sh')
     )
 
+    if params.security_enabled:
+      smokeUserKeytab=params.smoke_user_keytab
+      smokeUserPrincipal=params.smokeuser_principal
+    else:
+      smokeUserKeytab= "no_keytab"
+      smokeUserPrincipal="no_principal"
+
+
     cmd_quorum = format("{tmp_dir}/zkSmoke.sh {zk_cli_shell} {smokeuser} {config_dir} {clientPort} "
-                  "{security_enabled} {kinit_path_local} {smokeUserKeytab}",
-                  smokeUserKeytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
+                  "{security_enabled} {kinit_path_local} {smokeUserKeytab} {smokeUserPrincipal}")
 
     Execute(cmd_quorum,
             tries=3,
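
The same placeholder convention applies to zkSmoke.sh, which now receives the principal as its eighth argument; the expanded invocations (verbatim from the updated ZooKeeper service-check tests below) are:

  /tmp/zkSmoke.sh /usr/lib/zookeeper/bin/zkCli.sh ambari-qa /etc/zookeeper/conf 2181 False /usr/bin/kinit no_keytab no_principal
  /tmp/zkSmoke.sh /usr/lib/zookeeper/bin/zkCli.sh ambari-qa /etc/zookeeper/conf 2181 True /usr/bin/kinit /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM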

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_service.py b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_service.py
index 1495163..2be70cc 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_service.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_service.py
@@ -35,7 +35,7 @@ def zookeeper_service(action='start'):
     )
 
     if params.security_enabled:
-      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")
+      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
 
       Execute(kinit_cmd,
               user=params.smokeuser

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
index 1155a1a..8470269 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
@@ -83,13 +83,13 @@ class TestServiceCheck(RMFTestCase):
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hbase.headless.keytab hbase; /usr/lib/hbase/bin/hbase shell /tmp/hbase_grant_permissions.sh',
       user = 'hbase',
     )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; /usr/lib/hbase/bin/hbase --config /etc/hbase/conf shell /tmp/hbase-smoke.sh',
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; /usr/lib/hbase/bin/hbase --config /etc/hbase/conf shell /tmp/hbase-smoke.sh',
       logoutput = True,
       tries = 3,
       user = 'ambari-qa',
       try_sleep = 5,
     )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; /tmp/hbaseSmokeVerify.sh /etc/hbase/conf  /usr/lib/hbase/bin/hbase',
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; /tmp/hbaseSmokeVerify.sh /etc/hbase/conf  /usr/lib/hbase/bin/hbase',
       logoutput = True,
       tries = 3,
       user = 'ambari-qa',

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index 6da0ba5..8e138e5 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -77,7 +77,7 @@ class TestServiceCheck(RMFTestCase):
                               content = StaticFile('templetonSmoke.sh'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit',
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit no_principal',
                               logoutput = True,
                               path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                               tries = 3,
@@ -96,7 +96,7 @@ class TestServiceCheck(RMFTestCase):
                         hdp_stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; ',
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; ',
                               user = 'ambari-qa',
                               )
     self.assertResourceCalled('Execute', "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10000/;principal=hive/_HOST@EXAMPLE.COM' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
@@ -141,7 +141,7 @@ class TestServiceCheck(RMFTestCase):
                               content = StaticFile('templetonSmoke.sh'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit',
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit ambari-qa@EXAMPLE.COM',
                               logoutput = True,
                               path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                               tries = 3,

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
index fbd6efc..df30085 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
@@ -38,6 +38,7 @@ class TestPigServiceCheck(RMFTestCase):
       user = 'ambari-qa',
       conf_dir = '/etc/hadoop/conf',
       security_enabled = False,
+      principal = UnknownConfigurationMock(),
       keytab = UnknownConfigurationMock(),
       bin_dir = '/usr/bin',
       kinit_path_local = '/usr/bin/kinit'
@@ -75,6 +76,7 @@ class TestPigServiceCheck(RMFTestCase):
       try_sleep = 5,
       tries = 3,
       user = 'ambari-qa',
+      principal = 'ambari-qa@EXAMPLE.COM',
       conf_dir = '/etc/hadoop/conf',
       security_enabled = True, 
       keytab = '/etc/security/keytabs/smokeuser.headless.keytab',

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_service_check.py
index 8b8debe..784bb29 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_service_check.py
@@ -32,7 +32,7 @@ class TestSqoopServiceCheck(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit  -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa',
+    self.assertResourceCalled('Execute', '/usr/bin/kinit  -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM',
                               user = 'ambari-qa'
     )
     self.assertResourceCalled('Execute', 'sqoop version',

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
index f9f9812..3af8064 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
@@ -76,7 +76,7 @@ class TestServiceCheck(RMFTestCase):
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa;',
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;',
                       user = 'ambari-qa',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /user/ambari-qa/mapredsmokeoutput /user/ambari-qa/mapredsmokeinput',

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py
index 0daa824..3a351b2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py
@@ -64,7 +64,7 @@ class TestServiceCheck(RMFTestCase):
                           content = StaticFile('validateYarnComponentStatus.py'),
                           mode = 0755,
     )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; /usr/bin/python2.6 /tmp/validateYarnComponentStatus.py rm -p c6402.ambari.apache.org:8088 -s False',
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; /usr/bin/python2.6 /tmp/validateYarnComponentStatus.py rm -p c6402.ambari.apache.org:8088 -s False',
                           logoutput = True,
                           path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                           tries = 3,

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
index a1d9a15..d2b26d4 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
@@ -92,7 +92,7 @@ class TestZookeeperServer(RMFTestCase):
                   not_if = 'ls /var/run/zookeeper/zookeeper_server.pid >/dev/null 2>&1 && ps -p `cat /var/run/zookeeper/zookeeper_server.pid` >/dev/null 2>&1',
                   user = 'zookeeper'
     )
-    self.assertResourceCalled('Execute', "/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa;",
+    self.assertResourceCalled('Execute', "/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;",
                               user="ambari-qa")
     self.assertNoMoreResources()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_service_check.py
index 0c71e6f..a936b1d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_service_check.py
@@ -36,7 +36,7 @@ class TestServiceCheck(RMFTestCase):
                        content = StaticFile('zkSmoke.sh'),
                        mode = 0755,
     )
-    self.assertResourceCalled('Execute', '/tmp/zkSmoke.sh /usr/lib/zookeeper/bin/zkCli.sh ambari-qa /etc/zookeeper/conf 2181 False /usr/bin/kinit no_keytab',
+    self.assertResourceCalled('Execute', '/tmp/zkSmoke.sh /usr/lib/zookeeper/bin/zkCli.sh ambari-qa /etc/zookeeper/conf 2181 False /usr/bin/kinit no_keytab no_principal',
                        logoutput = True,
                        path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                        tries = 3,
@@ -56,7 +56,7 @@ class TestServiceCheck(RMFTestCase):
                        content = StaticFile('zkSmoke.sh'),
                        mode = 0755,
     )
-    self.assertResourceCalled('Execute', '/tmp/zkSmoke.sh /usr/lib/zookeeper/bin/zkCli.sh ambari-qa /etc/zookeeper/conf 2181 True /usr/bin/kinit /etc/security/keytabs/smokeuser.headless.keytab',
+    self.assertResourceCalled('Execute', '/tmp/zkSmoke.sh /usr/lib/zookeeper/bin/zkCli.sh ambari-qa /etc/zookeeper/conf 2181 True /usr/bin/kinit /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM',
                        logoutput = True,
                        path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                        tries = 3,
@@ -76,7 +76,7 @@ class TestServiceCheck(RMFTestCase):
                        content = StaticFile('zkSmoke.sh'),
                        mode = 0755,
     )
-    self.assertResourceCalled('Execute', '/tmp/zkSmoke.sh /usr/hdp/current/zookeeper-client/bin/zkCli.sh ambari-qa /etc/zookeeper/conf 2181 False /usr/bin/kinit no_keytab',
+    self.assertResourceCalled('Execute', '/tmp/zkSmoke.sh /usr/hdp/current/zookeeper-client/bin/zkCli.sh ambari-qa /etc/zookeeper/conf 2181 False /usr/bin/kinit no_keytab no_principal',
                        logoutput = True,
                        path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                        tries = 3,

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
index 119ab88..02403e8 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
@@ -476,6 +476,7 @@
             "security_enabled": "true",
             "ignore_groupsusers_create": "false",
             "smokeuser": "ambari-qa",
+            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
             "user_group": "hadoop",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.1/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/configs/secured.json b/ambari-server/src/test/python/stacks/2.1/configs/secured.json
index 7f8fd68..1857e4c 100644
--- a/ambari-server/src/test/python/stacks/2.1/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.1/configs/secured.json
@@ -517,6 +517,7 @@
             "security_enabled": "true",
             "ignore_groupsusers_create": "false",
             "smokeuser": "ambari-qa",
+            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM",
             "kerberos_domain": "EXAMPLE.COM",
             "user_group": "hadoop",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py b/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
index f6dfbc0..9ad4b02 100644
--- a/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
@@ -80,7 +80,7 @@ class TestSliderClient(RMFTestCase):
     )
 
     self.assertResourceCalled('Execute',
-                              '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; /usr/lib/slider/bin/slider list',
+                              '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; /usr/lib/slider/bin/slider list',
                               logoutput=True,
                               tries=3,
                               user='ambari-qa',

http://git-wip-us.apache.org/repos/asf/ambari/blob/786d7802/ambari-server/src/test/python/stacks/2.2/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/secured.json b/ambari-server/src/test/python/stacks/2.2/configs/secured.json
index 2d020f6..ea64240 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/secured.json
@@ -44,6 +44,7 @@
             "kerberos_domain": "EXAMPLE.COM",
             "user_group": "hadoop",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
+            "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM",
             "kinit_path_local": "/usr/bin"
         },
         "webhcat-site": {


[13/14] ambari git commit: AMBARI-9279. MapReduce2 Service Check fails after enabling Kerberos with permission issue in local filesystem (rlevas)

Posted by yu...@apache.org.
AMBARI-9279. MapReduce2 Service Check fails after enabling Kerberos with permission issue in local filesystem (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/790cf67c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/790cf67c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/790cf67c

Branch: refs/heads/2.0-preview
Commit: 790cf67c7693e1912c21f9323fbaea397440de43
Parents: a03fb1b
Author: Robert Levas <rl...@hortonworks.com>
Authored: Thu Jan 22 16:29:52 2015 -0500
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Jan 22 14:15:43 2015 -0800

----------------------------------------------------------------------
 .../src/test/python/stacks/2.0.6/YARN/test_nodemanager.py          | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/790cf67c/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index 4a3f32d..eb46524 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -752,4 +752,4 @@ class TestNodeManager(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
\ No newline at end of file
+    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})


[10/14] ambari git commit: AMBARI-9244. JAAS configuration file parser leaves trailing quote in quoted values (rlevas)

Posted by yu...@apache.org.
AMBARI-9244. JAAS configuration file parser leaves trailing quote in quoted values (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/757256d7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/757256d7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/757256d7

Branch: refs/heads/2.0-preview
Commit: 757256d7a6990a1840b981f4343e1d80f34f11dc
Parents: 03d7aca
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Jan 21 21:09:28 2015 -0500
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Wed Jan 21 18:15:09 2015 -0800

----------------------------------------------------------------------
 .../resource_management/TestSecurityCommons.py  | 49 ++++++++++++++++++++
 .../libraries/functions/security_commons.py     |  3 +-
 2 files changed, 51 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/757256d7/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py b/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
index 196ae34..602b88b 100644
--- a/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
+++ b/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
@@ -21,6 +21,7 @@ from unittest import TestCase
 from resource_management.libraries.functions.security_commons import *
 from datetime import datetime, timedelta
 from tempfile import gettempdir
+import os
 
 class TestSecurityCommons(TestCase):
   @patch('os.path.isfile')
@@ -119,6 +120,54 @@ class TestSecurityCommons(TestCase):
 
     self.assertEquals(not result[config_file].items(), True)
 
+  def test_get_params_from_filesystem_JAAS(self):
+    conf_dir = gettempdir()
+    jaas_file = "test_jaas.conf"
+    jaas_file_path = conf_dir + os.sep + jaas_file
+
+    # Create a temporary test file (mocking a file for reading isn't available in the
+    # current version of the library)
+    with open(jaas_file_path, "w+") as f:
+      f.write('Client {\n'
+              '  com.sun.security.auth.module.Krb5LoginModule required\n'
+              '  useKeyTab=true\n'
+              '  storeKey=true\n'
+              '  useTicketCache=false\n'
+              '  keyTab="/etc/security/keytabs/hbase.service.keytab"\n'
+              '  principal="hbase/vp-ambari-ranger-med-0120-2.cs1cloud.internal@EXAMPLE.COM";\n'
+              '};\n')
+
+    config_file = {
+      jaas_file : FILE_TYPE_JAAS_CONF
+    }
+
+    result = get_params_from_filesystem(conf_dir, config_file)
+
+    self.assertIn('test_jaas', result)
+    self.assertIn('Client', result['test_jaas'])
+
+    self.assertIn('com.sun.security.auth.module.Krb5LoginModule', result['test_jaas']['Client'])
+    self.assertEquals('required', result['test_jaas']['Client']['com.sun.security.auth.module.Krb5LoginModule'])
+
+    self.assertIn('useKeyTab', result['test_jaas']['Client'])
+    self.assertEquals('true', result['test_jaas']['Client']['useKeyTab'])
+
+    self.assertIn('storeKey', result['test_jaas']['Client'])
+    self.assertEquals('true', result['test_jaas']['Client']['storeKey'])
+
+    self.assertIn('useTicketCache', result['test_jaas']['Client'])
+    self.assertEquals('false', result['test_jaas']['Client']['useTicketCache'])
+
+    self.assertIn('keyTab', result['test_jaas']['Client'])
+    self.assertEquals('/etc/security/keytabs/hbase.service.keytab', result['test_jaas']['Client']['keyTab'])
+
+    self.assertIn('principal', result['test_jaas']['Client'])
+    self.assertEquals('hbase/vp-ambari-ranger-med-0120-2.cs1cloud.internal@EXAMPLE.COM', result['test_jaas']['Client']['principal'])
+
+    os.unlink(jaas_file_path)
+
+    print result
+
   @patch('xml.etree.ElementTree.parse')
   def test_get_params_from_filesystem(self, et_parser_mock):
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/757256d7/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py b/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
index 7f6ed00..1e92f9d 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
@@ -146,13 +146,14 @@ def get_params_from_filesystem(conf_dir, config_files):
 
     elif file_type == FILE_TYPE_JAAS_CONF:
       section_header = re.compile('^(\w+)\s+\{\s*$')
-      section_data = re.compile('^\s*([^ \s\=\}\{]+)\s*=?\s*"?([^ ";].+)"?;?\s*$')
+      section_data = re.compile('^\s*([^ \s\=\}\{]+)\s*=?\s*"?([^ ";]+)"?;?\s*$')
       section_footer = re.compile('^\}\s*;?\s*$')
       section_name = "root"
       result[file_name] = {}
       with open(conf_dir + os.sep + config_file, 'r') as f:
         for line in f:
           if line:
+            line = line.strip()
             m = section_header.search(line)
             if m:
               section_name = m.group(1)
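
To see the parser fix in isolation, here is a minimal sketch (not part of the patch) contrasting the old and new section_data patterns on a keyTab line like the one exercised by the new JAAS test above:

import re

# Old pattern: the greedy ".+" in the value group runs to the end of the line,
# so the optional closing quote matches nothing and stays inside the capture.
old_data = re.compile(r'^\s*([^ \s\=\}\{]+)\s*=?\s*"?([^ ";].+)"?;?\s*$')
# Fixed pattern: the value group admits no space, quote, or semicolon characters,
# so it stops before the closing quote, which the trailing "? then consumes.
new_data = re.compile(r'^\s*([^ \s\=\}\{]+)\s*=?\s*"?([^ ";]+)"?;?\s*$')

line = 'keyTab="/etc/security/keytabs/hbase.service.keytab"'
print old_data.search(line).group(2)   # .../hbase.service.keytab"  <- trailing quote
print new_data.search(line).group(2)   # .../hbase.service.keytab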


[09/14] ambari git commit: Revert "AMBARI-9228. Ambari Server setup to install and copy JCE policy file in-place (handle both Default / Custom JDK scenarios) (rlevas)"

Posted by yu...@apache.org.
Revert "AMBARI-9228. Ambari Server setup to install and copy JCE policy file in-place (handle both Default / Custom JDK scenarios) (rlevas)"

This reverts commit 049b692472151a4c3488afd2157dcd2ff509f265.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/03d7acae
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/03d7acae
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/03d7acae

Branch: refs/heads/2.0-preview
Commit: 03d7acae7493212f71c37efdd00b261da7856c50
Parents: 1154fe4
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Wed Jan 21 17:28:35 2015 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Wed Jan 21 17:28:35 2015 -0800

----------------------------------------------------------------------
 ambari-server/sbin/ambari-server                |   6 +-
 ambari-server/src/main/python/ambari-server.py  | 195 -------------------
 .../python/ambari_server/serverConfiguration.py |   2 -
 .../src/test/python/TestAmbariServer.py         | 138 +------------
 4 files changed, 4 insertions(+), 337 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/03d7acae/ambari-server/sbin/ambari-server
----------------------------------------------------------------------
diff --git a/ambari-server/sbin/ambari-server b/ambari-server/sbin/ambari-server
index 168790b..c034f5d 100755
--- a/ambari-server/sbin/ambari-server
+++ b/ambari-server/sbin/ambari-server
@@ -111,10 +111,6 @@ case "$1" in
         echo -e "Setup ambari-server"
         $PYTHON /usr/sbin/ambari-server.py $@
         ;;
-  setup-jce)
-        echo -e "Updating jce policy"
-        $PYTHON /usr/sbin/ambari-server.py $@
-        ;;
   setup-ldap)
         echo -e "Setting up LDAP properties..."
         $PYTHON /usr/sbin/ambari-server.py $@
@@ -141,7 +137,7 @@ case "$1" in
         ;;
   *)
         echo "Usage: /usr/sbin/ambari-server
-        {start|stop|restart|setup|setup-jce|upgrade|status|upgradestack|setup-ldap|sync-ldap|setup-security|refresh-stack-hash|backup|restore} [options]
+        {start|stop|restart|setup|upgrade|status|upgradestack|setup-ldap|sync-ldap|setup-security|refresh-stack-hash|backup|restore} [options]
         Use usr/sbin/ambari-server <action> --help to get details on options available.
         Or, simply invoke ambari-server.py --help to print the options."
         exit 1

http://git-wip-us.apache.org/repos/asf/ambari/blob/03d7acae/ambari-server/src/main/python/ambari-server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari-server.py b/ambari-server/src/main/python/ambari-server.py
index b75611f..7c08083 100755
--- a/ambari-server/src/main/python/ambari-server.py
+++ b/ambari-server/src/main/python/ambari-server.py
@@ -81,7 +81,6 @@ UPGRADE_ACTION = "upgrade"
 UPGRADE_STACK_ACTION = "upgradestack"
 STATUS_ACTION = "status"
 SETUP_HTTPS_ACTION = "setup-https"
-SETUP_JCE_ACTION = "setup-jce"
 LDAP_SETUP_ACTION = "setup-ldap"
 LDAP_SYNC_ACTION = "sync-ldap"
 SETUP_GANGLIA_HTTPS_ACTION = "setup-ganglia-https"
@@ -404,7 +403,6 @@ DEFAULT_JDK16_LOCATION = "/usr/jdk64/jdk1.6.0_31"
 JDK_INDEX = 0
 JDK_VERSION_REs = ["(jdk.*)/jre", "Creating (jdk.*)/jre"]
 CUSTOM_JDK_NUMBER = "3"
-IS_CUSTOM_JDK = False
 JDK_MIN_FILESIZE = 5000
 CREATE_JDK_DIR_CMD = "/bin/mkdir -p " + configDefaults.JDK_INSTALL_DIR
 MAKE_FILE_EXECUTABLE_CMD = "chmod a+x {0}"
@@ -1562,7 +1560,6 @@ def install_jce_manualy(args):
 #
 def download_jdk(args):
   global JDK_INDEX
-  global IS_CUSTOM_JDK
   properties = get_ambari_properties()
   if properties == -1:
     err = "Error getting ambari properties"
@@ -1607,7 +1604,6 @@ Enter choice (""" + jdk_num + "):",
     )
 
     if jdk_num == CUSTOM_JDK_NUMBER:
-      IS_CUSTOM_JDK = True
       print_warning_msg("JDK must be installed on all hosts and JAVA_HOME must be valid on all hosts.")
       print_warning_msg(jcePolicyWarn)
       args.java_home = get_validated_string_input("Path to JAVA_HOME: ", None, None, None, False, False)
@@ -2029,174 +2025,6 @@ def verify_setup_allowed():
   return 0
 
 
-def unpack_jce_policy():
-  properties = get_ambari_properties()
-  jdk_path = properties.get_property(JAVA_HOME_PROPERTY)
-  jdk_security_path = jdk_path + os.sep + configDefaults.JDK_SECURITY_DIR
-
-  jce_name = properties.get_property(JCE_NAME_PROPERTY)
-  jce_zip_path = configDefaults.SERVER_RESOURCES_DIR + os.sep + jce_name
-  unpack_cmd = 'unzip -o -j -q {0} -d {1}'.format(jce_zip_path, jdk_security_path)
-
-  if os.path.exists(jdk_security_path) and os.path.exists(jce_zip_path) and validate_jdk(jdk_path):
-    try:
-      retcode, out, err = run_os_command(unpack_cmd)
-      if retcode != 0:
-        raise FatalException(retcode, err)
-    except Exception as e:
-      err = "Fail during the execution of '{0}'. {1}".format(unpack_cmd.format(jce_zip_path, jdk_security_path), e)
-      raise FatalException(1, err)
-  else:
-    err = "Can not execute {0}. The path {1}, {2} or {3} is invalid.".format(unpack_cmd, jdk_security_path, jce_zip_path, jdk_path)
-    raise FatalException(1, err)
-
-#
-# Setup the JCE policy for Ambari Server.
-#
-def setup_jce_policy(path):
-  if os.path.exists(path):
-    copy_cmd = 'cp {0} {1}'.format(path, configDefaults.SERVER_RESOURCES_DIR)
-    try:
-      retcode, out, err = run_os_command(copy_cmd)
-      if retcode != 0:
-        raise FatalException(retcode, err)
-    except Exception as e:
-      err = "Fail during the execution of '{0}'. {1}".format(copy_cmd.format(path, configDefaults.SERVER_RESOURCES_DIR), e)
-      raise FatalException(1, err)
-  else:
-    err = "Can not run 'setup-jce'. Invalid path {0}.".format(path)
-    raise FatalException(1, err)
-  conf_file = search_file(AMBARI_PROPERTIES_FILE, get_conf_dir())
-  properties = get_ambari_properties()
-  zip_name = os.path.split(path)[1]
-  properties.process_pair(JCE_NAME_PROPERTY, zip_name)
-  try:
-    properties.store(open(conf_file, "w"))
-  except Exception, e:
-    print_error_msg('Could not write ambari config file "%s": %s' % (conf_file, e))
-
-  print 'Installing JCE policy...'
-  try:
-    unpack_jce_policy()
-  except FatalException as e:
-    err = 'Installing JCE failed: {0}. Exiting.'.format(e)
-    raise FatalException(e.code, err)
-  print 'NOTE: Restart Ambari Server to apply changes' + \
-        ' ("ambari-server restart|stop|start")'
-
-
-def unpack_jce_policy():
-  properties = get_ambari_properties()
-  jdk_path = properties.get_property(JAVA_HOME_PROPERTY)
-  jdk_security_path = jdk_path + os.sep + configDefaults.JDK_SECURITY_DIR
-
-  jce_name = properties.get_property(JCE_NAME_PROPERTY)
-  jce_zip_path = configDefaults.SERVER_RESOURCES_DIR + os.sep + jce_name
-  unpack_cmd = 'unzip -o -j -q {0} -d {1}'.format(jce_zip_path, jdk_security_path)
-
-  if os.path.exists(jdk_security_path) and os.path.exists(jce_zip_path) and validate_jdk(jdk_path):
-    try:
-      retcode, out, err = run_os_command(unpack_cmd)
-      if retcode != 0:
-        raise FatalException(retcode, err)
-    except Exception as e:
-      err = "Fail during the execution of '{0}'. {1}".format(unpack_cmd.format(jce_zip_path, jdk_security_path), e)
-      raise FatalException(1, err)
-  else:
-    err = "Can not execute {0}. The path {1}, {2} or {3} is invalid.".format(unpack_cmd, jdk_security_path, jce_zip_path, jdk_path)
-    raise FatalException(1, err)
-
-#
-# Setup the JCE policy for Ambari Server.
-#
-def setup_jce_policy(path):
-  if os.path.exists(path):
-    copy_cmd = 'cp {0} {1}'.format(path, configDefaults.SERVER_RESOURCES_DIR)
-    try:
-      retcode, out, err = run_os_command(copy_cmd)
-      if retcode != 0:
-        raise FatalException(retcode, err)
-    except Exception as e:
-      err = "Fail during the execution of '{0}'. {1}".format(copy_cmd.format(path, configDefaults.SERVER_RESOURCES_DIR), e)
-      raise FatalException(1, err)
-  else:
-    err = "Can not run 'setup-jce'. Invalid path {0}.".format(path)
-    raise FatalException(1, err)
-  conf_file = search_file(AMBARI_PROPERTIES_FILE, get_conf_dir())
-  properties = get_ambari_properties()
-  zip_name = os.path.split(path)[1]
-  properties.process_pair(JCE_NAME_PROPERTY, zip_name)
-  try:
-    properties.store(open(conf_file, "w"))
-  except Exception, e:
-    print_error_msg('Could not write ambari config file "%s": %s' % (conf_file, e))
-
-  print 'Installing JCE policy...'
-  try:
-    unpack_jce_policy()
-  except FatalException as e:
-    err = 'Installing JCE failed: {0}. Exiting.'.format(e)
-    raise FatalException(e.code, err)
-  print 'NOTE: Restart Ambari Server to apply changes' + \
-        ' ("ambari-server restart|stop|start")'
-
-
-def unpack_jce_policy():
-  properties = get_ambari_properties()
-  jdk_path = properties.get_property(JAVA_HOME_PROPERTY)
-  jdk_security_path = jdk_path + os.sep + configDefaults.JDK_SECURITY_DIR
-
-  jce_name = properties.get_property(JCE_NAME_PROPERTY)
-  jce_zip_path = configDefaults.SERVER_RESOURCES_DIR + os.sep + jce_name
-  unpack_cmd = 'unzip -o -j -q {0} -d {1}'.format(jce_zip_path, jdk_security_path)
-
-  if os.path.exists(jdk_security_path) and os.path.exists(jce_zip_path) and validate_jdk(jdk_path):
-    try:
-      retcode, out, err = run_os_command(unpack_cmd)
-      if retcode != 0:
-        raise FatalException(retcode, err)
-    except Exception as e:
-      err = "Fail during the execution of '{0}'. {1}".format(unpack_cmd.format(jce_zip_path, jdk_security_path), e)
-      raise FatalException(1, err)
-  else:
-    err = "Can not execute {0}. The path {1}, {2} or {3} is invalid.".format(unpack_cmd, jdk_security_path, jce_zip_path, jdk_path)
-    raise FatalException(1, err)
-
-#
-# Setup the JCE policy for Ambari Server.
-#
-def setup_jce_policy(path):
-  if os.path.exists(path):
-    copy_cmd = 'cp {0} {1}'.format(path, configDefaults.SERVER_RESOURCES_DIR)
-    try:
-      retcode, out, err = run_os_command(copy_cmd)
-      if retcode != 0:
-        raise FatalException(retcode, err)
-    except Exception as e:
-      err = "Fail during the execution of '{0}'. {1}".format(copy_cmd.format(path, configDefaults.SERVER_RESOURCES_DIR), e)
-      raise FatalException(1, err)
-  else:
-    err = "Can not run 'setup-jce'. Invalid path {0}.".format(path)
-    raise FatalException(1, err)
-  conf_file = search_file(AMBARI_PROPERTIES_FILE, get_conf_dir())
-  properties = get_ambari_properties()
-  zip_name = os.path.split(path)[1]
-  properties.process_pair(JCE_NAME_PROPERTY, zip_name)
-  try:
-    properties.store(open(conf_file, "w"))
-  except Exception, e:
-    print_error_msg('Could not write ambari config file "%s": %s' % (conf_file, e))
-
-  print 'Installing JCE policy...'
-  try:
-    unpack_jce_policy()
-  except FatalException as e:
-    err = 'Installing JCE failed: {0}. Exiting.'.format(e)
-    raise FatalException(e.code, err)
-  print 'NOTE: Restart Ambari Server to apply changes' + \
-        ' ("ambari-server restart|stop|start")'
-
-
 #
 # Setup the Ambari Server.
 #
@@ -2255,24 +2083,6 @@ def setup(args):
     err = 'Downloading or installing JDK failed: {0}. Exiting.'.format(e)
     raise FatalException(e.code, err)
 
-  print 'Installing JCE policy...'
-  try:
-    unpack_jce_policy()
-  except FatalException as e:
-    err = 'Installing JCE failed: {0}. Exiting.'.format(e)
-    raise FatalException(e.code, err)
-
-
-
-  if not IS_CUSTOM_JDK: # If it's not a custom JDK, will also install JCE policy automatically
-    print 'Installing JCE policy...'
-    try:
-      unpack_jce_policy()
-    except FatalException as e:
-      err = 'Installing JCE failed: {0}. Exiting.'.format(e)
-      raise FatalException(e.code, err)
-
-
   print 'Completing setup...'
   retcode = configure_os_settings()
   if not retcode == 0:
@@ -4413,8 +4223,6 @@ def main():
     possible_args_numbers = [2,4] # OR
   elif action == BACKUP_ACTION or action == RESTORE_ACTION:
     possible_args_numbers = [1,2]
-  elif action == SETUP_JCE_ACTION:
-    possible_args_numbers = [2]
   else:
     possible_args_numbers = [1]
 
@@ -4436,9 +4244,6 @@ def main():
       start(options)
     elif action == STOP_ACTION:
       stop(options)
-    elif action == SETUP_JCE_ACTION:
-      path = args[1]
-      setup_jce_policy(path)
     elif action == RESET_ACTION:
       reset(options)
     elif action == STATUS_ACTION:

http://git-wip-us.apache.org/repos/asf/ambari/blob/03d7acae/ambari-server/src/main/python/ambari_server/serverConfiguration.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/serverConfiguration.py b/ambari-server/src/main/python/ambari_server/serverConfiguration.py
index 6964719..44c30ce 100644
--- a/ambari-server/src/main/python/ambari_server/serverConfiguration.py
+++ b/ambari-server/src/main/python/ambari_server/serverConfiguration.py
@@ -156,8 +156,6 @@ class ServerConfigDefaults(object):
     self.JDK_INSTALL_DIR = ""
     self.JDK_SEARCH_PATTERN = ""
     self.JAVA_EXE_SUBPATH = ""
-    self.JDK_SECURITY_DIR = "jre/lib/security"
-    self.SERVER_RESOURCES_DIR = "/var/lib/ambari-server/resources"
 
     # Configuration defaults
     self.DEFAULT_CONF_DIR = ""

http://git-wip-us.apache.org/repos/asf/ambari/blob/03d7acae/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index e0b390c..408c088 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -2418,119 +2418,6 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     result = _ambari_server_.find_jdk()
     self.assertEqual(result, "two")
 
-  @patch.object(_ambari_server_, "get_ambari_properties")
-  @patch("os.path.exists")
-  @patch.object(_ambari_server_, "run_os_command")
-  @patch.object(_ambari_server_, "validate_jdk")
-  def test_unpack_jce_policy(self, validate_jdk_mock, run_os_command_mock, exists_mock, get_ambari_properties_mock):
-    properties = MagicMock()
-    get_ambari_properties_mock.return_value = properties
-    exists_mock.return_value = True
-    run_os_command_mock.return_value = 0 , "", ""
-    validate_jdk_mock.return_value = True
-
-    _ambari_server_.unpack_jce_policy()
-    self.assertTrue(run_os_command_mock.called)
-    self.assertTrue(validate_jdk_mock.called)
-
-    # Testing with bad jdk_security_path or jce_zip_path
-    exists_mock.return_value = False
-    try:
-      _ambari_server_.unpack_jce_policy()
-    except FatalException:
-      self.assertTrue(True)
-    exists_mock.return_value = True
-
-    # Testing with bad jdk path
-    validate_jdk_mock.return_value = False
-    try:
-      _ambari_server_.unpack_jce_policy()
-    except FatalException:
-      self.assertTrue(True)
-    validate_jdk_mock.return_value = True
-
-    # Testing with return code distinct to 0 for run_os_command
-    run_os_command_mock.return_value = 3 , "", ""
-    try:
-      _ambari_server_.unpack_jce_policy()
-    except FatalException:
-      self.assertTrue(True)
-    run_os_command_mock.return_value = 0 , "", ""
-
-    # Testing with an error produced by run_os_command
-    run_os_command_mock.reset_mock()
-    run_os_command_mock.side_effect = FatalException(1, "The command fails.")
-    try:
-      _ambari_server_.unpack_jce_policy()
-    except FatalException:
-      self.assertTrue(True)
-
-  @patch("os.path.exists")
-  @patch.object(_ambari_server_, "run_os_command")
-  @patch("os.path.split")
-  @patch.object(_ambari_server_, "unpack_jce_policy")
-  @patch.object(_ambari_server_, "get_ambari_properties")
-  @patch.object(_ambari_server_, "search_file")
-  @patch("__builtin__.open")
-  def test_setup_jce_policy(self, open_mock, search_file_mock, get_ambari_properties_mock, unpack_jce_policy_mock, split_mock, run_os_command_mock, exists_mock):
-    exists_mock.return_value = True
-    run_os_command_mock.return_value = 0 , "", ""
-    properties = MagicMock()
-    unpack_jce_policy_mock.return_value = 0
-    get_ambari_properties_mock.return_value = properties
-    conf_file = 'etc/ambari-server/conf/ambari.properties'
-    search_file_mock.return_value = conf_file
-    split_mock.return_value = [_ambari_server_.configDefaults.SERVER_RESOURCES_DIR, 'UnlimitedJCEPolicyJDK7.zip']
-
-    path = '/path/to/JCEPolicy.zip'
-    copy_cmd = 'cp {0} {1}'.format(path, _ambari_server_.configDefaults.SERVER_RESOURCES_DIR)
-
-    _ambari_server_.setup_jce_policy(path)
-    run_os_command_mock.assert_called_with(copy_cmd)
-    self.assertTrue(unpack_jce_policy_mock.called)
-    self.assertTrue(get_ambari_properties_mock.called)
-    self.assertTrue(properties.store.called)
-
-    # Testing with bad path
-    exists_mock.return_value = False
-    try:
-      _ambari_server_.setup_jce_policy(path)
-    except FatalException:
-      self.assertTrue(True)
-    exists_mock.return_value = True
-
-    # Testing with return code distinct to 0 for run_os_command
-    run_os_command_mock.return_value = 2, "", "Fail"
-    try:
-      _ambari_server_.setup_jce_policy(path)
-    except FatalException:
-      self.assertTrue(True)
-    run_os_command_mock.return_value = 0, "", ""
-
-    # Testing with an error produced by run_os_command
-    run_os_command_mock.reset_mock()
-    run_os_command_mock.side_effect = FatalException(1, "The command fails.")
-    try:
-      _ambari_server_.setup_jce_policy(path)
-    except FatalException:
-      self.assertTrue(True)
-    run_os_command_mock.return_value = 0, "", ""
-
-    # Testing with an error produced by Properties.store function
-    properties.store.side_effect = Exception("Invalid file.")
-    try:
-      _ambari_server_.setup_jce_policy(path)
-    except Exception:
-      self.assertTrue(True)
-    properties.reset_mock()
-
-    # Testing with an error produced by unpack_jce_policy
-    unpack_jce_policy_mock.side_effect = FatalException(1, "Can not install JCE policy")
-    try:
-      _ambari_server_.setup_jce_policy(path)
-    except FatalException:
-      self.assertTrue(True)
-
   @patch("ambari_commons.firewall.run_os_command")
   @patch.object(OSCheck, "get_os_family")
   @patch.object(OSCheck, "get_os_type")
@@ -2559,8 +2446,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
   @patch.object(_ambari_server_, "extract_views")
   @patch.object(_ambari_server_, "adjust_directory_permissions")
   @patch.object(_ambari_server_, 'read_ambari_user')
-  @patch.object(_ambari_server_, "unpack_jce_policy")
-  def test_setup(self, unpack_jce_policy_mock, read_ambari_user_mock, adjust_dirs_mock, extract_views_mock, proceedJDBCProperties_mock, is_server_runing_mock, is_root_mock, store_local_properties_mock,
+  def test_setup(self, read_ambari_user_mock, adjust_dirs_mock, extract_views_mock, proceedJDBCProperties_mock, is_server_runing_mock, is_root_mock, store_local_properties_mock,
                  is_local_database_mock, store_remote_properties_mock,
                  setup_remote_db_mock, check_selinux_mock, check_jdbc_drivers_mock, check_ambari_user_mock,
                  check_postgre_up_mock, setup_db_mock, configure_postgres_mock,
@@ -2579,7 +2465,6 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
     run_os_command_mock.return_value = 3,"",""
     extract_views_mock.return_value = 0
-    unpack_jce_policy_mock.return_value = 0
 
     def reset_mocks():
       is_jdbc_user_changed_mock.reset_mock()
@@ -2689,19 +2574,6 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     self.assertFalse(check_selinux_mock.called)
     self.assertFalse(check_ambari_user_mock.called)
 
-
-    # Test that unpack_jce_policy is called
-    reset_mocks()
-    _ambari_server_.setup(args)
-    self.assertTrue(unpack_jce_policy_mock.called)
-
-    # Testing with an error produced by unpack_jce_policy
-    unpack_jce_policy_mock.side_effect = FatalException(1, "Can not install JCE policy")
-    try:
-      _ambari_server_.setup(args)
-    except FatalException:
-      self.assertTrue(True)
-
   @patch.object(_ambari_server_, "get_remote_script_line")
   @patch.object(_ambari_server_, "is_server_runing")
   @patch.object(_ambari_server_, "get_YN_input")
@@ -4086,8 +3958,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
   @patch.object(_ambari_server_, "configure_os_settings")
   @patch('__builtin__.raw_input')
   @patch.object(_ambari_server_, "check_selinux")
-  @patch.object(_ambari_server_, "unpack_jce_policy")
-  def test_setup_remote_db_wo_client(self, unpack_jce_policy_mock, check_selinux_mock, raw_input, configure_os_settings_mock,
+  def test_setup_remote_db_wo_client(self, check_selinux_mock, raw_input, configure_os_settings_mock,
                                      download_jdk_mock, check_ambari_user_mock, is_root_mock,
                                      check_jdbc_drivers_mock, is_local_db_mock,
                                      store_remote_properties_mock, get_db_cli_tool_mock, get_YN_input,
@@ -4111,7 +3982,6 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     download_jdk_mock.return_value = 0
     configure_os_settings_mock.return_value = 0
     verify_setup_allowed_method.return_value = 0
-    unpack_jce_policy_mock.return_value = 0
 
     try:
       _ambari_server_.setup(args)
@@ -5537,8 +5407,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
   @patch.object(_ambari_server_, "adjust_directory_permissions")
   @patch("sys.exit")
   @patch('__builtin__.raw_input')
-  @patch.object(_ambari_server_, "unpack_jce_policy")
-  def test_ambariServerSetupWithCustomDbName(self, unpack_jce_policy_mock, raw_input, exit_mock, adjust_dirs_mock, extract_views_mock, store_password_file_mock,
+  def test_ambariServerSetupWithCustomDbName(self, raw_input, exit_mock, adjust_dirs_mock, extract_views_mock, store_password_file_mock,
                                              get_is_secure_mock, setup_db_mock, is_root_mock, is_local_database_mock,
                                              check_selinux_mock, check_jdbc_drivers_mock, check_ambari_user_mock,
                                              check_postgre_up_mock, configure_postgres_mock,
@@ -5569,7 +5438,6 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     get_os_type_mock.return_value = ""
     get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
     run_os_command_mock.return_value = 3,"",""
-    unpack_jce_policy_mock.return_value = 0
 
     new_db = "newDBName"
     args.dbms = "postgres"


[07/14] ambari git commit: AMBARI-9247. Wrong stack version of Service installed (ncole)

Posted by yu...@apache.org.
AMBARI-9247. Wrong stack version of Service installed (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/84bfe433
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/84bfe433
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/84bfe433

Branch: refs/heads/2.0-preview
Commit: 84bfe4334531fd049281ee869128a09cac06f6ee
Parents: 049b692
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Jan 21 15:05:10 2015 -0500
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Wed Jan 21 14:16:22 2015 -0800

----------------------------------------------------------------------
 .../server/api/services/AmbariMetaInfo.java     |  8 ++--
 .../apache/ambari/server/stack/StackModule.java | 13 ++++---
 .../ambari/server/state/RepositoryInfo.java     | 27 ++++++++++---
 .../server/state/stack/LatestRepoCallable.java  |  3 +-
 .../server/api/services/AmbariMetaInfoTest.java | 18 ++++++++-
 .../server/upgrade/StackUpgradeUtilTest.java    | 40 ++++++++++----------
 6 files changed, 70 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/84bfe433/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
index 1b7e2fe..897cf52 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
@@ -44,17 +44,17 @@ import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.customactions.ActionDefinitionManager;
 import org.apache.ambari.server.events.AlertDefinitionRegistrationEvent;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
+import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.metadata.AgentAlertDefinitions;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
-import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.orm.dao.MetainfoDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.MetainfoEntity;
 import org.apache.ambari.server.stack.StackContext;
 import org.apache.ambari.server.stack.StackDirectory;
+import org.apache.ambari.server.stack.StackManager;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.stack.StackManager;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.DependencyInfo;
 import org.apache.ambari.server.state.OperatingSystemInfo;
@@ -766,10 +766,12 @@ public class AmbariMetaInfo {
       entity.setMetainfoName(metaKey);
       entity.setMetainfoValue(newBaseUrl);
 
-      if (null != ri.getDefaultBaseUrl() && newBaseUrl.equals(ri.getDefaultBaseUrl())) {
+      // !!! need a way to remove
+      if (newBaseUrl.equals("")) {
         metaInfoDAO.remove(entity);
       } else {
         metaInfoDAO.merge(entity);
+        ri.setBaseUrlFromSaved(true);
       }
     }
   }
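
A note on the hunk above: the old code dropped the saved override whenever the incoming URL happened to equal the stack's default base URL, which is exactly the ambiguity AMBARI-9247 fixes. The new rule is explicit: an empty string means "forget the saved URL"; anything else is persisted and the in-memory RepositoryInfo is flagged as saved. A minimal sketch of that rule, with a HashMap standing in for MetainfoDAO (class and method names here are illustrative, not Ambari API):

  import java.util.HashMap;
  import java.util.Map;

  /** Sketch only: the HashMap stands in for MetainfoDAO. */
  public class RepoUrlOverrideSketch {
    static Map<String, String> store = new HashMap<String, String>();

    // Returns true when the caller should also run ri.setBaseUrlFromSaved(true).
    static boolean updateRepoBaseURL(String metaKey, String newBaseUrl) {
      if (newBaseUrl.equals("")) {
        store.remove(metaKey);       // "" is the explicit "drop the override" signal
        return false;
      }
      store.put(metaKey, newBaseUrl);
      return true;                   // override persisted; flag the RepositoryInfo
    }

    public static void main(String[] args) {
      String key = "repo:/HDP/2.1.1/redhat6/HDP-2.1.1:baseurl";  // key format from the test below
      System.out.println(updateRepoBaseURL(key, "http://mirror.example.com/hdp"));  // true
      System.out.println(updateRepoBaseURL(key, ""));                               // false
      System.out.println(store.isEmpty());                                          // true
    }
  }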

http://git-wip-us.apache.org/repos/asf/ambari/blob/84bfe433/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
index 933f187..8da2ff9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
@@ -18,6 +18,12 @@
 
 package org.apache.ambari.server.stack;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.state.RepositoryInfo;
@@ -29,12 +35,6 @@ import org.apache.ambari.server.state.stack.StackMetainfoXml;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
 /**
  * Stack module which provides all functionality related to parsing and fully
  * resolving stacks from the stack definition.
@@ -561,6 +561,7 @@ public class StackModule extends BaseModule<StackModule, StackInfo> {
 
     if (null != updatedUrl) {
       ri.setBaseUrl(updatedUrl);
+      ri.setBaseUrlFromSaved(true);
     }
 
     if (LOG.isDebugEnabled()) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/84bfe433/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
index 5347682..29776ed 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
@@ -28,6 +28,7 @@ public class RepositoryInfo {
   private String mirrorsList;
   private String defaultBaseUrl;
   private String latestBaseUrl;
+  private boolean baseSaved = false;
 
   /**
    * @return the baseUrl
@@ -98,7 +99,7 @@ public class RepositoryInfo {
   public void setMirrorsList(String mirrorsList) {
     this.mirrorsList = mirrorsList;
   }
-  
+
   /**
    * @return the default base url
    */
@@ -119,14 +120,28 @@ public class RepositoryInfo {
   public String getLatestBaseUrl() {
     return latestBaseUrl;
   }
-  
+
   /**
    * @param url the latest determined base url
    */
   public void setLatestBaseUrl(String url) {
     latestBaseUrl = url;
-  }  
-  
+  }
+
+  /**
+   * @return if the base url was from a saved value
+   */
+  public boolean isBaseUrlFromSaved() {
+    return baseSaved;
+  }
+
+  /**
+   * Sets if the base url was from a saved value
+   */
+  public void setBaseUrlFromSaved(boolean saved) {
+    baseSaved = saved;
+  }
+
   @Override
   public String toString() {
     return "[ repoInfo: "
@@ -137,8 +152,8 @@ public class RepositoryInfo {
         + ", mirrorsList=" + mirrorsList
         + " ]";
   }
-  
-  
+
+
   public RepositoryResponse convertToResponse()
   {
     return new RepositoryResponse(getBaseUrl(), getOsType(), getRepoId(),

http://git-wip-us.apache.org/repos/asf/ambari/blob/84bfe433/ambari-server/src/main/java/org/apache/ambari/server/state/stack/LatestRepoCallable.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/LatestRepoCallable.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/LatestRepoCallable.java
index 608637c..373be33 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/LatestRepoCallable.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/LatestRepoCallable.java
@@ -126,8 +126,7 @@ public class LatestRepoCallable implements Callable<Void> {
               }
 
               ri.setLatestBaseUrl(baseUrl);
-              if (ri.getBaseUrl() != null
-                  && ri.getBaseUrl().equals(ri.getDefaultBaseUrl())) {
+              if (ri.getBaseUrl() != null && !ri.isBaseUrlFromSaved()) {
                 // Override baseUrl with the latestBaseUrl.
                 ri.setBaseUrl(baseUrl);
               }
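
This is the consumer side of the new flag: StackModule (above) marks a RepositoryInfo as saved whenever a stored URL is applied during stack resolution, and LatestRepoCallable now lets the fetched "latest" URL win only when no saved value is in play. A trimmed-down sketch of that lifecycle; RepoInfoSketch is a stand-in, not the real RepositoryInfo:

  /** Stand-in for RepositoryInfo, reduced to what the override decision needs. */
  class RepoInfoSketch {
    private String baseUrl;
    private String latestBaseUrl;
    private boolean baseSaved = false;  // set when a stored/operator URL was applied

    void setBaseUrl(String url) { baseUrl = url; }
    String getBaseUrl() { return baseUrl; }
    String getLatestBaseUrl() { return latestBaseUrl; }
    void setBaseUrlFromSaved(boolean saved) { baseSaved = saved; }
    boolean isBaseUrlFromSaved() { return baseSaved; }

    /** Mirrors the new condition in LatestRepoCallable. */
    void applyLatest(String latestUrl) {
      latestBaseUrl = latestUrl;
      // The old check ("baseUrl equals defaultBaseUrl") could not tell a stack
      // default apart from an operator-saved URL that happened to match it;
      // the flag states the intent directly.
      if (getBaseUrl() != null && !isBaseUrlFromSaved()) {
        setBaseUrl(latestUrl);
      }
    }
  }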

http://git-wip-us.apache.org/repos/asf/ambari/blob/84bfe433/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
index 5579db3..c0ff4a6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
@@ -42,7 +42,6 @@ import java.util.Set;
 import javax.persistence.EntityManager;
 import javax.xml.bind.JAXBException;
 
-import com.google.gson.Gson;
 import junit.framework.Assert;
 
 import org.apache.ambari.server.AmbariException;
@@ -82,6 +81,8 @@ import org.apache.ambari.server.state.stack.MetricDefinition;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
+import org.easymock.Capture;
+import org.easymock.EasyMock;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -89,6 +90,7 @@ import org.junit.rules.TemporaryFolder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.gson.Gson;
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
@@ -116,7 +118,6 @@ public class AmbariMetaInfoTest {
   private static final String NON_EXT_VALUE = "XXX";
 
   private static final int REPOS_CNT = 3;
-  private static final int STACKS_NAMES_CNT = 2;
   private static final int PROPERTIES_CNT = 62;
   private static final int OS_CNT = 4;
 
@@ -272,9 +273,22 @@ public class AmbariMetaInfoTest {
       }
     }
 
+    Capture<MetainfoEntity> c = new Capture<MetainfoEntity>();
+
+    metainfoDAO = ambariMetaInfo.metaInfoDAO;
+    reset(metainfoDAO);
+    reset(entity);
+    expect(metainfoDAO.findByKey("repo:/HDP/2.1.1/redhat6/HDP-2.1.1:baseurl")).andReturn(entity).atLeastOnce();
+    expect(metainfoDAO.merge(EasyMock.capture(c))).andReturn(entity).atLeastOnce();
+    replay(metainfoDAO, entity);
+
     // Reset the database with the original baseUrl
     ambariMetaInfo.updateRepoBaseURL(STACK_NAME_HDP, "2.1.1", "redhat6",
             HDP_REPO_ID, prevBaseUrl);
+
+    assertEquals(prevBaseUrl, c.getValue().getMetainfoValue());
+    assertTrue(repoInfo.isBaseUrlFromSaved());
+
   }
 
   @Test
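
The reworked test is a compact example of EasyMock's capture pattern: rather than only asserting that merge() was called, it captures the MetainfoEntity handed to the mock and asserts on its stored value. The same pattern in isolation, assuming the EasyMock 3.x-era API this test already uses (later releases prefer EasyMock.newCapture() to the Capture constructor):

  import static org.easymock.EasyMock.*;

  import org.easymock.Capture;
  import org.junit.Assert;
  import org.junit.Test;

  public class CaptureSketchTest {
    public interface Dao { String merge(String entity); }

    @Test
    public void mergeReceivesExpectedValue() {
      Dao dao = createMock(Dao.class);
      Capture<String> captured = new Capture<String>();

      expect(dao.merge(capture(captured))).andReturn("ok").atLeastOnce();
      replay(dao);

      dao.merge("http://prev.base.url");   // stands in for updateRepoBaseURL(...)

      verify(dao);
      Assert.assertEquals("http://prev.base.url", captured.getValue());
    }
  }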

http://git-wip-us.apache.org/repos/asf/ambari/blob/84bfe433/ambari-server/src/test/java/org/apache/ambari/server/upgrade/StackUpgradeUtilTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/StackUpgradeUtilTest.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/StackUpgradeUtilTest.java
index 64de35d..35650a1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/StackUpgradeUtilTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/StackUpgradeUtilTest.java
@@ -42,57 +42,57 @@ import com.google.inject.persist.PersistService;
 public class StackUpgradeUtilTest {
 
   private Injector injector;
-  
+
   @Before
   public void setup() throws Exception {
     injector = Guice.createInjector(new InMemoryDefaultTestModule());
     injector.getInstance(GuiceJpaInitializer.class);
   }
-  
+
   @After
   public void teardown() throws Exception {
     injector.getInstance(PersistService.class).stop();
   }
-  
+
   private void reset(String stackName, String stackVersion) throws Exception {
     AmbariMetaInfo ami = injector.getInstance(AmbariMetaInfo.class);
-    
+
     for (Entry<String, List<RepositoryInfo>> entry : ami.getRepository(stackName, stackVersion).entrySet()) {
       for (RepositoryInfo ri : entry.getValue()) {
         if (-1 == ri.getRepoId().indexOf("epel")) {
           ami.updateRepoBaseURL(stackName, stackVersion,
-              ri.getOsType(), ri.getRepoId(), ri.getDefaultBaseUrl());
+              ri.getOsType(), ri.getRepoId(), "");
         }
       }
     }
-    
+
   }
-  
+
   @Test
   public void testUpgradeStack() throws Exception {
     StackUpgradeUtil stackUpgradeUtil = injector.getInstance(StackUpgradeUtil.class);
-    
+
     String stackName = "HDP";
     String stackVersion = "1.3.0";
     String localRepoUrl = "http://foo.bar";
-    
+
     // check updating all
     stackUpgradeUtil.updateLocalRepo(stackName, stackVersion, localRepoUrl, null);
-    
+
     MetainfoDAO dao = injector.getInstance(MetainfoDAO.class);
-    
+
     Collection<MetainfoEntity> entities = dao.findAll();
     Assert.assertTrue(entities.size() > 0);
-    
+
     for (MetainfoEntity entity : entities) {
       Assert.assertTrue(entity.getMetainfoName().startsWith("repo:/HDP/1.3.0/"));
       Assert.assertEquals(localRepoUrl, entity.getMetainfoValue());
     }
-    
+
     reset (stackName, stackVersion);
     entities = dao.findAll();
-    Assert.assertTrue(0 == entities.size());
-    
+    Assert.assertEquals(0, entities.size());
+
     // check updating only centos6
     stackUpgradeUtil.updateLocalRepo(stackName, stackVersion, localRepoUrl, "centos6");
 
@@ -106,7 +106,7 @@ public class StackUpgradeUtilTest {
     reset (stackName, stackVersion);
     entities = dao.findAll();
     Assert.assertTrue(0 == entities.size());
-    
+
     // check updating only centos6 and centos5
     stackUpgradeUtil.updateLocalRepo(stackName, stackVersion, localRepoUrl, "centos6,centos5");
 
@@ -118,7 +118,7 @@ public class StackUpgradeUtilTest {
           entity.getMetainfoName().startsWith("repo:/HDP/1.3.0/centos5"));
       Assert.assertEquals(localRepoUrl, entity.getMetainfoValue());
     }
-    
+
     // verify that a change to centos6 also changes redhat6
     localRepoUrl = "http://newfoo.bar";
     stackUpgradeUtil.updateLocalRepo(stackName, stackVersion, localRepoUrl, "centos6");
@@ -138,8 +138,8 @@ public class StackUpgradeUtilTest {
     }
     Assert.assertTrue(foundCentos6);
     Assert.assertTrue(foundRedhat6);
-    
+
   }
-  
-  
+
+
 }
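
Seen next to the AmbariMetaInfo change earlier in this series, the reset() rewrite above is the same sentinel in action: passing "" to updateRepoBaseURL deletes the saved metainfo rows, which is why the test can now assert dao.findAll() is empty after reset. A short usage-style sketch of that expectation, reusing the illustrative RepoUrlOverrideSketch from above (not Ambari API):

  public class ResetSketch {
    public static void main(String[] args) {
      RepoUrlOverrideSketch.updateRepoBaseURL(
          "repo:/HDP/1.3.0/centos6/HDP-1.3.0:baseurl", "http://foo.bar");
      // reset(): empty string removes every saved override instead of
      // rewriting it to the default base URL as the old test did.
      RepoUrlOverrideSketch.updateRepoBaseURL(
          "repo:/HDP/1.3.0/centos6/HDP-1.3.0:baseurl", "");
      System.out.println(RepoUrlOverrideSketch.store.size());  // 0
    }
  }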


[05/14] ambari git commit: AMBARI-9149. Test principal and keytab required for service check should be created as part of kerberos service check action (rlevas)

Posted by yu...@apache.org.
AMBARI-9149. Test principal and keytab required for service check should be created as part of kerberos service check action (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/339e8a76
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/339e8a76
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/339e8a76

Branch: refs/heads/2.0-preview
Commit: 339e8a76a4b43c5ba12953a3cd66558647eda4a9
Parents: 9f29148
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Jan 21 13:51:45 2015 -0500
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Wed Jan 21 12:21:46 2015 -0800

----------------------------------------------------------------------
 .../AmbariManagementControllerImpl.java         |  33 +-
 .../server/controller/KerberosHelper.java       | 876 ++++++++++++-------
 .../internal/RequestStageContainer.java         |  24 +-
 .../KERBEROS/1.10.3-10/kerberos.json            |  17 +
 .../main/resources/stacks/HDP/2.2/kerberos.json |   2 +-
 .../AmbariCustomCommandExecutionHelperTest.java |  49 +-
 .../BackgroundCustomCommandExecutionTest.java   |  16 +-
 .../server/controller/KerberosHelperTest.java   | 327 ++++++-
 8 files changed, 992 insertions(+), 352 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/339e8a76/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index ae57d1f..dd18e8d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -2823,12 +2823,31 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       actionExecutionHelper.validateAction(actionRequest);
     }
 
+    long requestId = actionManager.getNextRequestId();
+    RequestStageContainer requestStageContainer = new RequestStageContainer(
+        requestId,
+        null,
+        requestFactory,
+        actionManager,
+        actionRequest);
+
+    // If the request is to perform the Kerberos service check, set up the stages to
+    // ensure that the (cluster-level) smoke user principal and keytab is available on all hosts
+    if (Role.KERBEROS_SERVICE_CHECK.name().equals(actionRequest.getCommandName())) {
+      Map<String, Collection<String>> serviceComponentFilter = new HashMap<String, Collection<String>>();
+      Collection<String> identityFilter = Arrays.asList("/smokeuser");
+
+      serviceComponentFilter.put("KERBEROS", null);
+
+      requestStageContainer = kerberosHelper.ensureIdentities(cluster, null, serviceComponentFilter,
+          identityFilter, requestStageContainer);
+    }
+
     ExecuteCommandJson jsons = customCommandExecutionHelper.getCommandJson(
         actionExecContext, cluster);
 
-    Stage stage = createNewStage(0, cluster, actionManager.getNextRequestId(), requestContext,
-      jsons.getClusterHostInfo(), jsons.getCommandParamsForStage(),
-        jsons.getHostParamsForStage());
+    Stage stage = createNewStage(requestStageContainer.getLastStageId(), cluster, requestId, requestContext,
+        jsons.getClusterHostInfo(), jsons.getCommandParamsForStage(), jsons.getHostParamsForStage());
 
     if (actionRequest.isCommand()) {
       customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, requestProperties, false);
@@ -2848,11 +2867,11 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     List<Stage> stages = rg.getStages();
 
     if (stages != null && !stages.isEmpty()) {
-      actionManager.sendActions(stages, actionRequest);
-      return getRequestStatusResponse(stage.getRequestId());
-    } else {
-      throw new AmbariException("Stage was not created");
+      requestStageContainer.addStages(stages);
     }
+
+    requestStageContainer.persist();
+    return requestStageContainer.getRequestStatusResponse();
   }
 
   @Override
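
The filter arguments carry the semantics documented on ensureIdentities below: a null serviceComponentFilter or identityFilter means "no filtering", an empty one means "nothing is relevant", and a null component collection under a service key covers every component of that service. A hedged sketch of how the service-check branch builds its filters (types simplified; the real call also threads the RequestStageContainer through):

  import java.util.Arrays;
  import java.util.Collection;
  import java.util.HashMap;
  import java.util.Map;

  public class ServiceCheckFilterSketch {
    public static void main(String[] args) {
      // Only the KERBEROS service is relevant; the null value means
      // "every component of that service", per the ensureIdentities contract.
      Map<String, Collection<String>> serviceComponentFilter =
          new HashMap<String, Collection<String>>();
      serviceComponentFilter.put("KERBEROS", null);

      // Only the cluster-level smoke-user identity must exist on all hosts.
      Collection<String> identityFilter = Arrays.asList("/smokeuser");

      // In AmbariManagementControllerImpl this feeds:
      //   kerberosHelper.ensureIdentities(cluster, null, serviceComponentFilter,
      //       identityFilter, requestStageContainer);
      System.out.println(serviceComponentFilter + " / " + identityFilter);
    }
  }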

http://git-wip-us.apache.org/repos/asf/ambari/blob/339e8a76/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
index a425e95..6620577 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
@@ -115,6 +115,10 @@ public class KerberosHelper {
    */
   private Handler disableKerberosHandler = new DisableKerberosHandler();
 
+  /**
+   * The Handler implementation that provides the logic to ensure the existence of principals and keytabs
+   */
+  private Handler createPrincipalsAndKeytabsHandler = new CreatePrincipalsAndKeytabsHandler();
 
   /**
    * Toggles Kerberos security to enable it or remove it depending on the state of the cluster.
@@ -144,98 +148,49 @@ public class KerberosHelper {
                                               RequestStageContainer requestStageContainer)
       throws AmbariException {
 
-    if (cluster == null) {
-      String message = "The cluster object is not available";
-      LOG.error(message);
-      throw new AmbariException(message);
-    }
-
-    Config configClusterEnv = cluster.getDesiredConfigByType("cluster-env");
-    if (configClusterEnv == null) {
-      String message = "The 'cluster-env' configuration is not available";
-      LOG.error(message);
-      throw new AmbariException(message);
-    }
-
-    Map<String, String> clusterEnvProperties = configClusterEnv.getProperties();
-    if (clusterEnvProperties == null) {
-      String message = "The 'cluster-env' configuration properties are not available";
-      LOG.error(message);
-      throw new AmbariException(message);
-    }
+    KerberosDetails kerberosDetails = getKerberosDetails(cluster);
 
-    String securityEnabled = clusterEnvProperties.get("security_enabled");
-    if ((securityEnabled == null) || securityEnabled.isEmpty()) {
-      LOG.warn("Missing 'securityEnabled' property of cluster-env, unable to determine the cluster's security state. This may be ok.");
+    if (kerberosDetails.isSecurityEnabled()) {
+      LOG.info("Configuring Kerberos for realm {} on cluster, {}", kerberosDetails.getDefaultRealm(), cluster.getClusterName());
+      requestStageContainer = handle(cluster, kerberosDescriptor, kerberosDetails, null, null, requestStageContainer, enableKerberosHandler);
     } else {
-      String defaultRealm = clusterEnvProperties.get("kerberos_domain");
-
-      Config configKrb5Conf = cluster.getDesiredConfigByType("krb5-conf");
-      if (configKrb5Conf == null) {
-        String message = "The 'krb5-conf' configuration is not available";
-        LOG.error(message);
-        throw new AmbariException(message);
-      }
-
-      Map<String, String> krb5ConfProperties = configKrb5Conf.getProperties();
-      if (krb5ConfProperties == null) {
-        String message = "The 'krb5-conf' configuration properties are not available";
-        LOG.error(message);
-        throw new AmbariException(message);
-      }
-
-      Config configKerberosEnv = cluster.getDesiredConfigByType("kerberos-env");
-      if (configKerberosEnv == null) {
-        String message = "The 'kerberos-env' configuration is not available";
-        LOG.error(message);
-        throw new AmbariException(message);
-      }
-
-      Map<String, String> kerberosEnvProperties = configKerberosEnv.getProperties();
-      if (kerberosEnvProperties == null) {
-        String message = "The 'kerberos-env' configuration properties are not available";
-        LOG.error(message);
-        throw new AmbariException(message);
-      }
-
-      KDCType kdcType = null;
-      String kdcTypeProperty = kerberosEnvProperties.get("kdc_type");
-      if (kdcTypeProperty == null) {
-        // TODO: (rlevas) Only pull from kerberos-env, this is only for transitional purposes (AMBARI 9121)
-        kdcTypeProperty = krb5ConfProperties.get("kdc_type");
-      }
-      if (kdcTypeProperty != null) {
-        try {
-          kdcType = KDCType.translate(kdcTypeProperty);
-        } catch (IllegalArgumentException e) {
-          String message = String.format("Invalid 'kdc_type' value: %s", kdcTypeProperty);
-          LOG.error(message);
-          throw new AmbariException(message);
-        }
-      }
-
-      if (kdcType == null) {
-        // Set the KDCType to the the MIT_KDC as a fallback.
-        kdcType = KDCType.MIT_KDC;
-      }
-
-      if ("true".equalsIgnoreCase(securityEnabled)) {
-        LOG.info("Configuring Kerberos for realm {} on cluster, {}", defaultRealm, cluster.getClusterName());
-        requestStageContainer = handle(cluster, kerberosDescriptor, defaultRealm, kdcType, kerberosEnvProperties, requestStageContainer, enableKerberosHandler);
-      } else if ("false".equalsIgnoreCase(securityEnabled)) {
-        LOG.info("Disabling Kerberos from cluster, {}", cluster.getClusterName());
-        requestStageContainer = handle(cluster, kerberosDescriptor, defaultRealm, kdcType, kerberosEnvProperties, requestStageContainer, disableKerberosHandler);
-      } else {
-        String message = String.format("Invalid value for `security_enabled` property of cluster-env: %s", securityEnabled);
-        LOG.error(message);
-        throw new AmbariException(message);
-      }
+      LOG.info("Disabling Kerberos from cluster, {}", cluster.getClusterName());
+      requestStageContainer = handle(cluster, kerberosDescriptor, kerberosDetails, null, null, requestStageContainer, disableKerberosHandler);
     }
 
     return requestStageContainer;
   }
 
   /**
+   * Ensures the set of filtered principals and keytabs exist on the cluster.
+   * <p/>
+   * No configurations will be altered as a result of this operation, however principals and keytabs
+   * may be updated or created.
+   *
+   * @param cluster                the relevant Cluster
+   * @param kerberosDescriptor     a KerberosDescriptor containing updates to the descriptor already
+   *                               configured for the cluster
+   * @param serviceComponentFilter a Map of service names to component names indicating the relevant
+   *                               set of services and components - if null, no filter is relevant;
+   *                               if empty, the filter indicates no relevant services or components
+   * @param identityFilter         a Collection of identity names indicating the relevant identities -
+   *                               if null, no filter is relevant; if empty, the filter indicates no
+   *                               relevant identities
+   * @param requestStageContainer  a RequestStageContainer to place generated stages, if needed -
+   *                               if null a new RequestStageContainer will be created.
+   * @return the updated or a new RequestStageContainer containing the stages that need to be
+   * executed to complete this task; or null if no stages need to be executed.
+   * @throws AmbariException
+   */
+  public RequestStageContainer ensureIdentities(Cluster cluster, KerberosDescriptor kerberosDescriptor,
+                                                Map<String, Collection<String>> serviceComponentFilter,
+                                                Collection<String> identityFilter,
+                                                RequestStageContainer requestStageContainer) throws AmbariException {
+    return handle(cluster, kerberosDescriptor, getKerberosDetails(cluster), serviceComponentFilter, identityFilter,
+        requestStageContainer, createPrincipalsAndKeytabsHandler);
+  }
+
+  /**
    * Performs operations needed to enable to disable Kerberos on the relevant cluster.
    * <p/>
    * Iterates through the components installed on the relevant cluster and attempts to enable or
@@ -244,13 +199,17 @@ public class KerberosHelper {
    * The supplied Handler instance handles the logic on whether this process enables or disables
    * Kerberos.
    *
-   * @param cluster               the relevant Cluster
-   * @param kerberosDescriptor    the (derived) KerberosDescriptor
-   * @param realm                 the default Kerberos realm for the Cluster
-   * @param kdcType               a KDCType declaring the type of the relevant KDC
-   * @param kerberosEnvProperties a MAp of key/value pairs from the kerberos-env configuration
-   * @param requestStageContainer a RequestStageContainer to place generated stages, if needed -
-   *                              if null a new RequestStageContainer will be created.
+   * @param cluster                the relevant Cluster
+   * @param kerberosDescriptor     the (derived) KerberosDescriptor
+   * @param kerberosDetails        a KerberosDetails containing information about relevant Kerberos configuration
+   * @param serviceComponentFilter a Map of service names to component names indicating the relevant
+   *                               set of services and components - if null, no filter is relevant;
+   *                               if empty, the filter indicates no relevant services or components
+   * @param identityFilter         a Collection of identity names indicating the relevant identities -
+   *                               if null, no filter is relevant; if empty, the filter indicates no
+   *                               relevant identities
+   * @param requestStageContainer  a RequestStageContainer to place generated stages, if needed -
+   *                               if null a new RequestStageContainer will be created.
    * @return the updated or a new RequestStageContainer containing the stages that need to be
    * executed to complete this task; or null if no stages need to be executed.
    * @throws AmbariException
@@ -258,8 +217,10 @@ public class KerberosHelper {
   @Transactional
   private RequestStageContainer handle(Cluster cluster,
                                        KerberosDescriptor kerberosDescriptor,
-                                       String realm, KDCType kdcType,
-                                       Map<String, String> kerberosEnvProperties, RequestStageContainer requestStageContainer,
+                                       KerberosDetails kerberosDetails,
+                                       Map<String, Collection<String>> serviceComponentFilter,
+                                       Collection<String> identityFilter,
+                                       RequestStageContainer requestStageContainer,
                                        Handler handler) throws AmbariException {
 
     Map<String, Service> services = cluster.getServices();
@@ -330,6 +291,7 @@ public class KerberosHelper {
                 // Add the current hostname under "host" and "hostname"
                 generalProperties.put("host", hostname);
                 generalProperties.put("hostname", hostname);
+                generalProperties.put("cluster_name", clusterName);
 
                 if (configurations.get("") == null) {
                   configurations.put("", generalProperties);
@@ -342,47 +304,55 @@ public class KerberosHelper {
                 // keytab files, and configurations need to be created or updated.
                 for (ServiceComponentHost sch : serviceComponentHosts) {
                   String serviceName = sch.getServiceName();
-                  KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
-
-                  if (serviceDescriptor != null) {
-                    KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(sch.getServiceComponentName());
-                    List<KerberosIdentityDescriptor> serviceIdentities = serviceDescriptor.getIdentities(true);
-
-                    if (componentDescriptor != null) {
-                      List<KerberosIdentityDescriptor> componentIdentities = componentDescriptor.getIdentities(true);
-                      int identitiesAdded = 0;
-
-                      // Test to see if this component should be process by querying the handler
-                      if (handler.shouldProcess(desiredSecurityState, sch)) {
-                        // Calculate the set of configurations to update and replace any variables
-                        // using the previously calculated Map of configurations for the host.
-                        mergeConfigurations(kerberosConfigurations,
-                            componentDescriptor.getConfigurations(true), configurations);
-
-                        // Lazily create the KerberosActionDataFileBuilder instance...
-                        if (kerberosActionDataFileBuilder == null) {
-                          kerberosActionDataFileBuilder = new KerberosActionDataFileBuilder(indexFile);
-                        }
 
-                        // Add service-level principals (and keytabs)
-                        identitiesAdded += addIdentities(kerberosActionDataFileBuilder,
-                            serviceIdentities, sch, configurations);
+                  // If there is no filter or the filter contains the current service name...
+                  if ((serviceComponentFilter == null) || serviceComponentFilter.containsKey(serviceName)) {
+                    Collection<String> componentFilter = (serviceComponentFilter == null) ? null : serviceComponentFilter.get(serviceName);
+                    KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
+
+                    if (serviceDescriptor != null) {
+                      String componentName = sch.getServiceComponentName();
+
+                      // If there is no filter or the filter contains the current component name,
+                      // test to see if this component should be process by querying the handler...
+                      if (((componentFilter == null) || componentFilter.contains(componentName)) && handler.shouldProcess(desiredSecurityState, sch)) {
+                        KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName);
+                        List<KerberosIdentityDescriptor> serviceIdentities = serviceDescriptor.getIdentities(true);
+
+                        if (componentDescriptor != null) {
+                          List<KerberosIdentityDescriptor> componentIdentities = componentDescriptor.getIdentities(true);
+                          int identitiesAdded = 0;
+
+                          // Calculate the set of configurations to update and replace any variables
+                          // using the previously calculated Map of configurations for the host.
+                          mergeConfigurations(kerberosConfigurations,
+                              componentDescriptor.getConfigurations(true), configurations);
+
+                          // Lazily create the KerberosActionDataFileBuilder instance...
+                          if (kerberosActionDataFileBuilder == null) {
+                            kerberosActionDataFileBuilder = new KerberosActionDataFileBuilder(indexFile);
+                          }
+
+                          // Add service-level principals (and keytabs)
+                          identitiesAdded += addIdentities(kerberosActionDataFileBuilder, serviceIdentities,
+                              identityFilter, hostname, serviceName, componentName, configurations);
+
+                          // Add component-level principals (and keytabs)
+                          identitiesAdded += addIdentities(kerberosActionDataFileBuilder, componentIdentities,
+                              identityFilter, hostname, serviceName, componentName, configurations);
 
-                        // Add component-level principals (and keytabs)
-                        identitiesAdded += addIdentities(kerberosActionDataFileBuilder,
-                            componentIdentities, sch, configurations);
+                          if (identitiesAdded > 0) {
+                            serviceComponentHostsToProcess.add(sch);
+                          }
 
-                        if (identitiesAdded > 0) {
-                          serviceComponentHostsToProcess.add(sch);
+                          // Add component-level principals to auth_to_local builder
+                          addIdentities(authToLocalBuilder, componentIdentities, identityFilter, configurations);
                         }
-                      }
 
-                      // Add component-level principals to auth_to_local builder
-                      addIdentities(authToLocalBuilder, componentIdentities, configurations);
+                        // Add service-level principals to auth_to_local builder
+                        addIdentities(authToLocalBuilder, serviceIdentities, identityFilter, configurations);
+                      }
                     }
-
-                    // Add service-level principals to auth_to_local builder
-                    addIdentities(authToLocalBuilder, serviceIdentities, configurations);
                   }
                 }
               }
@@ -420,7 +390,7 @@ public class KerberosHelper {
                       "}"
               );
             } else {
-              KerberosOperationHandler operationHandler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kdcType);
+              KerberosOperationHandler operationHandler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kerberosDetails.getKdcType());
 
               if (operationHandler == null) {
                 throw new AmbariException("Failed to get an appropriate Kerberos operation handler.");
@@ -429,7 +399,7 @@ public class KerberosHelper {
                 KerberosCredential kerberosCredentials = KerberosCredential.decrypt(credentials, key);
 
                 try {
-                  operationHandler.open(kerberosCredentials, realm, kerberosEnvProperties);
+                  operationHandler.open(kerberosCredentials, kerberosDetails.getDefaultRealm(), kerberosDetails.getKerberosEnvProperties());
                   if (!operationHandler.testAdministratorCredentials()) {
                     throw new IllegalArgumentException(
                         "Invalid KDC administrator credentials.\n" +
@@ -487,7 +457,7 @@ public class KerberosHelper {
             for (Map.Entry<String, String> entry : configuration.entrySet()) {
               if ("_AUTH_TO_LOCAL_RULES".equals(entry.getValue())) {
                 if (authToLocal == null) {
-                  authToLocal = authToLocalBuilder.generate(realm);
+                  authToLocal = authToLocalBuilder.generate(kerberosDetails.getDefaultRealm());
                 }
 
                 entry.setValue(authToLocal);
@@ -520,16 +490,15 @@ public class KerberosHelper {
         }
 
         // Use the handler implementation to setup the relevant stages.
-        int lastStageId = handler.createStages(cluster, hosts, kerberosConfigurations,
-            clusterHostInfoJson, hostParamsJson, event, roleCommandOrder, realm, kdcType,
-            dataDirectory, requestStageContainer, serviceComponentHostsToProcess);
+        handler.createStages(cluster, hosts, kerberosConfigurations, clusterHostInfoJson,
+            hostParamsJson, event, roleCommandOrder, kerberosDetails, dataDirectory,
+            requestStageContainer, serviceComponentHostsToProcess);
 
         // Add the cleanup stage...
-
         Map<String, String> commandParameters = new HashMap<String, String>();
         commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
 
-        Stage stage = createServerActionStage(++lastStageId,
+        Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
             cluster,
             requestStageContainer.getId(),
             "Finalize Operations",
@@ -549,14 +518,25 @@ public class KerberosHelper {
         for (ServiceComponentHost sch : serviceComponentHostsToProcess) {
           // Update the desired and current states for the ServiceComponentHost
           // using new state information from the the handler implementation
-          sch.setDesiredSecurityState(handler.getNewDesiredSCHSecurityState());
-          sch.setSecurityState(handler.getNewSCHSecurityState());
+          SecurityState newSecurityState;
+
+          newSecurityState = handler.getNewDesiredSCHSecurityState();
+          if (newSecurityState != null) {
+            sch.setDesiredSecurityState(newSecurityState);
+          }
+
+          newSecurityState = handler.getNewSCHSecurityState();
+          if (newSecurityState != null) {
+            sch.setSecurityState(newSecurityState);
+          }
         }
       }
 
       // If all goes well, set all services to _desire_ to be secured or unsecured, depending on handler
-      for (Service service : services.values()) {
-        service.setSecurityState(desiredSecurityState);
+      if (desiredSecurityState != null) {
+        for (Service service : services.values()) {
+          service.setSecurityState(desiredSecurityState);
+        }
       }
     }
 
@@ -564,6 +544,108 @@ public class KerberosHelper {
   }
 
   /**
+   * Gathers the Kerberos-related data from configurations and stores it in a new KerberosDetails
+   * instance.
+   *
+   * @param cluster the relevant Cluster
+   * @return a new KerberosDetails with the collected configuration data
+   * @throws AmbariException
+   */
+  private KerberosDetails getKerberosDetails(Cluster cluster) throws AmbariException {
+    KerberosDetails kerberosDetails = new KerberosDetails();
+
+    if (cluster == null) {
+      String message = "The cluster object is not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    Config configClusterEnv = cluster.getDesiredConfigByType("cluster-env");
+    if (configClusterEnv == null) {
+      String message = "The 'cluster-env' configuration is not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    Map<String, String> clusterEnvProperties = configClusterEnv.getProperties();
+    if (clusterEnvProperties == null) {
+      String message = "The 'cluster-env' configuration properties are not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    String securityEnabled = clusterEnvProperties.get("security_enabled");
+    if ((securityEnabled == null) || securityEnabled.isEmpty()) {
+      String message = "Missing 'securityEnabled' property of cluster-env, unable to determine the cluster's security state";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    if ("true".equalsIgnoreCase(securityEnabled)) {
+      kerberosDetails.setSecurityEnabled(true);
+    } else if ("false".equalsIgnoreCase(securityEnabled)) {
+      kerberosDetails.setSecurityEnabled(false);
+    } else {
+      String message = String.format("Invalid value for `security_enabled` property of cluster-env: %s", securityEnabled);
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    Config configKrb5Conf = cluster.getDesiredConfigByType("krb5-conf");
+    if (configKrb5Conf == null) {
+      String message = "The 'krb5-conf' configuration is not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    Map<String, String> krb5ConfProperties = configKrb5Conf.getProperties();
+    if (krb5ConfProperties == null) {
+      String message = "The 'krb5-conf' configuration properties are not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    Config configKerberosEnv = cluster.getDesiredConfigByType("kerberos-env");
+    if (configKerberosEnv == null) {
+      String message = "The 'kerberos-env' configuration is not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    Map<String, String> kerberosEnvProperties = configKerberosEnv.getProperties();
+    if (kerberosEnvProperties == null) {
+      String message = "The 'kerberos-env' configuration properties are not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    KDCType kdcType = null;
+    String kdcTypeProperty = kerberosEnvProperties.get("kdc_type");
+    if (kdcTypeProperty == null) {
+      // TODO: (rlevas) Only pull from kerberos-env; this is only for transitional purposes (AMBARI-9121)
+      kdcTypeProperty = krb5ConfProperties.get("kdc_type");
+    }
+    if (kdcTypeProperty != null) {
+      try {
+        kdcType = KDCType.translate(kdcTypeProperty);
+      } catch (IllegalArgumentException e) {
+        String message = String.format("Invalid 'kdc_type' value: %s", kdcTypeProperty);
+        LOG.error(message);
+        throw new AmbariException(message);
+      }
+    }
+
+    kerberosDetails.setDefaultRealm(krb5ConfProperties.get("realm"));
+
+    // Set the KDCType to MIT_KDC as a fallback.
+    kerberosDetails.setKdcType((kdcType == null) ? KDCType.MIT_KDC : kdcType);
+
+    kerberosDetails.setKerberosEnvProperties(kerberosEnvProperties);
+
+    return kerberosDetails;
+  }
+
+  /**
    * Creates a temporary directory within the system temporary directory
    * <p/>
    * The resulting directory is to be removed by the caller when desired.
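
The getKerberosDetails(Cluster) method added in the hunk above consolidates reads
from three desired-config types. A minimal sketch of the properties it consumes
(values are illustrative assumptions; only security_enabled is strictly required):

    cluster-env:   security_enabled = "true"         - must be "true" or "false"
    krb5-conf:     realm            = "EXAMPLE.COM"  - becomes the default realm
    kerberos-env:  kdc_type         = "mit-kdc"      - falls back to krb5-conf/kdc_type
                                                       (transitional), then to MIT_KDC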
@@ -684,63 +766,74 @@ public class KerberosHelper {
    *                                      records
    * @param identities                    a List of KerberosIdentityDescriptors to add to the data
    *                                      file
-   * @param sch                           the relevant ServiceComponentHost
+   * @param identityFilter                a Collection of identity names indicating the relevant identities -
+   *                                      if null, no filter is applied and all identities are relevant;
+   *                                      if empty, no identities are relevant
+   * @param hostname                      the relevant hostname
+   * @param serviceName                   the relevant service name
+   * @param componentName                 the relevant component name
     * @param configurations                a Map of configurations to use as replacements for variables
    *                                      in identity fields
    * @return an integer indicating the number of identities added to the data file
    * @throws java.io.IOException if an error occurs while writing a record to the data file
    */
   private int addIdentities(KerberosActionDataFileBuilder kerberosActionDataFileBuilder,
-                            List<KerberosIdentityDescriptor> identities, ServiceComponentHost sch,
-                            Map<String, Map<String, String>> configurations) throws IOException {
+                            Collection<KerberosIdentityDescriptor> identities,
+                            Collection<String> identityFilter, String hostname, String serviceName,
+                            String componentName, Map<String, Map<String, String>> configurations)
+      throws IOException {
     int identitiesAdded = 0;
 
     if (identities != null) {
       for (KerberosIdentityDescriptor identity : identities) {
-        KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
-        String principal = null;
-        String principalType = null;
-        String principalConfiguration = null;
-
-        if (principalDescriptor != null) {
-          principal = KerberosDescriptor.replaceVariables(principalDescriptor.getValue(), configurations);
-          principalType = principalDescriptor.getType().name().toLowerCase();
-          principalConfiguration = KerberosDescriptor.replaceVariables(principalDescriptor.getConfiguration(), configurations);
-        }
-
-        if (principal != null) {
-          KerberosKeytabDescriptor keytabDescriptor = identity.getKeytabDescriptor();
-          String keytabFilePath = null;
-          String keytabFileOwnerName = null;
-          String keytabFileOwnerAccess = null;
-          String keytabFileGroupName = null;
-          String keytabFileGroupAccess = null;
-          String keytabFileConfiguration = null;
-
-          if (keytabDescriptor != null) {
-            keytabFilePath = KerberosDescriptor.replaceVariables(keytabDescriptor.getFile(), configurations);
-            keytabFileOwnerName = KerberosDescriptor.replaceVariables(keytabDescriptor.getOwnerName(), configurations);
-            keytabFileOwnerAccess = KerberosDescriptor.replaceVariables(keytabDescriptor.getOwnerAccess(), configurations);
-            keytabFileGroupName = KerberosDescriptor.replaceVariables(keytabDescriptor.getGroupName(), configurations);
-            keytabFileGroupAccess = KerberosDescriptor.replaceVariables(keytabDescriptor.getGroupAccess(), configurations);
-            keytabFileConfiguration = KerberosDescriptor.replaceVariables(keytabDescriptor.getConfiguration(), configurations);
+        // If there is no filter or the filter contains the current identity's name...
+        if ((identityFilter == null) || identityFilter.contains(identity.getName())) {
+          KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
+          String principal = null;
+          String principalType = null;
+          String principalConfiguration = null;
+
+          if (principalDescriptor != null) {
+            principal = KerberosDescriptor.replaceVariables(principalDescriptor.getValue(), configurations);
+            principalType = principalDescriptor.getType().name().toLowerCase();
+            principalConfiguration = KerberosDescriptor.replaceVariables(principalDescriptor.getConfiguration(), configurations);
           }
 
-          // Append an entry to the action data file builder...
-          kerberosActionDataFileBuilder.addRecord(sch.getHostName(),
-              sch.getServiceName(),
-              sch.getServiceComponentName(),
-              principal,
-              principalType,
-              principalConfiguration,
-              keytabFilePath,
-              keytabFileOwnerName,
-              keytabFileOwnerAccess,
-              keytabFileGroupName,
-              keytabFileGroupAccess,
-              keytabFileConfiguration);
-
-          identitiesAdded++;
+          if (principal != null) {
+            KerberosKeytabDescriptor keytabDescriptor = identity.getKeytabDescriptor();
+            String keytabFilePath = null;
+            String keytabFileOwnerName = null;
+            String keytabFileOwnerAccess = null;
+            String keytabFileGroupName = null;
+            String keytabFileGroupAccess = null;
+            String keytabFileConfiguration = null;
+
+            if (keytabDescriptor != null) {
+              keytabFilePath = KerberosDescriptor.replaceVariables(keytabDescriptor.getFile(), configurations);
+              keytabFileOwnerName = KerberosDescriptor.replaceVariables(keytabDescriptor.getOwnerName(), configurations);
+              keytabFileOwnerAccess = KerberosDescriptor.replaceVariables(keytabDescriptor.getOwnerAccess(), configurations);
+              keytabFileGroupName = KerberosDescriptor.replaceVariables(keytabDescriptor.getGroupName(), configurations);
+              keytabFileGroupAccess = KerberosDescriptor.replaceVariables(keytabDescriptor.getGroupAccess(), configurations);
+              keytabFileConfiguration = KerberosDescriptor.replaceVariables(keytabDescriptor.getConfiguration(), configurations);
+            }
+
+            // Append an entry to the action data file builder...
+            kerberosActionDataFileBuilder.addRecord(
+                hostname,
+                serviceName,
+                componentName,
+                principal,
+                principalType,
+                principalConfiguration,
+                keytabFilePath,
+                keytabFileOwnerName,
+                keytabFileOwnerAccess,
+                keytabFileGroupName,
+                keytabFileGroupAccess,
+                keytabFileConfiguration);
+
+            identitiesAdded++;
+          }
         }
       }
     }
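
The null-versus-empty filter distinction documented above is easy to get backwards.
A standalone sketch of the exact guard used in both addIdentities variants (class
and method names here are illustrative):

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.Collections;

    public class IdentityFilterSketch {
      // Mirrors the guard above: a null filter admits every identity,
      // an empty filter admits none, otherwise only the listed names pass.
      static boolean isRelevant(Collection<String> filter, String identityName) {
        return (filter == null) || filter.contains(identityName);
      }

      public static void main(String[] args) {
        System.out.println(isRelevant(null, "/smokeuser"));                            // true
        System.out.println(isRelevant(Collections.<String>emptyList(), "/smokeuser")); // false
        System.out.println(isRelevant(Arrays.asList("/smokeuser"), "/smokeuser"));     // true
      }
    }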
@@ -753,20 +846,26 @@ public class KerberosHelper {
    *
    * @param authToLocalBuilder the AuthToLocalBuilder to use to build the auth_to_local mapping
    * @param identities         a List of KerberosIdentityDescriptors to process
+   * @param identityFilter     a Collection of identity names indicating the relevant identities -
+   *                           if null, no filter is applied and all identities are relevant;
+   *                           if empty, no identities are relevant
   * @param configurations     a Map of configurations to use as replacements for variables
    *                           in identity fields
    * @throws org.apache.ambari.server.AmbariException
    */
   private void addIdentities(AuthToLocalBuilder authToLocalBuilder,
-                             List<KerberosIdentityDescriptor> identities,
+                             List<KerberosIdentityDescriptor> identities, Collection<String> identityFilter,
                              Map<String, Map<String, String>> configurations) throws AmbariException {
     if (identities != null) {
       for (KerberosIdentityDescriptor identity : identities) {
-        KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
-        if (principalDescriptor != null) {
-          authToLocalBuilder.append(
-              KerberosDescriptor.replaceVariables(principalDescriptor.getValue(), configurations),
-              KerberosDescriptor.replaceVariables(principalDescriptor.getLocalUsername(), configurations));
+        // If there is no filter or the filter contains the current identity's name...
+        if ((identityFilter == null) || identityFilter.contains(identity.getName())) {
+          KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
+          if (principalDescriptor != null) {
+            authToLocalBuilder.append(
+                KerberosDescriptor.replaceVariables(principalDescriptor.getValue(), configurations),
+                KerberosDescriptor.replaceVariables(principalDescriptor.getLocalUsername(), configurations));
+          }
         }
       }
     }
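
Principal values handed to the builder typically carry ${config-type/property}
placeholders (the same syntax that appears in the kerberos.json changes further
down); KerberosDescriptor.replaceVariables resolves them against the configurations
map first. A toy stand-in for intuition only - the real implementation may differ:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class ReplaceVariablesSketch {
      // Resolves ${config-type/property} placeholders against the configurations
      // map; unresolvable placeholders are left untouched in this sketch.
      static String replace(String value, Map<String, Map<String, String>> configurations) {
        Matcher m = Pattern.compile("\\$\\{([^/}]+)/([^}]+)\\}").matcher(value);
        StringBuffer sb = new StringBuffer();
        while (m.find()) {
          Map<String, String> typeProps = configurations.get(m.group(1));
          String v = (typeProps == null) ? null : typeProps.get(m.group(2));
          m.appendReplacement(sb, Matcher.quoteReplacement((v == null) ? m.group() : v));
        }
        m.appendTail(sb);
        return sb.toString();
      }

      public static void main(String[] args) {
        Map<String, Map<String, String>> configurations = new HashMap<String, Map<String, String>>();
        configurations.put("cluster-env", Collections.singletonMap("smokeuser", "ambari-qa"));
        System.out.println(replace("${cluster-env/smokeuser}@EXAMPLE.COM", configurations));
        // prints: ambari-qa@EXAMPLE.COM
      }
    }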
@@ -961,7 +1060,7 @@ public class KerberosHelper {
-   * Handler is an interface that needs to be implemented by toggle handler classes to do the
+   * Handler is an abstract class that needs to be extended by toggle handler classes to do the
    * "right" thing for the task at hand.
    */
-  private interface Handler {
+  private abstract class Handler {
     /**
      * Tests the Service and ServiceComponentHost to see if they are in the appropriate security
      * state to be processed for the relevant task.
@@ -972,29 +1071,32 @@ public class KerberosHelper {
      * state to be processed; otherwise false
      * @throws AmbariException of an error occurs while testing
      */
-    boolean shouldProcess(SecurityState desiredSecurityState, ServiceComponentHost sch) throws AmbariException;
+    abstract boolean shouldProcess(SecurityState desiredSecurityState, ServiceComponentHost sch) throws AmbariException;
 
     /**
      * Returns the new SecurityState to be set as the ServiceComponentHost's _desired_ SecurityState.
      *
-     * @return a SecurityState to be set as the ServiceComponentHost's _desired_ SecurityState
+     * @return a SecurityState to be set as the ServiceComponentHost's _desired_ SecurityState;
+     * or null if no state change is desired
      */
-    SecurityState getNewDesiredSCHSecurityState();
+    abstract SecurityState getNewDesiredSCHSecurityState();
 
     /**
      * Returns the new SecurityState to be set as the ServiceComponentHost's _current_ SecurityState.
      *
-     * @return a SecurityState to be set as the ServiceComponentHost's _current_ SecurityState
+     * @return a SecurityState to be set as the ServiceComponentHost's _current_ SecurityState;
+     * or null if no state change is desired
      */
-    SecurityState getNewSCHSecurityState();
+    abstract SecurityState getNewSCHSecurityState();
 
 
     /**
      * Returns the new SecurityState to be set as the Service's SecurityState.
      *
-     * @return a SecurityState to be set as the Service's SecurityState
+     * @return a SecurityState to be set as the Service's SecurityState;
+     * or null if no state change is desired
      */
-    SecurityState getNewServiceSecurityState();
+    abstract SecurityState getNewServiceSecurityState();
 
     /**
      * Creates the necessary stages to complete the relevant task and stores them in the supplied
@@ -1012,8 +1114,7 @@ public class KerberosHelper {
      * @param hostParams             JSON-encoded host parameters
      * @param event                  a ServiceComponentHostServerActionEvent to pass to any created tasks
      * @param roleCommandOrder       the RoleCommandOrder to use to generate the RoleGraph for any newly created Stages
-     * @param realm                  a String declaring the cluster's Kerberos realm
-     * @param kdcType                a KDCType declaring the type of the relevant KDC
+     * @param kerberosDetails        a KerberosDetails containing the information about the relevant Kerberos configuration
      * @param dataDirectory          a File pointing to the (temporary) data directory
      * @param requestStageContainer  a RequestStageContainer to store the new stages in, if null a
      *                               new RequestStageContainer will be created
@@ -1021,16 +1122,119 @@ public class KerberosHelper {
      * @return the last stage id generated, or -1 if no stages were created
      * @throws AmbariException if an error occurs while creating the relevant stages
      */
-    int createStages(Cluster cluster, Map<String, Host> hosts,
-                     Map<String, Map<String, String>> kerberosConfigurations,
-                     String clusterHostInfo, String hostParams,
-                     ServiceComponentHostServerActionEvent event,
-                     RoleCommandOrder roleCommandOrder,
-                     String realm, KDCType kdcType, File dataDirectory,
-                     RequestStageContainer requestStageContainer,
-                     List<ServiceComponentHost> serviceComponentHosts)
+    abstract long createStages(Cluster cluster, Map<String, Host> hosts,
+                               Map<String, Map<String, String>> kerberosConfigurations,
+                               String clusterHostInfo, String hostParams,
+                               ServiceComponentHostServerActionEvent event,
+                               RoleCommandOrder roleCommandOrder,
+                               KerberosDetails kerberosDetails, File dataDirectory,
+                               RequestStageContainer requestStageContainer,
+                               List<ServiceComponentHost> serviceComponentHosts)
         throws AmbariException;
 
+
+    public void addCreatePrincipalsStage(Cluster cluster, String clusterHostInfoJson,
+                                         String hostParamsJson, ServiceComponentHostServerActionEvent event,
+                                         Map<String, String> commandParameters,
+                                         RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
+        throws AmbariException {
+      Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
+          cluster,
+          requestStageContainer.getId(),
+          "Create Principals",
+          clusterHostInfoJson,
+          "{}",
+          hostParamsJson,
+          CreatePrincipalsServerAction.class,
+          event,
+          commandParameters,
+          "Create Principals",
+          1200);
+
+      RoleGraph roleGraph = new RoleGraph(roleCommandOrder);
+      roleGraph.build(stage);
+      requestStageContainer.addStages(roleGraph.getStages());
+    }
+
+    public void addCreateKeytabFilesStage(Cluster cluster, String clusterHostInfoJson,
+                                          String hostParamsJson, ServiceComponentHostServerActionEvent event,
+                                          Map<String, String> commandParameters,
+                                          RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
+        throws AmbariException {
+      Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
+          cluster,
+          requestStageContainer.getId(),
+          "Create Keytabs",
+          clusterHostInfoJson,
+          "{}",
+          hostParamsJson,
+          CreateKeytabFilesServerAction.class,
+          event,
+          commandParameters,
+          "Create Keytabs",
+          1200);
+
+      RoleGraph roleGraph = new RoleGraph(roleCommandOrder);
+      roleGraph.build(stage);
+      requestStageContainer.addStages(roleGraph.getStages());
+    }
+
+    public void addDistributeKeytabFilesStage(Cluster cluster, List<ServiceComponentHost> serviceComponentHosts,
+                                              String clusterHostInfoJson, String hostParamsJson,
+                                              Map<String, String> commandParameters,
+                                              RoleCommandOrder roleCommandOrder,
+                                              RequestStageContainer requestStageContainer)
+        throws AmbariException {
+      Stage stage = createNewStage(requestStageContainer.getLastStageId(),
+          cluster,
+          requestStageContainer.getId(),
+          "Distribute Keytabs",
+          clusterHostInfoJson,
+          StageUtils.getGson().toJson(commandParameters),
+          hostParamsJson);
+
+      if (!serviceComponentHosts.isEmpty()) {
+        List<String> hostsToUpdate = createUniqueHostList(serviceComponentHosts);
+        Map<String, String> requestParams = new HashMap<String, String>();
+        List<RequestResourceFilter> requestResourceFilters = new ArrayList<RequestResourceFilter>();
+        RequestResourceFilter reqResFilter = new RequestResourceFilter("KERBEROS", "KERBEROS_CLIENT", hostsToUpdate);
+        requestResourceFilters.add(reqResFilter);
+
+        ActionExecutionContext actionExecContext = new ActionExecutionContext(
+            cluster.getClusterName(),
+            "SET_KEYTAB",
+            requestResourceFilters,
+            requestParams);
+        customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, requestParams, false);
+      }
+
+      RoleGraph roleGraph = new RoleGraph(roleCommandOrder);
+      roleGraph.build(stage);
+      requestStageContainer.addStages(roleGraph.getStages());
+    }
+
+    public void addUpdateConfigurationsStage(Cluster cluster, String clusterHostInfoJson,
+                                             String hostParamsJson, ServiceComponentHostServerActionEvent event,
+                                             Map<String, String> commandParameters,
+                                             RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
+        throws AmbariException {
+      Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
+          cluster,
+          requestStageContainer.getId(),
+          "Update Configurations",
+          clusterHostInfoJson,
+          "{}",
+          hostParamsJson,
+          UpdateKerberosConfigsServerAction.class,
+          event,
+          commandParameters,
+          "Update Service Configurations",
+          1200);
+
+      RoleGraph roleGraph = new RoleGraph(roleCommandOrder);
+      roleGraph.build(stage);
+      requestStageContainer.addStages(roleGraph.getStages());
+    }
   }
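
The interface-to-abstract-class change above is a template-method refactoring: each
handler keeps its own createStages() logic while the stage-building helpers
(addCreatePrincipalsStage and friends) are shared in the base class. A
self-contained sketch of the shape, with illustrative names:

    abstract class HandlerSketch {
      private long lastStageId = -1;

      // Shared helper, formerly duplicated in each concrete handler.
      protected long addStage(String name) {
        lastStageId++;
        System.out.println("stage " + lastStageId + ": " + name);
        return lastStageId;
      }

      abstract long createStages();
    }

    public class EnableSketch extends HandlerSketch {
      @Override
      long createStages() {
        addStage("Create Principals");
        addStage("Create Keytabs");
        addStage("Distribute Keytabs");
        return addStage("Update Configurations");  // report the last stage id
      }

      public static void main(String[] args) {
        System.out.println("last stage id: " + new EnableSketch().createStages());
      }
    }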
 
   /**
@@ -1047,10 +1251,9 @@ public class KerberosHelper {
    * <li>create keytab files</li>
    * <li>distribute keytab files to the appropriate hosts</li>
    * <li>update relevant configurations</li>
-   * <li>restart services</li>
    * </ol>
    */
-  private class EnableKerberosHandler implements Handler {
+  private class EnableKerberosHandler extends Handler {
     @Override
     public boolean shouldProcess(SecurityState desiredSecurityState, ServiceComponentHost sch) throws AmbariException {
       return (desiredSecurityState == SecurityState.SECURED_KERBEROS) &&
@@ -1075,24 +1278,19 @@ public class KerberosHelper {
     }
 
     @Override
-    public int createStages(Cluster cluster, Map<String, Host> hosts,
-                            Map<String, Map<String, String>> kerberosConfigurations,
-                            String clusterHostInfoJson, String hostParamsJson,
-                            ServiceComponentHostServerActionEvent event,
-                            RoleCommandOrder roleCommandOrder, String realm, KDCType kdcType,
-                            File dataDirectory, RequestStageContainer requestStageContainer,
-                            List<ServiceComponentHost> serviceComponentHosts)
+    public long createStages(Cluster cluster, Map<String, Host> hosts,
+                             Map<String, Map<String, String>> kerberosConfigurations,
+                             String clusterHostInfoJson, String hostParamsJson,
+                             ServiceComponentHostServerActionEvent event,
+                             RoleCommandOrder roleCommandOrder, KerberosDetails kerberosDetails,
+                             File dataDirectory, RequestStageContainer requestStageContainer,
+                             List<ServiceComponentHost> serviceComponentHosts)
         throws AmbariException {
      // If there are principals, keytabs, and configurations to process, set up the following stages:
       //  1) generate principals
       //  2) generate keytab files
       //  3) distribute keytab files
       //  4) update configurations
-      //  4) restart services
-
-      RoleGraph roleGraph;
-      Stage stage;
-      int stageId = -1;
 
       // If a RequestStageContainer does not already exist, create a new one...
       if (requestStageContainer == null) {
@@ -1140,95 +1338,31 @@ public class KerberosHelper {
 
       Map<String, String> commandParameters = new HashMap<String, String>();
       commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
-      commandParameters.put(KerberosServerAction.DEFAULT_REALM, realm);
-      commandParameters.put(KerberosServerAction.KDC_TYPE, kdcType.name());
+      commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
+      commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());
       commandParameters.put(KerberosServerAction.ADMINISTRATOR_CREDENTIAL, getEncryptedAdministratorCredentials(cluster));
 
       // *****************************************************************
       // Create stage to create principals
-      stage = createServerActionStage(++stageId,
-          cluster,
-          requestStageContainer.getId(),
-          "Create Principals",
-          clusterHostInfoJson,
-          "{}",
-          hostParamsJson,
-          CreatePrincipalsServerAction.class,
-          event,
-          commandParameters,
-          "Create Principals",
-          1200);
-
-      roleGraph = new RoleGraph(roleCommandOrder);
-      roleGraph.build(stage);
-      requestStageContainer.addStages(roleGraph.getStages());
+      addCreatePrincipalsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
+          roleCommandOrder, requestStageContainer);
 
       // *****************************************************************
       // Create stage to generate keytabs
-      stage = createServerActionStage(++stageId,
-          cluster,
-          requestStageContainer.getId(),
-          "Create Keytabs",
-          clusterHostInfoJson,
-          "{}",
-          hostParamsJson,
-          CreateKeytabFilesServerAction.class,
-          event,
-          commandParameters,
-          "Create Keytabs",
-          1200);
-
-      roleGraph = new RoleGraph(roleCommandOrder);
-      roleGraph.build(stage);
-      requestStageContainer.addStages(roleGraph.getStages());
+      addCreateKeytabFilesStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
+          roleCommandOrder, requestStageContainer);
 
+      // *****************************************************************
       // Create stage to distribute keytabs
-      stage = createNewStage(++stageId,
-          cluster,
-          requestStageContainer.getId(),
-          "Distribute Keytabs",
-          clusterHostInfoJson,
-          StageUtils.getGson().toJson(commandParameters),
-          hostParamsJson);
-
-      if (!serviceComponentHosts.isEmpty()) {
-        List<String> hostsToUpdate = createUniqueHostList(serviceComponentHosts);
-        Map<String, String> requestParams = new HashMap<String, String>();
-        List<RequestResourceFilter> requestResourceFilters = new ArrayList<RequestResourceFilter>();
-        RequestResourceFilter reqResFilter = new RequestResourceFilter("KERBEROS", "KERBEROS_CLIENT", hostsToUpdate);
-        requestResourceFilters.add(reqResFilter);
-
-        ActionExecutionContext actionExecContext = new ActionExecutionContext(
-            cluster.getClusterName(),
-            "SET_KEYTAB",
-            requestResourceFilters,
-            requestParams);
-        customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, requestParams, false);
-      }
-
-      roleGraph = new RoleGraph(roleCommandOrder);
-      roleGraph.build(stage);
-      requestStageContainer.addStages(roleGraph.getStages());
+      addDistributeKeytabFilesStage(cluster, serviceComponentHosts, clusterHostInfoJson, hostParamsJson,
+          commandParameters, roleCommandOrder, requestStageContainer);
 
+      // *****************************************************************
       // Create stage to update configurations of services
-      stage = createServerActionStage(++stageId,
-          cluster,
-          requestStageContainer.getId(),
-          "Update Service Configurations",
-          clusterHostInfoJson,
-          "{}",
-          hostParamsJson,
-          UpdateKerberosConfigsServerAction.class,
-          event,
-          commandParameters,
-          "Update Service Configurations",
-          1200);
-
-      roleGraph = new RoleGraph(roleCommandOrder);
-      roleGraph.build(stage);
-      requestStageContainer.addStages(roleGraph.getStages());
+      addUpdateConfigurationsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
+          roleCommandOrder, requestStageContainer);
 
-      return stageId;
+      return requestStageContainer.getLastStageId();
     }
 
   }
@@ -1249,7 +1383,7 @@ public class KerberosHelper {
    * <li>restart services</li>
    * </ol>
    */
-  private class DisableKerberosHandler implements Handler {
+  private class DisableKerberosHandler extends Handler {
     @Override
     public boolean shouldProcess(SecurityState desiredSecurityState, ServiceComponentHost sch) throws AmbariException {
       return (desiredSecurityState == SecurityState.UNSECURED) &&
@@ -1274,13 +1408,13 @@ public class KerberosHelper {
     }
 
     @Override
-    public int createStages(Cluster cluster, Map<String, Host> hosts,
-                            Map<String, Map<String, String>> kerberosConfigurations,
-                            String clusterHostInfoJson, String hostParamsJson,
-                            ServiceComponentHostServerActionEvent event,
-                            RoleCommandOrder roleCommandOrder, String realm, KDCType kdcType,
-                            File dataDirectory, RequestStageContainer requestStageContainer,
-                            List<ServiceComponentHost> serviceComponentHosts) {
+    public long createStages(Cluster cluster, Map<String, Host> hosts,
+                             Map<String, Map<String, String>> kerberosConfigurations,
+                             String clusterHostInfoJson, String hostParamsJson,
+                             ServiceComponentHostServerActionEvent event,
+                             RoleCommandOrder roleCommandOrder, KerberosDetails kerberosDetails,
+                             File dataDirectory, RequestStageContainer requestStageContainer,
+                             List<ServiceComponentHost> serviceComponentHosts) {
      // TODO (rlevas): If there are principals, keytabs, and configurations to process, set up the following stages:
       //  1) remove principals
       //  2) remove keytab files
@@ -1289,4 +1423,130 @@ public class KerberosHelper {
       return -1;
     }
   }
+
+  /**
+   * CreatePrincipalsAndKeytabsHandler is a Handler implementation used to create principals and
+   * keytabs and distribute them throughout the cluster. This is similar to enabling Kerberos;
+   * however, no states or configurations will be updated.
+   * <p/>
+   * To complete the process, this implementation creates the following stages:
+   * <ol>
+   * <li>create principals</li>
+   * <li>create keytab files</li>
+   * <li>distribute keytab files to the appropriate hosts</li>
+   * </ol>
+   */
+  private class CreatePrincipalsAndKeytabsHandler extends Handler {
+    @Override
+    public boolean shouldProcess(SecurityState desiredSecurityState, ServiceComponentHost sch) throws AmbariException {
+      return (maintenanceStateHelper.getEffectiveState(sch) == MaintenanceState.OFF);
+    }
+
+    @Override
+    public SecurityState getNewDesiredSCHSecurityState() {
+      return null;
+    }
+
+    @Override
+    public SecurityState getNewSCHSecurityState() {
+      return null;
+    }
+
+    @Override
+    public SecurityState getNewServiceSecurityState() {
+      return null;
+    }
+
+    @Override
+    public long createStages(Cluster cluster, Map<String, Host> hosts,
+                             Map<String, Map<String, String>> kerberosConfigurations,
+                             String clusterHostInfoJson, String hostParamsJson,
+                             ServiceComponentHostServerActionEvent event,
+                             RoleCommandOrder roleCommandOrder, KerberosDetails kerberosDetails,
+                             File dataDirectory, RequestStageContainer requestStageContainer,
+                             List<ServiceComponentHost> serviceComponentHosts)
+        throws AmbariException {
+      // If there are principals and keytabs to process, set up the following stages:
+      //  1) generate principals
+      //  2) generate keytab files
+      //  3) distribute keytab files
+
+      // If a RequestStageContainer does not already exist, create a new one...
+      if (requestStageContainer == null) {
+        requestStageContainer = new RequestStageContainer(
+            actionManager.getNextRequestId(),
+            null,
+            requestFactory,
+            actionManager);
+      }
+
+      Map<String, String> commandParameters = new HashMap<String, String>();
+      commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
+      commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
+      commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name());
+      commandParameters.put(KerberosServerAction.ADMINISTRATOR_CREDENTIAL, getEncryptedAdministratorCredentials(cluster));
+
+      // *****************************************************************
+      // Create stage to create principals
+      super.addCreatePrincipalsStage(cluster, clusterHostInfoJson, hostParamsJson, event,
+          commandParameters, roleCommandOrder, requestStageContainer);
+
+      // *****************************************************************
+      // Create stage to generate keytabs
+      addCreateKeytabFilesStage(cluster, clusterHostInfoJson, hostParamsJson, event,
+          commandParameters, roleCommandOrder, requestStageContainer);
+
+      // Create stage to distribute keytabs
+      addDistributeKeytabFilesStage(cluster, serviceComponentHosts, clusterHostInfoJson,
+          hostParamsJson, commandParameters, roleCommandOrder, requestStageContainer);
+
+      return requestStageContainer.getLastStageId();
+    }
+  }
+
+
+  /**
+   * KerberosDetails is a helper class to hold the details of the relevant Kerberos-specific
+   * configurations so they may be passed around more easily.
+   */
+  private static class KerberosDetails {
+    private boolean securityEnabled;
+    private String defaultRealm;
+    private KDCType kdcType;
+    private Map<String, String> kerberosEnvProperties;
+
+
+    public void setSecurityEnabled(boolean securityEnabled) {
+      this.securityEnabled = securityEnabled;
+    }
+
+    public boolean isSecurityEnabled() {
+      return securityEnabled;
+    }
+
+    public void setDefaultRealm(String defaultRealm) {
+      this.defaultRealm = defaultRealm;
+    }
+
+    public String getDefaultRealm() {
+      return defaultRealm;
+    }
+
+    public void setKdcType(KDCType kdcType) {
+      this.kdcType = kdcType;
+    }
+
+    public KDCType getKdcType() {
+      return kdcType;
+    }
+
+    public void setKerberosEnvProperties(Map<String, String> kerberosEnvProperties) {
+      this.kerberosEnvProperties = kerberosEnvProperties;
+    }
+
+    public Map<String, String> getKerberosEnvProperties() {
+      return kerberosEnvProperties;
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/339e8a76/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestStageContainer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestStageContainer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestStageContainer.java
index 12b7f71..49ba946 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestStageContainer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestStageContainer.java
@@ -25,6 +25,7 @@ import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.Request;
 import org.apache.ambari.server.actionmanager.RequestFactory;
 import org.apache.ambari.server.actionmanager.Stage;
+import org.apache.ambari.server.controller.ExecuteActionRequest;
 import org.apache.ambari.server.controller.RequestStatusResponse;
 import org.apache.ambari.server.controller.ShortTaskStatus;
 import org.apache.ambari.server.state.State;
@@ -62,6 +63,8 @@ public class RequestStageContainer {
 
   private String requestContext = null;
 
+  private ExecuteActionRequest actionRequest = null;
+
   /**
    * Logger
    */
@@ -77,10 +80,25 @@ public class RequestStageContainer {
    * @param manager  action manager
    */
   public RequestStageContainer(Long id, List<Stage> stages, RequestFactory factory, ActionManager manager) {
+    this(id, stages, factory, manager, null);
+  }
+
+  /**
+   * Constructor.
+   *
+   * @param id            request id
+   * @param stages        stages
+   * @param factory       request factory
+   * @param manager       action manager
+   * @param actionRequest action request
+   */
+  public RequestStageContainer(Long id, List<Stage> stages, RequestFactory factory, ActionManager manager,
+                               ExecuteActionRequest actionRequest) {
     this.id = id;
     this.stages = stages == null ? new ArrayList<Stage>() : stages;
     this.requestFactory = factory;
     this.actionManager = manager;
+    this.actionRequest = actionRequest;
   }
 
   /**
@@ -183,7 +201,9 @@ public class RequestStageContainer {
    */
   public void persist() throws AmbariException {
     if (!stages.isEmpty()) {
-      Request request = requestFactory.createNewFromStages(stages);
+      Request request = (null == actionRequest)
+          ? requestFactory.createNewFromStages(stages)
+          : requestFactory.createNewFromStages(stages, actionRequest);
 
       if (null != requestContext) {
         request.setRequestContext(requestContext);
@@ -193,7 +213,7 @@ public class RequestStageContainer {
         if (LOG.isDebugEnabled()) {
           LOG.debug(String.format("Triggering Action Manager, request=%s", request));
         }
-        actionManager.sendActions(request, null);
+        actionManager.sendActions(request, actionRequest);
       }
     }
   }
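
A sketch of how a caller can now thread an ExecuteActionRequest through to
sendActions() via the new five-argument constructor (fragment; only the
constructor and persist() shown in this patch are assumed, the surrounding
variables are illustrative):

    RequestStageContainer container = new RequestStageContainer(
        actionManager.getNextRequestId(),  // request id
        null,                              // stages: start with an empty list
        requestFactory,
        actionManager,
        actionRequest);                    // forwarded to the factory and to sendActions()
    container.persist();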

http://git-wip-us.apache.org/repos/asf/ambari/blob/339e8a76/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/kerberos.json b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/kerberos.json
new file mode 100644
index 0000000..6ab7610
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/kerberos.json
@@ -0,0 +1,17 @@
+{
+  "services": [
+    {
+      "name": "KERBEROS",
+      "identities": [
+        {
+          "name": "/smokeuser"
+        }
+      ],
+      "components": [
+        {
+          "name": "KERBEROS_CLIENT"
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file
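
A note on the descriptor above: the leading "/" in "/smokeuser" marks a reference
to an identity declared at a higher level of the Kerberos descriptor rather than a
new identity. The referenced entry would look roughly like the sketch below (shape
inferred from the principal descriptors handled in KerberosHelper; the exact
property names and values are assumptions):

    {
      "identities": [
        {
          "name": "smokeuser",
          "principal": {
            "value": "${cluster-env/smokeuser}@${realm}",
            "type": "user",
            "configuration": "cluster-env/smokeuser_principal_name"
          }
        }
      ]
    }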

http://git-wip-us.apache.org/repos/asf/ambari/blob/339e8a76/ambari-server/src/main/resources/stacks/HDP/2.2/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.2/kerberos.json
index de5f1a9..46aff38 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/kerberos.json
@@ -1,6 +1,6 @@
 {
   "properties": {
-    "realm": "${cluster-env/kerberos_domain}",
+    "realm": "${krb5-conf/realm}",
     "keytab_dir": "/etc/security/keytabs"
   },
   "identities": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/339e8a76/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
index 5e933d2..6f2699b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
@@ -32,6 +32,7 @@ import junit.framework.Assert;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+import org.apache.ambari.server.actionmanager.Request;
 import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
@@ -61,7 +62,8 @@ import com.google.inject.Injector;
 import com.google.inject.persist.PersistService;
 
 @RunWith(MockitoJUnitRunner.class)
 public class AmbariCustomCommandExecutionHelperTest {
   private Injector injector;
   private AmbariManagementController controller;
   private AmbariMetaInfo ambariMetaInfo;
@@ -70,7 +72,7 @@ public class AmbariCustomCommandExecutionHelperTest {
   
   private static final String REQUEST_CONTEXT_PROPERTY = "context";
   
-  @Captor ArgumentCaptor<List<Stage>> stagesCaptor;
+  @Captor ArgumentCaptor<Request> requestCapture;
   @Mock ActionManager am;
   
   @Before
@@ -123,12 +125,13 @@ public class AmbariCustomCommandExecutionHelperTest {
       
       controller.createAction(actionRequest, requestProperties);
       
-      Mockito.verify(am, Mockito.times(1)).sendActions(stagesCaptor.capture(), any(ExecuteActionRequest.class));
-      
-      
-      List<Stage> stages = stagesCaptor.getValue();
-      Assert.assertEquals(1, stages.size());
-      Stage stage = stages.get(0);
+      Mockito.verify(am, Mockito.times(1)).sendActions(requestCapture.capture(), any(ExecuteActionRequest.class));
+
+      Request request = requestCapture.getValue();
+      Assert.assertNotNull(request);
+      Assert.assertNotNull(request.getStages());
+      Assert.assertEquals(1, request.getStages().size());
+      Stage stage = request.getStages().iterator().next();
       
       Assert.assertEquals(1, stage.getHosts().size());
       
@@ -175,12 +178,14 @@ public class AmbariCustomCommandExecutionHelperTest {
 
       //clusters.getHost("c6402").setState(HostState.HEARTBEAT_LOST);
 
-      Mockito.verify(am, Mockito.times(1)).sendActions(stagesCaptor.capture(), any(ExecuteActionRequest.class));
+      Mockito.verify(am, Mockito.times(1)).sendActions(requestCapture.capture(), any(ExecuteActionRequest.class));
 
-      List<Stage> stages = stagesCaptor.getValue();
-      Assert.assertEquals(1, stages.size());
+      Request request = requestCapture.getValue();
+      Assert.assertNotNull(request);
+      Assert.assertNotNull(request.getStages());
+      Assert.assertEquals(1, request.getStages().size());
+      Stage stage = request.getStages().iterator().next();
 
-      Stage stage = stages.get(0);
        // Check that a command was generated, one for each host
       Assert.assertEquals(2, stage.getHostRoleCommands().size());
     }catch (Exception e) {
@@ -217,12 +222,14 @@ public class AmbariCustomCommandExecutionHelperTest {
 
       controller.createAction(actionRequest, requestProperties);
 
-      Mockito.verify(am, Mockito.times(1)).sendActions(stagesCaptor.capture(), any(ExecuteActionRequest.class));
+      Mockito.verify(am, Mockito.times(1)).sendActions(requestCapture.capture(), any(ExecuteActionRequest.class));
 
-      List<Stage> stages = stagesCaptor.getValue();
-      Assert.assertEquals(1, stages.size());
+      Request request = requestCapture.getValue();
+      Assert.assertNotNull(request);
+      Assert.assertNotNull(request.getStages());
+      Assert.assertEquals(1, request.getStages().size());
+      Stage stage = request.getStages().iterator().next();
 
-      Stage stage = stages.get(0);
       // Check that a command was generated for the one healthy host
       Assert.assertEquals(1, stage.getHostRoleCommands().size());
     }catch (Exception e) {
@@ -260,12 +267,14 @@ public class AmbariCustomCommandExecutionHelperTest {
 
       controller.createAction(actionRequest, requestProperties);
 
-      Mockito.verify(am, Mockito.times(1)).sendActions(stagesCaptor.capture(), any(ExecuteActionRequest.class));
+      Mockito.verify(am, Mockito.times(1)).sendActions(requestCapture.capture(), any(ExecuteActionRequest.class));
 
-      List<Stage> stages = stagesCaptor.getValue();
-      Assert.assertEquals(1, stages.size());
+      Request request = requestCapture.getValue();
+      Assert.assertNotNull(request);
+      Assert.assertNotNull(request.getStages());
+      Assert.assertEquals(1, request.getStages().size());
+      Stage stage = request.getStages().iterator().next();
 
-      Stage stage = stages.get(0);
       // Check that a command was generated for the one healthy host
       Assert.assertEquals(1, stage.getHostRoleCommands().size());
     }catch (Exception e) {
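
The captor swap above repeats in each test here and in
BackgroundCustomCommandExecutionTest below: sendActions() now receives an assembled
Request rather than a bare stage list, so the tests capture and unwrap it. The
shared pattern, pulled out for reference (fragment; uses only the imports and mocks
already present in these tests):

    @Captor ArgumentCaptor<Request> requestCapture;
    @Mock ActionManager am;

    // after exercising controller.createAction(actionRequest, requestProperties):
    Mockito.verify(am, Mockito.times(1))
        .sendActions(requestCapture.capture(), any(ExecuteActionRequest.class));

    Request request = requestCapture.getValue();
    Assert.assertNotNull(request);
    Assert.assertNotNull(request.getStages());
    Assert.assertEquals(1, request.getStages().size());
    Stage stage = request.getStages().iterator().next();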

http://git-wip-us.apache.org/repos/asf/ambari/blob/339e8a76/ambari-server/src/test/java/org/apache/ambari/server/controller/BackgroundCustomCommandExecutionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/BackgroundCustomCommandExecutionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/BackgroundCustomCommandExecutionTest.java
index 2b00f40..a0e358a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/BackgroundCustomCommandExecutionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/BackgroundCustomCommandExecutionTest.java
@@ -31,6 +31,7 @@ import junit.framework.Assert;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+import org.apache.ambari.server.actionmanager.Request;
 import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.agent.AgentCommand.AgentCommandType;
 import org.apache.ambari.server.agent.ExecutionCommand;
@@ -70,7 +71,7 @@ public class BackgroundCustomCommandExecutionTest {
   
   private static final String REQUEST_CONTEXT_PROPERTY = "context";
   
-  @Captor ArgumentCaptor<List<Stage>> stagesCaptor;
+  @Captor ArgumentCaptor<Request> requestCapture;
   @Mock ActionManager am;
   
   @Before
@@ -123,12 +124,13 @@ public class BackgroundCustomCommandExecutionTest {
       
       controller.createAction(actionRequest, requestProperties);
       
-      Mockito.verify(am, Mockito.times(1)).sendActions(stagesCaptor.capture(), any(ExecuteActionRequest.class));
-      
-      
-      List<Stage> stages = stagesCaptor.getValue();
-      Assert.assertEquals(1, stages.size());
-      Stage stage = stages.get(0);
+      Mockito.verify(am, Mockito.times(1)).sendActions(requestCapture.capture(), any(ExecuteActionRequest.class));
+
+      Request request = requestCapture.getValue();
+      Assert.assertNotNull(request);
+      Assert.assertNotNull(request.getStages());
+      Assert.assertEquals(1, request.getStages().size());
+      Stage stage = request.getStages().iterator().next();
       
       System.out.println(stage);