Posted to commits@ambari.apache.org by yu...@apache.org on 2014/05/31 09:21:08 UTC

[04/12] AMBARI-5482. Integrate Ambari Shell. (Janos Matyas and Krisztian Horvath via yusaku)

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/main/python/ambari_client/resources/stacks.py
----------------------------------------------------------------------
diff --git a/ambari-client/src/main/python/ambari_client/resources/stacks.py b/ambari-client/src/main/python/ambari_client/resources/stacks.py
deleted file mode 100755
index f4ef518..0000000
--- a/ambari-client/src/main/python/ambari_client/resources/stacks.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one
-#  or more contributor license agreements.  See the NOTICE file
-#  distributed with this work for additional information
-#  regarding copyright ownership.  The ASF licenses this file
-#  to you under the Apache License, Version 2.0 (the
-#  "License"); you may not use this file except in compliance
-#  with the License.  You may obtain a copy of the License at
-# 
-#      http://www.apache.org/licenses/LICENSE-2.0
-# 
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-from ambari_client.model import stack, component
-
-__docformat__ = "epytext"
-
-
-def _get_config(root_resource, version, service_name):
-  """
-  Get service configurations from stack
-  @param version: The HDP version.
-  @param service_name: service name
-  @return: A ConfigModel object
-  """
-  return stack._get_configuration_from_stack(root_resource, version, service_name)
-
-
-def _get_components(root_resource, version, service_name):
-  """
-  Get service components from stack
-  @param version: The HDP version.
-  @param service_name: service name
-  @return: A ComponentModel object
-  """
-  return stack._get_components_from_stack(root_resource, version, service_name)
-
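
These module-private helpers backed the public get_config and get_components calls on AmbariClient, which the removed tests later in this patch still exercise. A minimal sketch of that public surface (server address and credentials are illustrative, and a reachable Ambari server is assumed):

    from ambari_client.ambari_api import AmbariClient

    # Connect to an Ambari server; host, port and credentials are placeholders.
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)

    # Per the tests below, these return a ModelList of StackConfigModel and
    # StackComponentModel entries for the given stack version and service.
    configs = client.get_config('1.3.0', 'HDFS')
    components = client.get_components('1.3.0', 'HDFS')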

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/main/python/setup.py
----------------------------------------------------------------------
diff --git a/ambari-client/src/main/python/setup.py b/ambari-client/src/main/python/setup.py
deleted file mode 100755
index 24942ad..0000000
--- a/ambari-client/src/main/python/setup.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#  Licensed to the Apache Software Foundation (ASF) under one
-#  or more contributor license agreements.  See the NOTICE file
-#  distributed with this work for additional information
-#  regarding copyright ownership.  The ASF licenses this file
-#  to you under the Apache License, Version 2.0 (the
-#  "License"); you may not use this file except in compliance
-#  with the License.  You may obtain a copy of the License at
-# 
-#      http://www.apache.org/licenses/LICENSE-2.0
-# 
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-
-from setuptools import setup, find_packages
-
-from sys import version_info, platform
-
-if version_info[:2] > (2, 5):
-    install_requires = []
-else:
-    install_requires = ['simplejson >= 2.0.0']
-
-# Python 2.6 and below requires argparse
-if version_info[:2] < (2, 7):
-    install_requires += ['argparse']
-
-setup(
-  name = 'ambari_client',
-  author_email = "ambari-dev@incubator.apache.org",
-  version = "1.0.3-SNAPSHOT",
-  packages = ['ambari_client'],
-  install_requires = install_requires,
-  description = 'Ambari python REST API client',
-  license = 'Apache License 2.0'
-)
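
The deleted setup.py selected its dependencies by interpreter version: json entered the standard library in Python 2.6 and argparse in 2.7, so older interpreters need backports. A standalone sketch of the same selection logic and what it yields (not part of the package):

    from sys import version_info

    # Python 2.5 and older have no bundled json module, so pull in simplejson.
    install_requires = [] if version_info[:2] > (2, 5) else ['simplejson >= 2.0.0']

    # argparse only joined the standard library in Python 2.7.
    if version_info[:2] < (2, 7):
        install_requires += ['argparse']

    # Result: 2.7+ -> [], 2.6 -> ['argparse'],
    # 2.5 -> ['simplejson >= 2.0.0', 'argparse']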

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/packages/tarball/all.xml
----------------------------------------------------------------------
diff --git a/ambari-client/src/packages/tarball/all.xml b/ambari-client/src/packages/tarball/all.xml
deleted file mode 100755
index 0e4f34b..0000000
--- a/ambari-client/src/packages/tarball/all.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1"
-          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1 http://maven.apache.org/xsd/assembly-1.1.1.xsd">
-  <!--This 'all' id is not appended to the produced bundle because we do this:
-    http://maven.apache.org/plugins/maven-assembly-plugin/faq.html#required-classifiers
-  -->
-  <formats>
-    <format>dir</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <fileSets>
-    <fileSet>
-      <directory>src/main/python</directory>
-      <outputDirectory>/</outputDirectory>
-    </fileSet>
-  </fileSets>
-</assembly>
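
The removed assembly descriptor only staged the client sources: it copied src/main/python to the root of a dir-format bundle. A hedged sketch of consuming such a bundle (the output path below is a placeholder, not anything defined in this commit):

    import sys

    # The fileSet above places the ambari_client package at the bundle root,
    # so adding that root to sys.path makes the package importable.
    sys.path.insert(0, '/path/to/unpacked/bundle')

    from ambari_client.ambari_api import AmbariClient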

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/test/python/TestAmbariClient.py
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/TestAmbariClient.py b/ambari-client/src/test/python/TestAmbariClient.py
deleted file mode 100755
index 1831f89..0000000
--- a/ambari-client/src/test/python/TestAmbariClient.py
+++ /dev/null
@@ -1,211 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-
-from mock.mock import MagicMock, patch
-from ambari_client.ambari_api import AmbariClient
-from HttpClientInvoker import HttpClientInvoker
-from ambari_client.model.stack import StackConfigModel, StackComponentModel
-import unittest
-import logging
-
-class TestAmbariClient(unittest.TestCase):
-
-  def setUp(self):
-    http_client_logger = logging.getLogger()
-    http_client_logger.info('Running test:' + self.id())
-
-  def create_client(self, http_client_mock = MagicMock()):
-    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
-    client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
-    return client
-
-  def test_init(self):
-    """
-    AmbariClient is the top-level root resource.
-    This testcase checks that the http client is initialized
-    when the init method is called.
-    """
-    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
-    self.assertEqual(client.version, 1, "version should be 1")
-    self.assertEqual(client.host_url, "http://localhost:8080/api/v1",
-                       "host_url should be http://localhost:8080/api/v1")
-      
-    client = AmbariClient(host_name="localhost", user_name="admin", password="admin")
-    self.assertEqual(client.version, 1, "version should be 1")
-    self.assertEqual(client.host_url, "http://localhost:8080/api/v1",
-                       "host_url should be http://localhost:8080/api/v1")
-      
-    client = AmbariClient(host_name="localhost")
-    self.assertEqual(client.version, 1, "version should be 1")
-    self.assertEqual(client.host_url, "http://localhost:8080/api/v1",
-                       "host_url should be http://localhost:8080/api/v1")
-    
-    
-    client = AmbariClient("localhost", 8443, "admin", "admin", use_https=True)
-    self.assertEqual(client.version, 1, "version should be 1")
-    self.assertEqual(client.host_url, "https://localhost:8443/api/v1",
-                       "host_url should be https://localhost:8443/api/v1")
-      
-  def test_get_all_clusters(self):
-    """
-    Get all clusters.
-    This testcase checks if get_all_clusters returns a ModelList.
-    """
-    expected_output = {'items': [{'cluster_name': u'test1', 'version': u'HDP-1.2.1'}]}
-      
-    client = self.create_client()
-    all_clusters = client.get_all_clusters()
-      
-    self.assertEqual(len(all_clusters), 1)
-    self.assertEqual(all_clusters.to_json_dict(), expected_output)
-    
-  def test_get_cluster(self):
-    """
-    Get a cluster.
-    This testcase checks if get_cluster returns a ClusterModel.
-    """
-    expected_dict_output = {'cluster_name': u'test1', 'version': u'HDP-1.2.1'}
-    
-    client = self.create_client()
-    cluster = client.get_cluster('test1')
-    
-    self.assertEqual(cluster.cluster_name, "test1", "cluster_name should be test1 ")
-    self.assertEqual(cluster.to_json_dict(), expected_dict_output, "to_json_dict should convert ClusterModel")
-    
-  def test_get_host(self):
-    """
-    Get host
-    This testcase checks if client.get_host returns a correct host
-    """
-    expected_dict_output = {'ip': '10.0.2.15', 'host_name': 'dev06.hortonworks.com', 'rack_info': '/default-rack'}
-    
-    client = self.create_client()
-    host = client.get_host('dev06.hortonworks.com')
-    
-    self.assertEqual(host.to_json_dict(), expected_dict_output)
-    self.assertEqual(host.host_state, "HEARTBEAT_LOST")
-     
-  def test_get_all_hosts(self):
-    """
-    Get all hosts.
-    This testcase checks if get_all_hosts returns a ModelList.
-    """
-    expected_hosts_dict = {'items': [{'ip': None, 'host_name': u'apspal44-83', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-84', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-85', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-86', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-87', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-88', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-89', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'r01hn01', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'r01mgt', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'r01wn01', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'r01wn02', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'r01wn03', 'rack_info': '/default-rack'}]}
-      
-    client = self.create_client()
-    all_hosts = client.get_all_hosts()
-    
-    self.assertEqual(len(all_hosts), 12, "There should be 12 hosts from the response")
-    self.assertEqual(all_hosts.to_json_dict(), expected_hosts_dict)
-    
-  def test_bootstrap_hosts(self):
-    """
-    Test Bootstrap
-    """
-    http_client_mock = MagicMock()
-    
-    ssh_key = 'abc!@#$%^&*()_:"|<>?[];\'\\./'
-    host_list = ['dev05.hortonworks.com','dev06.hortonworks.com']
-    
-    expected_path = '//bootstrap'
-    expected_headers = {'Content-Type': 'application/json'}
-    expected_request = {'hosts': host_list, 'sshKey': 'abc!@#$%^&*()_:"|<>?[];\\\'\\\\./'}
-    expected_response = {'status': 201, 'message': u'Running Bootstrap now.', 'requestId': 5}
-                               
-    client = self.create_client(http_client_mock)
-    resp = client.bootstrap_hosts(host_list, ssh_key)
-
-    self.assertEqual(resp.to_json_dict(), expected_response)
-    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=expected_headers, payload=expected_request)
-  
-  def test_create_cluster(self):
-    """
-    Test create cluster
-    """
-    http_client_mock = MagicMock()
-    
-    expected_path = '//clusters/c1'
-    expected_request = {'Clusters': {'version': 'HDP-2.0.5'}}
-          
-    client = self.create_client(http_client_mock)
-    resp = client.create_cluster('c1', 'HDP-2.0.5')
-    
-    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=expected_request)
-    
-  def test_delete_cluster(self):
-    """
-    Test delete cluster
-    """
-    http_client_mock = MagicMock()
-    
-    expected_path = '//clusters/c1'
-    expected_request = None
-          
-    client = self.create_client(http_client_mock)
-    resp = client.delete_cluster('c1')
-    
-    http_client_mock.invoke.assert_called_with('DELETE', expected_path, headers=None, payload=expected_request)
-    
-  def test_delete_host(self):
-    """
-    Test delete host
-    """
-    http_client_mock = MagicMock()
-    
-    expected_path = '//hosts/abc.abc.abc'
-    expected_request = None
-          
-    client = self.create_client(http_client_mock)
-    resp = client.delete_host('abc.abc.abc')
-    
-    http_client_mock.invoke.assert_called_with('DELETE', expected_path, headers=None, payload=expected_request)
-    
-  def test_get_config(self):
-    """
-    Test get config
-    """
-    expected_dict = {'items': [{'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'datanode_du_reserved', 'property_value': u'1'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.access.time.precision', 'property_value': u'0'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.balance.bandwidthPerSec', 'property_value': u'6250000'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.block.access.token.enable', 'property_value': u'true'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.block.size', 'property_value': u'134217728'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.blockreport.initialDelay', 'property_value': u'120'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.cluster.administrators', 'property_value': u' hdfs'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.datanode.du.pct', 'property_value': u'0.85f'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.datanode.failed.volumes.tolerated', 'property_value': u'0'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.datanode.ipc.address', 'property_value': u'0.0.0.0:8010'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.datanode.max.xcievers', 'property_value': u'4096'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.datanode.socket.write.timeout', 'property_value': u'0'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.heartbeat.interval', 'property_value': u'3'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.https.port', 'property_value': u'50470'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.namenode.avoid.read.stale.datanode', 'property_value': u'true'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.namenode.avoid.write.stale.datanode', 'property_value': u'true'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.namenode.handler.count', 'property_value': u'100'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.namenode.handler.count', 'property_value': u'40'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.namenode.stale.datanode.interval', 'property_value': u'30000'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.namenode.write.stale.datanode.ratio', 'property_value': u'1.0f'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.permissions', 'property_value': u'true'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.permissions.supergroup', 'property_value': u'hdfs'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.replication.max', 'property_value': u'50'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.safemode.threshold.pct', 'property_value': u'1.0f'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.secondary.https.port', 'property_value': u'50490'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.umaskmode', 'property_value': u'077'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.web.ugi', 'property_value': u'gopher,gopher'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_block_local_path_access_user', 'property_value': u'hbase'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_data_dir', 'property_value': u'/hadoop/hdfs/data'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_datanode_address', 'property_value': u'50010'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_datanode_data_dir_perm', 'property_value': u'750'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_datanode_failed_volume_tolerated', 'property_value': u'0'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_datanode_http_address', 'property_value': u'50075'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_name_dir', 'property_value': u'/hadoop/hdfs/namenode'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_replication', 'property_value': u'3'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_webhdfs_enabled', 'property_value': u'true'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dtnode_heapsize', 'property_value': u'1024'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs.checkpoint.edits.dir', 'property_value': u'${fs.checkpoint.dir}'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs.checkpoint.period', 'property_value': u'21600'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs.checkpoint.size', 'property_value': u'536870912'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs.trash.interval', 'property_value': u'360'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs_checkpoint_dir', 'property_value': u'/hadoop/hdfs/namesecondary'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs_checkpoint_period', 'property_value': u'21600'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs_checkpoint_size', 'property_value': u'0.5'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'hadoop.security.authentication', 'property_value': u'simple'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'hadoop_heapsize', 'property_value': u'1024'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'hadoop_pid_dir_prefix', 'property_value': u'/var/run/hadoop'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'hdfs_log_dir_prefix', 'property_value': u'/var/log/hadoop'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'hdfs_user', 'property_value': u'hdfs'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'io.compression.codec.lzo.class', 'property_value': u'com.hadoop.compression.lzo.LzoCodec'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'io.compression.codecs', 'property_value': u'org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.SnappyCodec'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'io.file.buffer.size', 'property_value': u'131072'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'io.serializations', 'property_value': u'org.apache.hadoop.io.serializer.WritableSerialization'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'ipc.client.connect.max.retries', 'property_value': u'50'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'ipc.client.connection.maxidletime', 'property_value': u'30000'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'ipc.client.idlethreshold', 'property_value': u'8000'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'ipc.server.max.response.size', 'property_value': u'5242880'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'ipc.server.read.threadpool.size', 'property_value': u'5'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'kerberos_domain', 'property_value': u'EXAMPLE.COM'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'keytab_path', 'property_value': u'/etc/security/keytabs'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'namenode_formatted_mark_dir', 'property_value': u'/var/run/hadoop/hdfs/namenode/formatted/'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'namenode_heapsize', 'property_value': u'1024'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'namenode_opt_maxnewsize', 'property_value': u'640'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'namenode_opt_newsize', 'property_value': u'200'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'proxyuser_group', 'property_value': u'users'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.client.datanode.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.client.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.datanode.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.inter.datanode.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.inter.tracker.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.job.submission.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.namenode.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.task.umbilical.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security_enabled', 'property_value': u'false'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'webinterface.private.actions', 'property_value': u'false'}]}
-    expected_first_item = StackConfigModel(None, property_name='datanode_du_reserved' , property_value='1' , service_name='HDFS' , stack_version='1.3.0')    
-    expected_request = None
-              
-    client = self.create_client()
-    configs = client.get_config('1.3.0','HDFS')
-    
-        
-    self.assertEquals(len(configs), 75)
-    self.assertEquals(str(configs[0]),str(expected_first_item))
-    self.assertEquals(configs.to_json_dict(), expected_dict)
-    
-  def test_get_components(self):
-    """
-    Test get components
-    """
-    expected_dict = {'items': [{'stack_version': u'1.3.0', 'service_name': u'HDFS', 'component_name': u'DATANODE'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'component_name': u'HDFS_CLIENT'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'component_name': u'NAMENODE'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'component_name': u'SECONDARY_NAMENODE'}]}
-    expected_first_item = StackComponentModel(None, component_name='DATANODE', service_name='HDFS' , stack_version='1.3.0')    
-    expected_request = None
-              
-    client = self.create_client()
-    components = client.get_components('1.3.0','HDFS')
-        
-    self.assertEquals(len(components), 4)
-    self.assertEquals(str(components[0]),str(expected_first_item))
-    self.assertEquals(components.to_json_dict(), expected_dict)
-  
\ No newline at end of file
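
Taken together, the removed tests double as documentation of the client workflow: bootstrap hosts, create a cluster on a stack, then drive services through install and start. A condensed sketch of that flow against a live server, using only calls exercised by the tests in this patch (the cluster-level calls appear in TestClusterModel, next; names and key material are placeholders):

    from ambari_client.ambari_api import AmbariClient

    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)

    # Register hosts with the server (the key content is a placeholder).
    ssh_key = '<contents of the root private key>'
    client.bootstrap_hosts(['dev05.hortonworks.com', 'dev06.hortonworks.com'], ssh_key)

    # Create a cluster on a stack, then work with it through the ClusterModel.
    client.create_cluster('c1', 'HDP-2.0.5')
    cluster = client.get_cluster('c1')
    cluster.create_services(['HDFS', 'YARN', 'MAPREDUCEv2', 'TEZ'])
    cluster.install_all_services()
    cluster.start_all_services(True)  # True requests the run_smoke_test parameter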

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/test/python/TestClusterModel.py
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/TestClusterModel.py b/ambari-client/src/test/python/TestClusterModel.py
deleted file mode 100644
index e558706..0000000
--- a/ambari-client/src/test/python/TestClusterModel.py
+++ /dev/null
@@ -1,428 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-import logging
-
-from mock.mock import MagicMock, patch
-from HttpClientInvoker import HttpClientInvoker
-
-from ambari_client.ambari_api import AmbariClient
-from ambari_client.model.host import HostModel
-from ambari_client.core.errors import BadRequest
-
-import unittest
-
-class TestClusterModel(unittest.TestCase):
-
-  def setUp(self):
-    http_client_logger = logging.getLogger()
-    http_client_logger.info('Running test:' + self.id())
-
-  def create_cluster(self, http_client_mock = MagicMock()):    
-    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
-    client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
-    return client.get_cluster('test1')
-   
-  def test_get_service(self):
-    """
-    Get the service of a cluster
-    This testcase checks if get_service returns a ServiceModel.
-    """   
-    expected_dict_output = {'cluster_name': 'test1', 'version': 'HDP-1.2.1'}
-    
-    cluster = self.create_cluster()
-    serviceList = cluster.get_all_services()
-    ganglia = cluster.get_service("GANGLIA")  
-
-    self.assertEqual(cluster.cluster_name, "test1", "cluster_name should be test1 ")
-    self.assertEqual(cluster.to_json_dict(), expected_dict_output, "to_json_dict should convert ClusterModel")
-    self.assertEqual(len(serviceList), 3, "There should be 3 services in the response")
-    self.assertEqual(str(ganglia.state), "STARTED", "The ganglia service state should be fetched as STARTED")
-    self.assertEqual(ganglia.clusterRef.cluster_name, cluster.cluster_name, "The clusterRef value for the service should be fetched")
-      
-  def test_get_all_services(self):
-    """
-    Get all services of a cluster.
-    This testcase checks if get_all_services returns a ModelList.
-    """
-    expected_dict_output = {'cluster_name': 'test1', 'version': 'HDP-1.2.1'}
-    
-    cluster = self.create_cluster()
-    serviceList = cluster.get_all_services()
-    
-    self.assertEqual(cluster.cluster_name, "test1", "cluster_name should be test1 ")
-    self.assertEqual(cluster.to_json_dict(), expected_dict_output, "to_json_dict should convert ClusterModel")
-    self.assertEqual(len(serviceList), 3, "There should be 3 services in the response")
-
-  def test_get_all_hosts(self):
-    """
-    Get all cluster hosts
-    This testcase checks if get_all_hosts returns a ModelList of hosts.
-    """
-    expected_dict_output = {'items': [{'ip': '10.0.2.15', 'host_name': 'dev05.hortonworks.com', 'rack_info': '/default-rack'}, {'ip': '10.0.2.15', 'host_name': 'dev06.hortonworks.com', 'rack_info': '/default-rack'}]}
-
-    cluster = self.create_cluster()
-    hostlist = cluster.get_all_hosts()
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    self.assertEqual(hostlist.to_json_dict(), expected_dict_output)
-    self.assertEqual(hostlist[1].host_name, 'dev06.hortonworks.com')
-    self.assertEqual(len(hostlist), 2)  
-
-  def test_get_host(self):
-    """
-    Get cluster host
-    This testcase checks if get_host returns correct HostModel
-    """
-    expected_dict_output = {'ip': '10.104.44.95', 'host_name': 'myhost', 'rack_info': '/default-rack'}
-    
-    cluster = self.create_cluster()
-    host = cluster.get_host('myhost')
-    
-    self.assertEqual(host.clusterRef.cluster_name, "test1")
-    self.assertEqual(host.to_json_dict(), expected_dict_output)
-    self.assertEqual(host.host_state, "HEALTHY")
-    self.assertEqual(host.public_host_name, "myhost")
-     
-  def test_get_global_config(self):
-    """
-    Get global config
-    This testcase checks if get_global_config returns the correct configuration
-    """
-    expected_dict_output = {'tag': 'version1', 'type': 'global'}
-    expected_properties = {'dfs_namenode_name_dir': '/hadoop/hdfs/namenode', 'security_enabled': 'false', 'proxyuser_group': 'users', 'hdfs_log_dir_prefix': '/var/log/hadoop', 'dfs_datanode_data_dir': '/hadoop/hdfs/data', 'namenode_formatted_mark_dir': '/var/run/hadoop/hdfs/namenode/formatted/', 'rrdcached_base_dir': '/var/lib/ganglia/rrds', 'user_group': 'hadoop', 'dfs_namenode_checkpoint_dir': '/hadoop/hdfs/namesecondary', 'dfs_namenode_checkpoint_period': '21600', 'hive_user': 'hive', 'fs_checkpoint_size': '0.5', 'hbase_conf_dir': '/etc/hbase', 'datanode_du_reserved': '1', 'dfs_datanode_http_address': '50075', 'namenode_heapsize': '1024m', 'dfs_webhdfs_enabled': 'true', 'oozie_user': 'oozie', 'hcat_conf_dir': '', 'hadoop_conf_dir': '/etc/hadoop/conf', 'dfs_replication': '3', 'namenode_opt_maxnewsize': '640m', 'apache_artifacts_download_url': '', 'dfs_datanode_address': '50010', 'dfs_exclude': 'dfs.exclude', 'yarn_user': 'yarn', 'gpl_artifacts_download_url': '', 'zk_user': 'zookeeper', 'smokeuser': 'ambari-qa', 'dtnode_heapsize': '1024m', 'gmond_user': 'nobody', 'dfs_datanode_failed_volume_tolerated': '0', 'java64_home': '/usr/jdk/jdk1.6.0_31', 'run_dir': '/var/run/hadoop', 'ganglia_runtime_dir': '/var/run/ganglia/hdp', 'dfs_datanode_data_dir_perm': '750', 'hdfs_enable_shortcircuit_read': 'true', 'hdfs_user': 'hdfs', 'hbase_user': 'hbase', 'webhcat_user': 'hcat', 'gmetad_user': 'nobody', 'dfs_block_local_path_access_user': 'hbase', 'namenode_opt_newsize': '200m', 'mapred_user': 'mapred', 'nagios_group': 'nagios', 'hcat_user': 'hcat', 'hadoop_heapsize': '1024', 'hadoop_pid_dir_prefix': '/var/run/hadoop', 'nagios_user': 'nagios'}
-    
-    cluster = self.create_cluster()
-    global_config = cluster.get_global_config()
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    self.assertEqual(global_config.properties, expected_properties)
-    self.assertEqual(global_config.to_json_dict(), expected_dict_output)
-    
-  def test_get_core_site_config(self):
-    """
-    Get core-site config
-    """
-    expected_dict_output = {'tag': 'version1', 'type': 'core-site'}
-    expected_properties = {'io.serializations': 'org.apache.hadoop.io.serializer.WritableSerialization', 'fs.checkpoint.size': '0.5', 'fs.trash.interval': '360', 'hadoop.security.authentication': 'simple', 'io.compression.codecs': 'org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec', 'mapreduce.jobtracker.webinterface.trusted': 'false', 'hadoop.security.authorization': 'false', 'fs.checkpoint.edits.dir': '/hadoop/hdfs/namesecondary', 'ipc.client.connection.maxidletime': '30000', 'ipc.client.connect.max.retries': '50', 'hadoop.security.auth_to_local': '\n        RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n        RULE:[2:$1@$0](jhs@.*)s/.*/mapred/\n        RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n        RULE:[2:$1@$0](hm@.*)s/.*/hbase/\n        RULE:[2:$1@$0](rs@.*)s/.*/hbase/\n        DEFAULT\n    ', 'io.file.buffer.size': '131072', 'dfs.namenode.checkpoint.dir': '/hadoop/hdfs/namesecondary', 'ipc.client.idlethreshold': '8000', 'dfs.namenode.checkpoint.edits.dir': '${dfs.namenode.checkpoint.dir}', 'fs.defaultFS': 'hdfs://dev05.hortonworks.com:8020', 'dfs.namenode.checkpoint.period': '21600'}
-        
-    cluster = self.create_cluster()
-    global_config = cluster.get_core_site_config()
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    self.assertEqual(global_config.properties, expected_properties)
-    self.assertEqual(global_config.to_json_dict(), expected_dict_output)
-    
-  def test_get_hdfs_site_config(self):
-    """
-    Get hdfs config
-    """
-    expected_dict_output = {'tag': 'version1', 'type': 'hdfs-site'}
-    expected_properties = {'dfs.namenode.avoid.write.stale.datanode': 'true', 'dfs.webhdfs.enabled': 'true', 'dfs.block.access.token.enable': 'true', 'dfs.datanode.address': '0.0.0.0:50010', 'dfs.cluster.administrators': ' hdfs', 'dfs.datanode.balance.bandwidthPerSec': '6250000', 'dfs.namenode.safemode.threshold-pct': '1.0f', 'dfs.permissions.enabled': 'true', 'dfs.client.read.shortcircuit': 'true', 'dfs.journalnode.edits.dir': '/grid/0/hdfs/journal', 'dfs.blocksize': '134217728', 'dfs.datanode.max.transfer.threads': '1024', 'dfs.datanode.du.reserved': '1', 'dfs.replication': '3', 'dfs.namenode.handler.count': '100', 'fs.permissions.umask-mode': '022', 'dfs.datanode.http.address': '0.0.0.0:50075', 'dfs.datanode.ipc.address': '0.0.0.0:8010', 'dfs.datanode.data.dir': '/hadoop/hdfs/data', 'dfs.namenode.http-address': 'dev05.hortonworks.com:50070', 'dfs.blockreport.initialDelay': '120', 'dfs.datanode.failed.volumes.tolerated': '0', 'dfs.namenode.accesstime.precision': '0', 'dfs.block.local-path-access.user': 'hbase', 'dfs.https.namenode.https-address': 'dev05.hortonworks.com:50470', 'dfs.namenode.secondary.http-address': 'dev05.hortonworks.com:50090', 'dfs.namenode.stale.datanode.interval': '30000', 'dfs.heartbeat.interval': '3', 'dfs.client.read.shortcircuit.streams.cache.size': '4096', 'dfs.permissions.superusergroup': 'hdfs', 'dfs.journalnode.http-address': '0.0.0.0:8480', 'dfs.domain.socket.path': '/var/lib/hadoop-hdfs/dn_socket', 'dfs.namenode.avoid.read.stale.datanode': 'true', 'dfs.hosts.exclude': '/etc/hadoop/conf/dfs.exclude', 'dfs.datanode.data.dir.perm': '750', 'dfs.namenode.write.stale.datanode.ratio': '1.0f', 'dfs.replication.max': '50', 'dfs.namenode.name.dir': '/hadoop/hdfs/namenode'}
-        
-    cluster = self.create_cluster()
-    global_config = cluster.get_hdfs_site_config()
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    self.assertEqual(global_config.properties, expected_properties)
-    self.assertEqual(global_config.to_json_dict(), expected_dict_output)
-    
-  def test_get_mapred_site_config(self):
-    """
-    Get mapred config
-    """
-    expected_dict_output = {'tag': 'version1', 'type': 'mapred-site'}
-    expected_properties = {'mapreduce.jobhistory.address': 'dev05.hortonworks.com:10020', 'mapreduce.reduce.input.buffer.percent': '0.0', 'mapred.jobtracker.maxtasks.per.job': '-1', 'mapreduce.framework.name': 'yarn', 'mapreduce.map.speculative': 'false', 'mapreduce.tasktracker.healthchecker.script.path': 'file:////mapred/jobstatus', 'mapreduce.reduce.shuffle.merge.percent': '0.66', 'mapred.userlog.retain.hours': '24', 'yarn.app.mapreduce.am.resource.mb': '1024', 'mapreduce.reduce.shuffle.parallelcopies': '30', 'mapreduce.map.java.opts': '-Xmx320m', 'mapreduce.task.io.sort.factor': '100', 'mapreduce.application.classpath': '$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*', 'yarn.app.mapreduce.am.command-opts': '-Xmx756m', 'mapreduce.job.reduce.slowstart.completedmaps': '0.05', 'mapreduce.output.fileoutputformat.compress.type': 'BLOCK', 'mapreduce.reduce.speculative': 'false', 'mapreduce.reduce.java.opts': '-Xmx756m', 'mapreduce.am.max-attempts': '2', 'yarn.app.mapreduce.am.admin-command-opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN', 'mapreduce.jobtracker.system.dir': '/mapred/system', 'mapreduce.map.sort.spill.percent': '0.1', 'mapreduce.task.timeout': '600000', 'mapreduce.map.memory.mb': '1536', 'mapreduce.reduce.log.level': 'INFO', 'mapreduce.jobhistory.intermediate-done-dir': '/mr-history/tmp', 'mapreduce.reduce.memory.mb': '2048', 'mapreduce.tasktracker.map.tasks.maximum': '4', 'yarn.app.mapreduce.am.log.level': 'INFO', 'mapreduce.map.log.level': 'INFO', 'mapreduce.shuffle.port': '13562', 'mapred.jobtracker.taskScheduler': 'org.apache.hadoop.mapred.CapacityTaskScheduler', 'mapreduce.admin.user.env': 'LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/`$JAVA_HOME/bin/java -d32 -version &amp;&gt; /dev/null;if [ $? -eq 0 ]; then echo Linux-i386-32; else echo Linux-amd64-64;fi`', 'mapreduce.jobhistory.webapp.address': 'dev05.hortonworks.com:19888', 'mapred.hosts.exclude': '/etc/hadoop/conf/mapred.exclude', 'mapreduce.reduce.shuffle.input.buffer.percent': '0.7', 'yarn.app.mapreduce.am.staging-dir': '/user', 'mapred.hosts': '/etc/hadoop/conf/mapred.include', 'mapreduce.jobhistory.done-dir': '/mr-history/done', 'mapreduce.admin.reduce.child.java.opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN', 'mapreduce.task.io.sort.mb': '200', 'mapred.task.tracker.task-controller': 'org.apache.hadoop.mapred.DefaultTaskController', 'mapreduce.admin.map.child.java.opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN'}
-        
-    cluster = self.create_cluster()
-    global_config = cluster.get_mapred_site_config()
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    self.assertEqual(global_config.properties, expected_properties)
-    self.assertEqual(global_config.to_json_dict(), expected_dict_output)
-    
-  def test_update_global_config(self):
-    """
-    Update global config
-    """
-    http_client_mock = MagicMock()
-    
-    expected_properties = {'dfs_namenode_name_dir': 'abc', 'security_enabled': 'false', 'proxyuser_group': 'users', 'hdfs_log_dir_prefix': '/var/log/hadoop', 'dfs_datanode_data_dir': '/hadoop/hdfs/data', 'namenode_formatted_mark_dir': '/var/run/hadoop/hdfs/namenode/formatted/', 'rrdcached_base_dir': '/var/lib/ganglia/rrds', 'user_group': 'hadoop', 'dfs_namenode_checkpoint_dir': '/hadoop/hdfs/namesecondary', 'dfs_namenode_checkpoint_period': '21600', 'hive_user': 'hive', 'fs_checkpoint_size': '0.5', 'hbase_conf_dir': '/etc/hbase', 'datanode_du_reserved': '1', 'dfs_datanode_http_address': '50075', 'namenode_heapsize': '1024m', 'dfs_webhdfs_enabled': 'true', 'oozie_user': 'oozie', 'hcat_conf_dir': '', 'hadoop_conf_dir': '/etc/hadoop/conf', 'dfs_replication': '3', 'namenode_opt_maxnewsize': '640m', 'apache_artifacts_download_url': '', 'dfs_datanode_address': '50010', 'dfs_exclude': 'dfs.exclude', 'yarn_user': 'yarn', 'gpl_artifacts_download_url': '', 'zk_user': 'zookeeper', 'smokeuser': 'ambari-qa', 'dtnode_heapsize': '1024m', 'gmond_user': 'nobody', 'dfs_datanode_failed_volume_tolerated': '0', 'java64_home': '/usr/jdk/jdk1.6.0_31', 'run_dir': '/var/run/hadoop', 'ganglia_runtime_dir': '/var/run/ganglia/hdp', 'dfs_datanode_data_dir_perm': '750', 'hdfs_enable_shortcircuit_read': 'true', 'hdfs_user': 'hdfs', 'hbase_user': 'hbase', 'webhcat_user': 'hcat', 'gmetad_user': 'nobody', 'dfs_block_local_path_access_user': 'hbase', 'namenode_opt_newsize': '200m', 'mapred_user': 'mapred', 'nagios_group': 'nagios', 'hcat_user': 'hcat', 'hadoop_heapsize': '1024', 'hadoop_pid_dir_prefix': '/var/run/hadoop', 'nagios_user': 'nagios'}
-    expected_put_path = '//clusters/test1'
-    expected_post_request = {'Clusters': {'desired_configs': {'tag': 'version1', 'type': 'global', 'properties':expected_properties}}}   
-    expected_get_path = '//clusters/test1/configurations?type=global&tag=version1'
-    expected_get_request = None
-        
-    cluster = self.create_cluster(http_client_mock)
-    existant_global_config = cluster.get_global_config()
-    existant_global_config.properties['dfs_namenode_name_dir'] = 'abc'
-    cluster.update_global_config(existant_global_config)
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_any_call('PUT', expected_put_path, headers=None, payload=expected_post_request)
-    http_client_mock.invoke.assert_any_call('GET', expected_get_path, headers=None, payload=expected_get_request)
- 
-  def test_update_core_site_config(self):
-    """
-    Update core-site config
-    """
-    http_client_mock = MagicMock()
-    
-    expected_properties = {'io.serializations': 'abc', 'fs.checkpoint.size': '0.5', 'fs.trash.interval': '360', 'hadoop.security.authentication': 'simple', 'io.compression.codecs': 'org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec', 'mapreduce.jobtracker.webinterface.trusted': 'false', 'hadoop.security.authorization': 'false', 'fs.checkpoint.edits.dir': '/hadoop/hdfs/namesecondary', 'ipc.client.connection.maxidletime': '30000', 'ipc.client.connect.max.retries': '50', 'hadoop.security.auth_to_local': '\n        RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n        RULE:[2:$1@$0](jhs@.*)s/.*/mapred/\n        RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n        RULE:[2:$1@$0](hm@.*)s/.*/hbase/\n        RULE:[2:$1@$0](rs@.*)s/.*/hbase/\n        DEFAULT\n    ', 'io.file.buffer.size': '131072', 'dfs.namenode.checkpoint.dir': '/hadoop/hdfs/namesecondary', 'ipc.client.idlethreshold': '8000', 'dfs.namenode.checkpoint.edits.dir': '${dfs.namenode.checkpoint.dir}', 'fs.defaultFS': 'hdfs://dev05.hortonworks.com:8020', 'dfs.namenode.checkpoint.period': '21600'}
-    expected_put_path = '//clusters/test1'
-    expected_post_request = {'Clusters': {'desired_configs': {'tag': 'version1', 'type': 'core-site', 'properties':expected_properties}}}   
-    expected_get_path = '//clusters/test1/configurations?type=core-site&tag=version1'
-    expected_get_request = None
-        
-    cluster = self.create_cluster(http_client_mock)
-    existant_global_config = cluster.get_core_site_config()
-    existant_global_config.properties['io.serializations'] = 'abc'
-    cluster.update_core_site_config(existant_global_config)
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_any_call('PUT', expected_put_path, headers=None, payload=expected_post_request)
-    http_client_mock.invoke.assert_any_call('GET', expected_get_path, headers=None, payload=expected_get_request)
-       
-  def test_update_hdfs_site_config(self):
-    """
-    Update hdfs-site config
-    """
-    http_client_mock = MagicMock()
-    
-    expected_properties = {'dfs.namenode.avoid.write.stale.datanode': 'abc', 'dfs.webhdfs.enabled': 'true', 'dfs.block.access.token.enable': 'true', 'dfs.datanode.address': '0.0.0.0:50010', 'dfs.cluster.administrators': ' hdfs', 'dfs.datanode.balance.bandwidthPerSec': '6250000', 'dfs.namenode.safemode.threshold-pct': '1.0f', 'dfs.permissions.enabled': 'true', 'dfs.client.read.shortcircuit': 'true', 'dfs.journalnode.edits.dir': '/grid/0/hdfs/journal', 'dfs.blocksize': '134217728', 'dfs.datanode.max.transfer.threads': '1024', 'dfs.datanode.du.reserved': '1', 'dfs.replication': '3', 'dfs.namenode.handler.count': '100', 'fs.permissions.umask-mode': '022', 'dfs.datanode.http.address': '0.0.0.0:50075', 'dfs.datanode.ipc.address': '0.0.0.0:8010', 'dfs.datanode.data.dir': '/hadoop/hdfs/data', 'dfs.namenode.http-address': 'dev05.hortonworks.com:50070', 'dfs.blockreport.initialDelay': '120', 'dfs.datanode.failed.volumes.tolerated': '0', 'dfs.namenode.accesstime.precision': '0', 'dfs.block.local-path-access.user': 'hbase', 'dfs.https.namenode.https-address': 'dev05.hortonworks.com:50470', 'dfs.namenode.secondary.http-address': 'dev05.hortonworks.com:50090', 'dfs.namenode.stale.datanode.interval': '30000', 'dfs.heartbeat.interval': '3', 'dfs.client.read.shortcircuit.streams.cache.size': '4096', 'dfs.permissions.superusergroup': 'hdfs', 'dfs.journalnode.http-address': '0.0.0.0:8480', 'dfs.domain.socket.path': '/var/lib/hadoop-hdfs/dn_socket', 'dfs.namenode.avoid.read.stale.datanode': 'true', 'dfs.hosts.exclude': '/etc/hadoop/conf/dfs.exclude', 'dfs.datanode.data.dir.perm': '750', 'dfs.namenode.write.stale.datanode.ratio': '1.0f', 'dfs.replication.max': '50', 'dfs.namenode.name.dir': '/hadoop/hdfs/namenode'}
-    expected_put_path = '//clusters/test1'
-    expected_post_request = {'Clusters': {'desired_configs': {'tag': 'version1', 'type': 'hdfs-site', 'properties':expected_properties}}}   
-    expected_get_path = '//clusters/test1/configurations?type=hdfs-site&tag=version1'
-    expected_get_request = None
-        
-    cluster = self.create_cluster(http_client_mock)
-    existant_global_config = cluster.get_hdfs_site_config()
-    existant_global_config.properties['dfs.namenode.avoid.write.stale.datanode'] = 'abc'
-    cluster.update_hdfs_site_config(existant_global_config)
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_any_call('PUT', expected_put_path, headers=None, payload=expected_post_request)
-    http_client_mock.invoke.assert_any_call('GET', expected_get_path, headers=None, payload=expected_get_request)
-    
-  def test_update_mapred_site_config(self):
-    """
-    Update mapred-site config
-    """
-    http_client_mock = MagicMock()
-    
-    expected_properties = {'mapreduce.jobhistory.address': 'abc', 'mapreduce.reduce.input.buffer.percent': '0.0', 'mapred.jobtracker.maxtasks.per.job': '-1', 'mapreduce.framework.name': 'yarn', 'mapreduce.map.speculative': 'false', 'mapreduce.tasktracker.healthchecker.script.path': 'file:////mapred/jobstatus', 'mapreduce.reduce.shuffle.merge.percent': '0.66', 'mapred.userlog.retain.hours': '24', 'yarn.app.mapreduce.am.resource.mb': '1024', 'mapreduce.reduce.shuffle.parallelcopies': '30', 'mapreduce.map.java.opts': '-Xmx320m', 'mapreduce.task.io.sort.factor': '100', 'mapreduce.application.classpath': '$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*', 'yarn.app.mapreduce.am.command-opts': '-Xmx756m', 'mapreduce.job.reduce.slowstart.completedmaps': '0.05', 'mapreduce.output.fileoutputformat.compress.type': 'BLOCK', 'mapreduce.reduce.speculative': 'false', 'mapreduce.reduce.java.opts': '-Xmx756m', 'mapreduce.am.max-attempts': '2', 'yarn.app.mapreduce.am.admin-command-opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN', 'mapreduce.jobtracker.system.dir': '/mapred/system', 'mapreduce.map.sort.spill.percent': '0.1', 'mapreduce.task.timeout': '600000', 'mapreduce.map.memory.mb': '1536', 'mapreduce.reduce.log.level': 'INFO', 'mapreduce.jobhistory.intermediate-done-dir': '/mr-history/tmp', 'mapreduce.reduce.memory.mb': '2048', 'mapreduce.tasktracker.map.tasks.maximum': '4', 'yarn.app.mapreduce.am.log.level': 'INFO', 'mapreduce.map.log.level': 'INFO', 'mapreduce.shuffle.port': '13562', 'mapred.jobtracker.taskScheduler': 'org.apache.hadoop.mapred.CapacityTaskScheduler', 'mapreduce.admin.user.env': 'LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/`$JAVA_HOME/bin/java -d32 -version &amp;&gt; /dev/null;if [ $? -eq 0 ]; then echo Linux-i386-32; else echo Linux-amd64-64;fi`', 'mapreduce.jobhistory.webapp.address': 'dev05.hortonworks.com:19888', 'mapred.hosts.exclude': '/etc/hadoop/conf/mapred.exclude', 'mapreduce.reduce.shuffle.input.buffer.percent': '0.7', 'yarn.app.mapreduce.am.staging-dir': '/user', 'mapred.hosts': '/etc/hadoop/conf/mapred.include', 'mapreduce.jobhistory.done-dir': '/mr-history/done', 'mapreduce.admin.reduce.child.java.opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN', 'mapreduce.task.io.sort.mb': '200', 'mapred.task.tracker.task-controller': 'org.apache.hadoop.mapred.DefaultTaskController', 'mapreduce.admin.map.child.java.opts': '-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN'}
-    expected_put_path = '//clusters/test1'
-    expected_post_request = {'Clusters': {'desired_configs': {'tag': 'version1', 'type': 'mapred-site', 'properties':expected_properties}}}   
-    expected_get_path = '//clusters/test1/configurations?type=mapred-site&tag=version1'
-    expected_get_request = None
-        
-    cluster = self.create_cluster(http_client_mock)
-    existant_global_config = cluster.get_mapred_site_config()
-    existant_global_config.properties['mapreduce.jobhistory.address'] = 'abc'
-    cluster.update_mapred_site_config(existant_global_config)
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_any_call('PUT', expected_put_path, headers=None, payload=expected_post_request)
-    http_client_mock.invoke.assert_any_call('GET', expected_get_path, headers=None, payload=expected_get_request)
-    
-  def test_create_services(self):
-    """
-    Create services
-    """   
-    http_client_mock = MagicMock()
-    
-    expected_path = '//clusters/test1/services'
-    expected_request = [{'ServiceInfo': {'service_name': 'HDFS'}}, {'ServiceInfo': {'service_name': 'YARN'}}, {'ServiceInfo': {'service_name': 'MAPREDUCEv2'}}, {'ServiceInfo': {'service_name': 'TEZ'}}]
-    
-    cluster = self.create_cluster(http_client_mock)
-    resp = cluster.create_services(['HDFS','YARN','MAPREDUCEv2','TEZ'])
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=expected_request)
-    
-  def test_create_service_components(self):
-    """
-    Create service components
-    """   
-    http_client_mock = MagicMock()
-    
-    expected_path = '//clusters/test1/services/?ServiceInfo/service_name=HDFS'
-    expected_request = {'components': [{'ServiceComponentInfo': {'component_name': u'NODEMANAGER'}}, {'ServiceComponentInfo': {'component_name': u'RESOURCEMANAGER'}}, {'ServiceComponentInfo': {'component_name': u'YARN_CLIENT'}}]}
-    
-    cluster = self.create_cluster(http_client_mock)
-    resp = cluster.create_service_components("2.0.5", "HDFS")
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=expected_request)
-    
-  def test_create_service_component(self):
-    """
-    Create service component
-    """   
-    http_client_mock = MagicMock()
-    
-    expected_path = '//clusters/test1/services/HDFS/components/NAMENODE'
-    
-    cluster = self.create_cluster(http_client_mock)
-    resp = cluster.create_service_component("2.0.5", "HDFS","NAMENODE")
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=None)
-    
-  def test_create_hosts(self):
-    """
-    Create cluster hosts
-    """   
-    http_client_mock = MagicMock()
-    
-    expected_path = '//clusters/test1/hosts'
-    expected_request = [{'Hosts': {'ip': '1.2.3.4', 'host_name': 'hostname01', 'rack_info': '/default-rack'}}, {'Hosts': {'ip': '2.3.1.22', 'host_name': 'hostname02', 'rack_info': 'rack'}}]
-        
-    cluster = self.create_cluster(http_client_mock)
-    host_list = [HostModel(None, 'hostname01','1.2.3.4'), HostModel(None, 'hostname02','2.3.1.22','rack')]
-    resp = cluster.create_hosts(host_list)
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=expected_request)
-    
-  def test_create_host(self):
-    """
-    Create cluster host
-    """   
-    http_client_mock = MagicMock()
-    
-    expected_path = '//clusters/test1/hosts'
-    expected_request = [{'Hosts': {'ip': '1.2.3.4', 'host_name': 'hostname01', 'rack_info': '/default-rack'}}]
-            
-    cluster = self.create_cluster(http_client_mock)
-    resp = cluster.create_host('hostname01','1.2.3.4')
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=expected_request)
-    
-    
-  def test_delete_host(self):
-    """
-    Delete cluster host
-    """   
-    http_client_mock = MagicMock()
-    
-    expected_path = '//clusters/test1/hosts/hostname01'
-            
-    cluster = self.create_cluster(http_client_mock)
-    resp = cluster.delete_host('hostname01')
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_called_with('DELETE', expected_path, headers=None, payload=None)
-    
-    
-  def test_exceptions(self):
-    """
-    Test exceptions from ambari_client.core.errors
-    """
-    cluster = self.create_cluster()
-    
-    try:
-      cluster.delete_host('deleted_nonexistant_cluster')
-      self.fail('Exception should have been thrown!')
-    except BadRequest, ex:
-      self.assertEquals(str(ex), 'exception: 400. Attempted to add unknown hosts to a cluster.  These hosts have not been registered with the server: dev05')
-    except Exception, ex:
-      self.fail('Wrong exception thrown!')
-    
-  def test_start_all_services(self):
-    """
-    Start all services
-    """   
-    http_client_mock = MagicMock()
-    
-    expected_path = '//clusters/test1/services?ServiceInfo/state=INSTALLED&params/run_smoke_test=true&params/reconfigure_client=false'
-    expected_request = {'RequestInfo': {'context': 'Start All Services'}, 'Body': {'ServiceInfo': {'state': 'STARTED'}}}
-            
-    cluster = self.create_cluster(http_client_mock)
-    resp = cluster.start_all_services(True)
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_request)
-    
-  def test_stop_all_services(self):
-    """
-    Stop all services
-    """   
-    http_client_mock = MagicMock()
-    
-    expected_path = '//clusters/test1/services?ServiceInfo'
-    expected_request = {'RequestInfo': {'context': 'Stop All Services'}, 'Body': {'ServiceInfo': {'state': 'INSTALLED'}}}
-            
-    cluster = self.create_cluster(http_client_mock)
-    resp = cluster.stop_all_services()
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_request)
-    
-    
-  def test_install_all_services(self):
-    """
-    Install all services
-    """   
-    http_client_mock = MagicMock()
-    
-    expected_path = '//clusters/test1/services?ServiceInfo/state=INSTALLED'
-    expected_request = {'RequestInfo': {'context': 'Install Services'}, 'Body': {'ServiceInfo': {'state': 'INSTALLED'}}}
-            
-    cluster = self.create_cluster(http_client_mock)
-    resp = cluster.install_all_services()
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_request)
-    
-  def test_add_config(self):
-    """
-    Set desired configurations
-    """   
-    http_client_mock = MagicMock()
-    
-    propr_dict = {"dfs_name_dir":"/data/1/hadoop/hdfs/namenode,/data/2/hadoop/hdfs/namenode,/data/3/hadoop/hdfs/namenode,/data/4/hadoop/hdfs/namenode,/data/5/hadoop/hdfs/namenode,/data/6/hadoop/hdfs/namenode,/data/7/hadoop/hdfs/namenode,/data/8/hadoop/hdfs/namenode", "namenode_heapsize":"1024m", "namenode_opt_newsize":"200m", "fs_checkpoint_dir":"/data/1/hadoop/hdfs/namesecondary", "dfs_data_dir":"/data/1/hadoop/hdfs/data,/data/2/hadoop/hdfs/data,/data/3/hadoop/hdfs/data,/data/4/hadoop/hdfs/data,/data/5/hadoop/hdfs/data,/data/6/hadoop/hdfs/data,/data/7/hadoop/hdfs/data,/data/8/hadoop/hdfs/data,/data/9/hadoop/hdfs/data,/data/10/hadoop/hdfs/data", "dtnode_heapsize":"1024m", "dfs_datanode_failed_volume_tolerated":"0", "dfs_webhdfs_enabled":"true", "hadoop_heapsize":"1024", "datanode_du_reserved":"0", "fs_checkpoint_period":"21600", "fs_checkpoint_size":"67108864", "hdfs_log_dir_prefix":"/var/log/hadoop", "hadoop_pid_dir_prefix":"/var/run/hadoop", "namenode_opt_maxnewsize":"200m", "dfs_exclude":"dfs.exclude", "dfs_include":"dfs.include", "dfs_replication":"3", "dfs_block_local_path_access_user":"hbase", "dfs_datanode_data_dir_perm":"750", "security_enabled":"false", "namenode_formatted_mark_dir":"/var/run/hadoop/hdfs/namenode/formatted/", "hcat_conf_dir":"", "jtnode_opt_newsize":"200m", "jtnode_opt_maxnewsize":"200m", "jtnode_heapsize":"1024m", "mapred_local_dir":"/data/1/hadoop/mapred,/data/2/hadoop/mapred,/data/3/hadoop/mapred,/data/4/hadoop/mapred,/data/5/hadoop/mapred,/data/6/hadoop/mapred,/data/7/hadoop/mapred,/data/8/hadoop/mapred,/data/9/hadoop/mapred,/data/10/hadoop/mapred", "mapred_map_tasks_max":"4", "mapred_red_tasks_max":"2", "mapred_child_java_opts_sz":"768", "scheduler_name":"org.apache.hadoop.mapred.CapacityTaskScheduler", "mapred_cluster_map_mem_mb":"1536", "mapred_cluster_red_mem_mb":"2048", "mapred_cluster_max_map_mem_mb":"6144", "mapred_cluster_max_red_mem_mb":"4096", "mapred_job_map_mem_mb":"1536", "mapred_job_red_mem_mb":"2048", "io_sort_mb":"200", "io_sort_spill_percent":"0.9", "mapreduce_userlog_retainhours":"24", "maxtasks_per_job":"-1", "lzo_enabled":"true", "snappy_enabled":"true", "rca_enabled":"true", "mapred_system_dir":"/mapred/system", "mapred_hosts_exclude":"mapred.exclude", "mapred_hosts_include":"mapred.include", "mapred_jobstatus_dir":"file:////mapred/jobstatus", "nagios_web_login":"nagiosadmin", "nagios_web_password":"admin", "nagios_contact":"admin@admin.com", "nagios_group":"nagios", "hbase_conf_dir":"/etc/hbase", "proxyuser_group":"users", "dfs_datanode_address":"50010", "dfs_datanode_http_address":"50075", "gpl_artifacts_download_url":"", "apache_artifacts_download_url":"", "ganglia_runtime_dir":"/var/run/ganglia/hdp", "java64_home":"/usr/jdk/jdk1.6.0_31", "run_dir":"/var/run/hadoop", "hadoop_conf_dir":"/etc/hadoop", "hdfs_user":"hdfs", "mapred_user":"mapred", "hbase_user":"hbase", "hive_user":"hive", "hcat_user":"hcat", "webhcat_user":"hcat", "oozie_user":"oozie", "zk_user":"zookeeper", "gmetad_user":"nobody", "gmond_user":"nobody", "nagios_user":"nagios", "smokeuser":"ambari-qa", "user_group":"hadoop", "rrdcached_base_dir":"/var/lib/ganglia/rrds"}
-    expected_path = '//clusters/test1'
-    expected_request = {'Clusters': {'desired_configs': {'tag':'version1', 'type':'global', 'properties':propr_dict}}}
-                
-    cluster = self.create_cluster(http_client_mock)
-    resp = cluster.add_config("global","version1",propr_dict)
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_request)
-    
-  def test_create_config(self):
-    """
-    Add a configuration
-    """   
-    http_client_mock = MagicMock()
-    
-    propr_dict = {"dfs_name_dir":"/data/1/hadoop/hdfs/namenode,/data/2/hadoop/hdfs/namenode,/data/3/hadoop/hdfs/namenode,/data/4/hadoop/hdfs/namenode,/data/5/hadoop/hdfs/namenode,/data/6/hadoop/hdfs/namenode,/data/7/hadoop/hdfs/namenode,/data/8/hadoop/hdfs/namenode", "namenode_heapsize":"1024m", "namenode_opt_newsize":"200m", "fs_checkpoint_dir":"/data/1/hadoop/hdfs/namesecondary", "dfs_data_dir":"/data/1/hadoop/hdfs/data,/data/2/hadoop/hdfs/data,/data/3/hadoop/hdfs/data,/data/4/hadoop/hdfs/data,/data/5/hadoop/hdfs/data,/data/6/hadoop/hdfs/data,/data/7/hadoop/hdfs/data,/data/8/hadoop/hdfs/data,/data/9/hadoop/hdfs/data,/data/10/hadoop/hdfs/data", "dtnode_heapsize":"1024m", "dfs_datanode_failed_volume_tolerated":"0", "dfs_webhdfs_enabled":"true", "hadoop_heapsize":"1024", "datanode_du_reserved":"0", "fs_checkpoint_period":"21600", "fs_checkpoint_size":"67108864", "hdfs_log_dir_prefix":"/var/log/hadoop", "hadoop_pid_dir_prefix":"/var/run/hadoop", "namenode_opt_maxnewsize":"200m", "dfs_
 exclude":"dfs.exclude", "dfs_include":"dfs.include", "dfs_replication":"3", "dfs_block_local_path_access_user":"hbase", "dfs_datanode_data_dir_perm":"750", "security_enabled":"false", "namenode_formatted_mark_dir":"/var/run/hadoop/hdfs/namenode/formatted/", "hcat_conf_dir":"", "jtnode_opt_newsize":"200m", "jtnode_opt_maxnewsize":"200m", "jtnode_heapsize":"1024m", "mapred_local_dir":"/data/1/hadoop/mapred,/data/2/hadoop/mapred,/data/3/hadoop/mapred,/data/4/hadoop/mapred,/data/5/hadoop/mapred,/data/6/hadoop/mapred,/data/7/hadoop/mapred,/data/8/hadoop/mapred,/data/9/hadoop/mapred,/data/10/hadoop/mapred", "mapred_map_tasks_max":"4", "mapred_red_tasks_max":"2", "mapred_child_java_opts_sz":"768", "scheduler_name":"org.apache.hadoop.mapred.CapacityTaskScheduler", "mapred_cluster_map_mem_mb":"1536", "mapred_cluster_red_mem_mb":"2048", "mapred_cluster_max_map_mem_mb":"6144", "mapred_cluster_max_red_mem_mb":"4096", "mapred_job_map_mem_mb":"1536", "mapred_job_red_mem_mb":"2048", "io_sort_mb":"
 200", "io_sort_spill_percent":"0.9", "mapreduce_userlog_retainhours":"24", "maxtasks_per_job":"-1", "lzo_enabled":"true", "snappy_enabled":"true", "rca_enabled":"true", "mapred_system_dir":"/mapred/system", "mapred_hosts_exclude":"mapred.exclude", "mapred_hosts_include":"mapred.include", "mapred_jobstatus_dir":"file:////mapred/jobstatus", "nagios_web_login":"nagiosadmin", "nagios_web_password":"admin", "nagios_contact":"admin@admin.com", "nagios_group":"nagios", "hbase_conf_dir":"/etc/hbase", "proxyuser_group":"users", "dfs_datanode_address":"50010", "dfs_datanode_http_address":"50075", "gpl_artifacts_download_url":"", "apache_artifacts_download_url":"", "ganglia_runtime_dir":"/var/run/ganglia/hdp", "java64_home":"/usr/jdk/jdk1.6.0_31", "run_dir":"/var/run/hadoop", "hadoop_conf_dir":"/etc/hadoop", "hdfs_user":"hdfs", "mapred_user":"mapred", "hbase_user":"hbase", "hive_user":"hive", "hcat_user":"hcat", "webhcat_user":"hcat", "oozie_user":"oozie", "zk_user":"zookeeper", "gmetad_user":
 "nobody", "gmond_user":"nobody", "nagios_user":"nagios", "smokeuser":"ambari-qa", "user_group":"hadoop", "rrdcached_base_dir":"/var/lib/ganglia/rrds"} 
-    expected_path = '//clusters/test1'
-    expected_request = {'tag':'version1', 'type':'global', 'properties':propr_dict}
-                
-    cluster = self.create_cluster(http_client_mock)
-    resp = cluster.create_config("global","version1",propr_dict)
-    
-    self.assertEqual(cluster.cluster_name, "test1")
-    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_request)
-    
-    
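
The deleted tests above pin down the cluster-level API surface: install_all_services(), start_all_services() (optionally with smoke tests), stop_all_services(), and add_config()/create_config() for desired configurations. A minimal usage sketch against a live server, assuming the pre-removal ambari_client package, an Ambari server on localhost:8080, and an existing cluster named "test1":

    from ambari_client.ambari_api import AmbariClient

    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
    cluster = client.get_cluster("test1")

    # Install everything, then start it; True also requests the smoke tests.
    cluster.install_all_services()
    cluster.start_all_services(True)

    # Push a new "global" configuration version, then stop all services.
    cluster.add_config("global", "version1", {"namenode_heapsize": "1024m"})
    cluster.stop_all_services()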

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/test/python/TestComponentModel.py
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/TestComponentModel.py b/ambari-client/src/test/python/TestComponentModel.py
deleted file mode 100644
index b3f1530..0000000
--- a/ambari-client/src/test/python/TestComponentModel.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-import logging
-
-from mock.mock import MagicMock, patch
-from HttpClientInvoker import HttpClientInvoker
-
-from ambari_client.ambari_api import AmbariClient
-
-import unittest
-
-class TestComponentModel(unittest.TestCase):
-
-  def setUp(self):
-    http_client_logger = logging.getLogger()
-    http_client_logger.info('Running test:' + self.id())
-
-  def create_component(self, http_client_mock = MagicMock()):
-    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
-    client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
-    cluster = client.get_cluster('test1')
-    host = cluster.get_host('myhost')
-    component = host.get_host_component("DATANODE")
-    return component
-
-  def test_component_get_metrics(self):
-    http_client_mock = MagicMock()
-
-    expected_path = '//clusters/cl1/hosts/myhost/host_components/DATANODE?fields=metrics'
-    expected_json_output = {u'HostRoles': {u'cluster_name': u'cl1', u'host_name': u'myhost', u'component_name': u'DATANODE'}, u'metrics': {u'load': {u'load_one': 0.0125555555556, u'load_five': 0.059277777777800002, u'load_fifteen': 0.069222222222199994}, u'ugi': {u'loginSuccess_avg_time': 0.0, u'loginFailure_avg_time': 0.0, u'loginSuccess_num_ops': 0.0, u'loginFailure_num_ops': 0.0}, u'network': {u'bytes_in': 30989.887416699999, u'pkts_in': 44.982222222200001, u'pkts_out': 214.35891666699999, u'bytes_out': 98799.674277800004}, u'process': {u'proc_total': 682.39722222199998, u'proc_run': 2.0}, u'dfs': {u'datanode': {u'replaceBlockOp_num_ops': 0.0, u'replaceBlockOp_avg_time': 0.0, u'blockChecksumOp_avg_time': 0.0, u'copyBlockOp_avg_time': 0.0, u'copyBlockOp_num_ops': 0.0, u'heartBeats_avg_time': 1.69166666667, u'writes_from_local_client': 0.0, u'blockReports_avg_time': 6.0, u'blocks_written': 0.0, u'writeBlockOp_num_ops': 0.0, u'bytes_read': 0.0, u'writeBlockOp_avg_time': 13.896907216500001, u'writes_from_remote_client': 0.0, u'blocks_read': 0.0, u'readBlockOp_avg_time': 0.0, u'reads_from_remote_client': 0.0, u'block_verification_failures': 0.0, u'reads_from_local_client': 0.0, u'blocks_removed': 0.0, u'blocks_get_local_pathinfo': 0.0, u'blockReports_num_ops': 0.0, u'heartBeats_num_ops': 0.33648148148099999, u'blocks_verified': 0.0, u'bytes_written': 0.0, u'readBlockOp_num_ops': 0.0, u'blocks_replicated': 0.0, u'blockChecksumOp_num_ops': 0.0}, u'FSNamesystem': {u'VolumeInfo': u'{"/hadoop/hdfs/data/current":{"freeSpace":495195869184,"usedSpace":345120768,"reservedSpace":1073741824}}', u'HttpPort': None, u'RpcPort': u'8010', u'NamenodeAddress': u'{"myhost":"BP-442795920-192.168.64.101-1383132565020"}', u'Version': u'2.2.0.2.0.6.0-76'}}, u'rpc': {u'NumOpenConnections': 0.0, u'RpcProcessingTime_avg_time': 0.0, u'rpcAuthorizationFailures': 0.0, u'callQueueLen': 0.0, u'RpcProcessingTime_num_ops': 0.0, u'RpcQueueTime_avg_time': 0.0, u'rpcAuthorizationSuccesses': 0.0, u'rpcAuthenticationSuccesses': 0.0, u'rpcAuthenticationFailures': 0.0, u'ReceivedBytes': 0.0, u'RpcQueueTime_num_ops': 0.0, u'SentBytes': 0.0}, u'boottime': 1383131209.0, u'jvm': {u'NonHeapMemoryMax': 136314880, u'logWarn': 0.0, u'gcCount': 0.011111111111100001, u'threadsRunnable': 8.0416666666700003, u'memHeapCommittedM': 28.5625, u'threadsWaiting': 18.0, u'NonHeapMemoryUsed': 30798600, u'threadsTimedWaiting': 8.9166666666700003, u'threadsNew': 0.0, u'HeapMemoryUsed': 11395264, u'memHeapUsedM': 11.7175731111, u'memNonHeapUsedM': 29.360076750000001, u'threadsTerminated': 0.0, u'logInfo': 0.0, u'logError': 0.0, u'HeapMemoryMax': 1037959168, u'threadsBlocked': 0.0, u'logFatal': 0.0, u'memNonHeapCommittedM': 29.625, u'gcTimeMillis': 388}, u'memory': {u'mem_cached': 160191.85555599999, u'swap_free': 2593920.0, u'mem_free': 183983.85555599999, u'mem_buffers': 23914.266666700001, u'mem_shared': 0.0, u'swap_total': 2621432.0, u'mem_total': 1922680.0}, u'disk': {u'disk_total': 525.78999999999996, u'disk_free': 495.64499999999998, u'part_max_used': 11.6}, u'cpu': {u'cpu_idle': 85.730000000000004, u'cpu_num': 1.0, u'cpu_wio': 0.0041666666666699999, u'cpu_user': 7.4288888888900004, u'cpu_aidle': 0.0, u'cpu_system': 6.8458333333299999, u'cpu_speed': 2967.0, u'cpu_nice': 0.0}}, u'host': {u'href': u'http://192.168.64.101:8080/api/v1/clusters/cl1/hosts/myhost'}, u'href': u'http://192.168.64.101:8080/api/v1/clusters/cl1/hosts/myhost/host_components/DATANODE?fields=metrics'}
-
-    component = self.create_component(http_client_mock)
-    metrics_json = component.get_metrics()
-
-    self.assertEqual(expected_json_output,metrics_json)
-    http_client_mock.invoke.assert_called_with('GET', expected_path, headers=None, payload=None)
-    pass
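
The expected JSON in test_component_get_metrics doubles as a reference for the shape get_metrics() returns for a host component. A short sketch of pulling one DataNode figure out of a live response, assuming the same cluster and host names the tests use:

    from ambari_client.ambari_api import AmbariClient

    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
    host = client.get_cluster("test1").get_host("myhost")
    datanode = host.get_host_component("DATANODE")

    metrics = datanode.get_metrics()
    # e.g. the DataNode's average heartbeat latency
    print metrics["metrics"]["dfs"]["datanode"]["heartBeats_avg_time"]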

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/test/python/TestHostModel.py
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/TestHostModel.py b/ambari-client/src/test/python/TestHostModel.py
deleted file mode 100644
index 8b3b531..0000000
--- a/ambari-client/src/test/python/TestHostModel.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-import logging
-
-from mock.mock import MagicMock, patch
-from HttpClientInvoker import HttpClientInvoker
-
-from ambari_client.ambari_api import AmbariClient
-
-import unittest
-
-class TestHostModel(unittest.TestCase):
-
-  def setUp(self):
-    http_client_logger = logging.getLogger()
-    http_client_logger.info('Running test:' + self.id())
-
-  def create_host(self, http_client_mock = MagicMock()):
-    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
-    client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
-    cluster = client.get_cluster('test1')
-    host = cluster.get_host('myhost')
-    return host
-
-  def test_get_host_components(self):
-    http_client_mock = MagicMock()
-
-    expected_path = '//clusters/test1/hosts/myhost/host_components?fields=HostRoles/state'
-
-    host = self.create_host(http_client_mock)
-    host_components = host.get_host_components()
-
-    self.assertEqual(host_components[0].component_name,"DATANODE")
-    self.assertEqual(host_components[0].state,"STARTED")
-    self.assertEqual(host_components[3].component_name,"HBASE_MASTER")
-    self.assertEqual(host_components[3].state,"STARTED")
-    http_client_mock.invoke.assert_called_with('GET', expected_path, headers=None, payload=None)
-
-  def test_get_host_component(self):
-    http_client_mock = MagicMock()
-
-    expected_path = '//clusters/test1/hosts/myhost/host_components/DATANODE'
-
-    host =  self.create_host(http_client_mock)
-    component = host.get_host_component("DATANODE")
-
-    self.assertEqual(component.component_name,"DATANODE")
-    self.assertEqual(component.state,"STARTED")
-    self.assertEqual(component.host_name,"myhost")
-
-    http_client_mock.invoke.assert_called_with('GET', expected_path, headers=None, payload=None)
-
-  def test_assign_role(self):
-    http_client_mock = MagicMock()
-
-    expected_path = '//clusters/test1/hosts?Hosts/host_name=myhost'
-    expected_payload = {'host_components': [{'HostRoles': {'component_name': 'GANGLIA_SERVER'}}]}
-
-    host =  self.create_host(http_client_mock)
-    status = host.assign_role("GANGLIA_SERVER")
-
-    self.assertEqual(status.status, 201)
-    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=expected_payload)
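
test_assign_role shows that adding a component to a host is a POST against the cluster's hosts collection, with HTTP 201 signalling success. A sketch under the same assumptions as above (live server, cluster "test1", registered host "myhost"):

    from ambari_client.ambari_api import AmbariClient

    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
    host = client.get_cluster("test1").get_host("myhost")

    status = host.assign_role("GANGLIA_SERVER")
    if not status.is_error():
        print "GANGLIA_SERVER assigned to myhost"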

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/test/python/TestServiceModel.py
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/TestServiceModel.py b/ambari-client/src/test/python/TestServiceModel.py
deleted file mode 100644
index 6933701..0000000
--- a/ambari-client/src/test/python/TestServiceModel.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-import logging
-
-from mock.mock import MagicMock, patch
-from HttpClientInvoker import HttpClientInvoker
-
-from ambari_client.ambari_api import AmbariClient
-
-import unittest
-
-class TestServiceModel(unittest.TestCase):
-
-  def setUp(self):
-    http_client_logger = logging.getLogger()
-    http_client_logger.info('Running test:' + self.id())
-
-  def create_service(self, http_client_mock = MagicMock()):
-    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
-    client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
-    cluster = client.get_cluster('test1')
-    service = cluster.get_service('GANGLIA')
-    return service
-
-  def test_start(self):
-    http_client_mock = MagicMock()
-
-    expected_path = '//clusters/test1/services/GANGLIA'
-    expected_payload = {'ServiceInfo': {'state': 'STARTED'}}
-
-    service = self.create_service(http_client_mock)
-    status = service.start()
-
-    self.assertEqual(status.get_request_path(), 'clusters/test1/requests/19')
-    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_payload)
-
-  def test_stop(self):
-    http_client_mock = MagicMock()
-
-    expected_path = '//clusters/test1/services/GANGLIA'
-    expected_payload = {"ServiceInfo": {"state": "INSTALLED"}}
-
-    service = self.create_service(http_client_mock)
-    status = service.stop()
-
-    self.assertEqual(status.get_request_path(), 'clusters/test1/requests/19')
-    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_payload)
-
-  def test_install(self):
-    http_client_mock = MagicMock()
-
-    expected_path = '//clusters/test1/services/GANGLIA'
-    expected_payload = {"ServiceInfo": {"state": "INSTALLED"}}
-
-    service = self.create_service(http_client_mock)
-    status = service.install()
-
-    self.assertEqual(status.get_request_path(), 'clusters/test1/requests/19')
-    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_payload)
-
-  def test_get_service_components(self):
-    http_client_mock = MagicMock()
-
-    expected_path = '//clusters/test1/services/GANGLIA/components?fields=*'
-
-    service = self.create_service(http_client_mock)
-    components = service.get_service_components()
-
-    self.assertEqual(components[0].component_name, "GANGLIA_MONITOR")
-    self.assertEqual(components[0].state, "STARTED")
-    self.assertEqual(components[1].component_name, "GANGLIA_SERVER")
-    self.assertEqual(components[1].state, "INSTALLED")
-
-    http_client_mock.invoke.assert_called_with('GET', expected_path, headers=None, payload=None)
-
-  def test_get_service_component(self):
-    http_client_mock = MagicMock()
-
-    expected_path = '//clusters/test1/services/GANGLIA/components/GANGLIA_SERVER'
-
-    service = self.create_service(http_client_mock)
-    component = service.get_service_component("GANGLIA_SERVER")
-
-    self.assertEqual(component.component_name, "GANGLIA_SERVER")
-    self.assertEqual(component.service_name, "GANGLIA")
-    self.assertEqual(component.state, "STARTED")
-
-    http_client_mock.invoke.assert_called_with('GET', expected_path, headers=None, payload=None)
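
These tests establish that start(), stop(), and install() are all PUTs of a desired ServiceInfo state, each returning a status whose get_request_path() identifies the asynchronous request Ambari created for the transition. A sketch of bouncing GANGLIA and noting the request to poll:

    from ambari_client.ambari_api import AmbariClient

    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
    service = client.get_cluster("test1").get_service("GANGLIA")

    status = service.stop()
    print status.get_request_path()   # e.g. clusters/test1/requests/19

    service.start()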

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/test/python/TestStatusModel.py
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/TestStatusModel.py b/ambari-client/src/test/python/TestStatusModel.py
deleted file mode 100644
index 7f88771..0000000
--- a/ambari-client/src/test/python/TestStatusModel.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-import logging
-
-from ambari_client.model.status import StatusModel
-from mock.mock import MagicMock, patch
-from HttpClientInvoker import HttpClientInvoker
-
-from ambari_client.ambari_api import AmbariClient
-import unittest
-
-class TestStatusModel(unittest.TestCase):
-
-  def setUp(self):
-    http_client_logger = logging.getLogger()
-    http_client_logger.info('Running test:' + self.id())
-
-  def create_service(self, http_client_mock = MagicMock()):
-    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
-    client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
-    cluster = client.get_cluster('test1')
-    service = cluster.get_service('GANGLIA')
-    return service
-
-  def create_client(self, http_client_mock = MagicMock()):
-    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
-    client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
-    return client
-
-  def test_get_request_path(self):
-    http_client_mock = MagicMock()
-
-    expected_payload = {'ServiceInfo': {'state': 'INSTALLED'}}
-    expected_path = '//clusters/test1/services/GANGLIA'
-    expected_request_path = 'clusters/test1/requests/19'
-
-    service = self.create_service(http_client_mock)
-    status = service.stop()
-
-    self.assertEqual(status.get_request_path(), expected_request_path)
-    http_client_mock.invoke.assert_called_with('PUT', expected_path, headers=None, payload=expected_payload)
-
-
-  def test_is_error(self):
-    error_model = StatusModel(None, 400)
-    ok_model =  StatusModel(None, 201)
-
-    self.assertTrue(error_model.is_error())
-    self.assertFalse(ok_model.is_error())
-
-  def test_get_bootstrap_path(self):
-    http_client_mock = MagicMock()
-
-    ssh_key = 'abc!@#$%^&*()_:"|<>?[];\'\\./'
-    host_list = ['dev05.hortonworks.com','dev06.hortonworks.com']
-    expected_path = '//bootstrap'
-    expected_payload = {'hosts': ['dev05.hortonworks.com', 'dev06.hortonworks.com'], 'sshKey': 'abc!@#$%^&*()_:"|<>?[];\\\'\\\\./'}
-    expected_headers = {'Content-Type': 'application/json'}
-    expected_bootstrap_path = '/bootstrap/5'
-
-    client = self.create_client(http_client_mock)
-    resp = client.bootstrap_hosts(host_list, ssh_key)
-
-    self.assertEqual(resp.get_bootstrap_path(),expected_bootstrap_path)
-    http_client_mock.invoke.assert_called_with('POST', expected_path, headers=expected_headers, payload=expected_payload)
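
test_get_bootstrap_path shows bootstrap_hosts() POSTing the host list and SSH private key to /bootstrap, with get_bootstrap_path() on the response locating the bootstrap status resource. A sketch in which the host names and key path are placeholders:

    from ambari_client.ambari_api import AmbariClient

    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)

    ssh_key = open("/root/.ssh/id_rsa").read()  # placeholder key location
    resp = client.bootstrap_hosts(["host1.example.com", "host2.example.com"], ssh_key)

    if not resp.is_error():
        print resp.get_bootstrap_path()   # e.g. /bootstrap/5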

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/test/python/json/ambariclient_bootstrap_hosts.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/ambariclient_bootstrap_hosts.json b/ambari-client/src/test/python/json/ambariclient_bootstrap_hosts.json
deleted file mode 100644
index 1e60bd0..0000000
--- a/ambari-client/src/test/python/json/ambariclient_bootstrap_hosts.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-  "status" : "OK",
-  "log" : "Running Bootstrap now.",
-  "requestId" : 5
-}
\ No newline at end of file
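
This fixture is the canned response the bootstrap test replays; its requestId of 5 is what get_bootstrap_path() turns into the asserted '/bootstrap/5'. A rough sketch of that mapping, inferred from the fixture and the test rather than copied from the client:

    def bootstrap_path(response_json):
        # {"requestId": 5} in the fixture yields "/bootstrap/5"
        return "/bootstrap/%d" % response_json["requestId"]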

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/test/python/json/ambariclient_get_all_clusters.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/ambariclient_get_all_clusters.json b/ambari-client/src/test/python/json/ambariclient_get_all_clusters.json
deleted file mode 100644
index abbf619..0000000
--- a/ambari-client/src/test/python/json/ambariclient_get_all_clusters.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "href" : "http://localhost:8080/api/v1/clusters",
-  "items" : [
-    {
-      "href" : "http://localhost:8080/api/v1/clusters/test1",
-      "Clusters" : {
-        "cluster_name" : "test1",
-        "version" : "HDP-1.2.1"
-      }
-    }
-  ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/test/python/json/ambariclient_get_all_hosts.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/ambariclient_get_all_hosts.json b/ambari-client/src/test/python/json/ambariclient_get_all_hosts.json
deleted file mode 100644
index 2aca5c4..0000000
--- a/ambari-client/src/test/python/json/ambariclient_get_all_hosts.json
+++ /dev/null
@@ -1,77 +0,0 @@
-{
-  "href" : "http://localhost:8080/api/v1/hosts",
-  "items" : [
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/apspal44-83",
-      "Hosts" : {
-        "host_name" : "apspal44-83"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/apspal44-84",
-      "Hosts" : {
-        "host_name" : "apspal44-84"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/apspal44-85",
-      "Hosts" : {
-        "host_name" : "apspal44-85"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/apspal44-86",
-      "Hosts" : {
-        "host_name" : "apspal44-86"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/apspal44-87",
-      "Hosts" : {
-        "host_name" : "apspal44-87"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/apspal44-88",
-      "Hosts" : {
-        "host_name" : "apspal44-88"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/apspal44-89",
-      "Hosts" : {
-        "host_name" : "apspal44-89"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/r01hn01",
-      "Hosts" : {
-        "host_name" : "r01hn01"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/r01mgt",
-      "Hosts" : {
-        "host_name" : "r01mgt"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/r01wn01",
-      "Hosts" : {
-        "host_name" : "r01wn01"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/r01wn02",
-      "Hosts" : {
-        "host_name" : "r01wn02"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/hosts/r01wn03",
-      "Hosts" : {
-        "host_name" : "r01wn03"
-      }
-    }
-  ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/11dd9df8/ambari-client/src/test/python/json/ambariclient_get_components.json
----------------------------------------------------------------------
diff --git a/ambari-client/src/test/python/json/ambariclient_get_components.json b/ambari-client/src/test/python/json/ambariclient_get_components.json
deleted file mode 100644
index 56bb1b2..0000000
--- a/ambari-client/src/test/python/json/ambariclient_get_components.json
+++ /dev/null
@@ -1,53 +0,0 @@
-{
-  "href" : "http://localhost:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HDFS/serviceComponents?fields=*",
-  "items" : [
-    {
-      "href" : "http://localhost:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HDFS/serviceComponents/DATANODE",
-      "StackServiceComponents" : {
-        "component_category" : "SLAVE",
-        "component_name" : "DATANODE",
-        "is_client" : false,
-        "is_master" : false,
-        "service_name" : "HDFS",
-        "stack_name" : "HDP",
-        "stack_version" : "1.3.0"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HDFS/serviceComponents/HDFS_CLIENT",
-      "StackServiceComponents" : {
-        "component_category" : "CLIENT",
-        "component_name" : "HDFS_CLIENT",
-        "is_client" : true,
-        "is_master" : false,
-        "service_name" : "HDFS",
-        "stack_name" : "HDP",
-        "stack_version" : "1.3.0"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HDFS/serviceComponents/NAMENODE",
-      "StackServiceComponents" : {
-        "component_category" : "MASTER",
-        "component_name" : "NAMENODE",
-        "is_client" : false,
-        "is_master" : true,
-        "service_name" : "HDFS",
-        "stack_name" : "HDP",
-        "stack_version" : "1.3.0"
-      }
-    },
-    {
-      "href" : "http://localhost:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HDFS/serviceComponents/SECONDARY_NAMENODE",
-      "StackServiceComponents" : {
-        "component_category" : "MASTER",
-        "component_name" : "SECONDARY_NAMENODE",
-        "is_client" : false,
-        "is_master" : true,
-        "service_name" : "HDFS",
-        "stack_name" : "HDP",
-        "stack_version" : "1.3.0"
-      }
-    }
-  ]
-}
\ No newline at end of file
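
This fixture also documents the shape of the stack service-components response: each item carries a component_category plus is_client/is_master flags. A self-contained sketch of filtering it for master components with nothing but the json module:

    import json

    with open("ambariclient_get_components.json") as f:
        data = json.load(f)

    masters = [item["StackServiceComponents"]["component_name"]
               for item in data["items"]
               if item["StackServiceComponents"]["is_master"]]
    # -> ['NAMENODE', 'SECONDARY_NAMENODE']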