Posted to commits@ambari.apache.org by jl...@apache.org on 2015/01/15 00:38:17 UTC

[7/7] ambari git commit: AMBARI-9005 Port the AMS service scripts from HDP to HDPWIN (Eugene Chekanskiy via jluniya)

AMBARI-9005 Port the AMS service scripts from HDP to HDPWIN (Eugene Chekanskiy via jluniya)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3c202998
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3c202998
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3c202998

Branch: refs/heads/trunk
Commit: 3c202998ef286d5074be6b713780546b9f5d73ef
Parents: 7c98d6e
Author: Jayush Luniya <jl...@hortonworks.com>
Authored: Wed Jan 14 15:37:52 2015 -0800
Committer: Jayush Luniya <jl...@hortonworks.com>
Committed: Wed Jan 14 15:37:52 2015 -0800

----------------------------------------------------------------------
 .../libraries/functions/reload_windows_env.py   |    3 +-
 .../libraries/providers/__init__.py             |    2 +-
 .../libraries/providers/msi.py                  |   66 +
 .../libraries/resources/__init__.py             |    3 +-
 .../libraries/resources/msi.py                  |   30 +
 .../libraries/script/script.py                  |   65 +-
 .../src/main/python/embedded_hbase_service.py   |    5 +-
 .../internal/AbstractProviderModule.java        |  121 +-
 .../server/controller/sql/HostInfoProvider.java |   63 -
 .../controller/sql/SQLPropertyProvider.java     |  572 ---
 .../controller/sql/SinkConnectionFactory.java   |  132 -
 .../common-services/AMS/0.1.0/metainfo.xml      |   14 +
 .../AMS/0.1.0/package/scripts/ams.py            |   81 +
 .../AMS/0.1.0/package/scripts/ams_service.py    |   12 +-
 .../AMS/0.1.0/package/scripts/hbase.py          |   85 +-
 .../0.1.0/package/scripts/metric_collector.py   |    4 +-
 .../AMS/0.1.0/package/scripts/metric_monitor.py |    5 +-
 .../AMS/0.1.0/package/scripts/params.py         |   34 +-
 .../AMS/0.1.0/package/scripts/params_linux.py   |   53 +
 .../AMS/0.1.0/package/scripts/params_windows.py |   35 +
 .../0.1.0/package/scripts/service_mapping.py    |   21 +
 .../AMS/0.1.0/package/scripts/status.py         |   38 +
 .../HDPWIN/2.1/configuration/cluster-env.xml    |   53 +-
 .../2.1/hooks/after-INSTALL/scripts/hook.py     |   18 -
 .../2.1/hooks/before-START/scripts/hook.py      |    9 +
 .../2.1/hooks/before-START/scripts/params.py    |   18 +-
 .../hadoop-metrics2-hbase.properties.j2         |   66 +
 .../templates/hadoop-metrics2.properties.j2     |   56 +
 .../2.1/services/AMS/configuration/ams-env.xml  |   33 +
 .../AMS/configuration/ams-hbase-env.xml         |   47 +
 .../AMS/configuration/ams-hbase-site.xml        |   45 +
 .../services/AMS/configuration/ams-log4j.xml    |   59 +
 .../2.1/services/AMS/configuration/ams-site.xml |   32 +
 .../stacks/HDPWIN/2.1/services/AMS/metainfo.xml |   26 +
 .../HDPWIN/2.1/services/HBASE/metrics.json      | 4659 ------------------
 .../HDPWIN/2.1/services/HDFS/metrics.json       | 2126 --------
 .../2.1/services/HDFS/package/scripts/hdfs.py   |   10 -
 .../2.1/services/HDFS/package/scripts/params.py |    9 +-
 .../templates/hadoop-metrics2.properties.j2     |   53 -
 .../HDPWIN/2.1/services/STORM/metrics.json      |   99 -
 .../HDPWIN/2.1/services/YARN/metrics.json       | 3138 ------------
 .../stacks/HDPWIN/2.2/services/AMS/metainfo.xml |   26 +
 42 files changed, 906 insertions(+), 11120 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-common/src/main/python/resource_management/libraries/functions/reload_windows_env.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/reload_windows_env.py b/ambari-common/src/main/python/resource_management/libraries/functions/reload_windows_env.py
index f6f3626..49cb812 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/reload_windows_env.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/reload_windows_env.py
@@ -30,7 +30,8 @@ default_whitelist = ["FALCON_CONF_DIR", "FALCON_DATA_DIR", "FALCON_HOME", "FALCO
                      "HIVE_OPTS", "KNOX_CONF_DIR", "KNOX_HOME", "KNOX_LOG_DIR", "MAHOUT_HOME", "OOZIE_DATA",
                      "OOZIE_HOME", "OOZIE_LOG", "OOZIE_ROOT", "PIG_HOME", "SQOOP_HOME", "STORM_CONF_DIR", "STORM_HOME",
                      "STORM_LOG_DIR", "WEBHCAT_CONF_DIR", "YARN_LOG_DIR", "ZOOKEEPER_CONF_DIR", "ZOOKEEPER_HOME",
-                     "ZOOKEEPER_LIB_DIR", "ZOO_LOG_DIR"]
+                     "ZOOKEEPER_LIB_DIR", "ZOO_LOG_DIR", "COLLECTOR_CONF_DIR", "COLLECTOR_HOME", "MONITOR_CONF_DIR",
+                     "MONITOR_HOME", "SINK_HOME"]
 def reload_windows_env(keys_white_list=default_whitelist):
   root = HKEY_LOCAL_MACHINE
   subkey = r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment'

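The added COLLECTOR_CONF_DIR/COLLECTOR_HOME, MONITOR_CONF_DIR/MONITOR_HOME and SINK_HOME entries let the AMS service scripts pick up the environment variables written by the new metrics MSI installers. As a minimal sketch of what the whitelist-driven reload amounts to (Python 2 _winreg; this helper is illustrative, not the exact Ambari implementation):

  import os
  from _winreg import OpenKey, QueryValueEx, HKEY_LOCAL_MACHINE, KEY_READ

  def reload_env_sketch(keys_white_list):
      # machine-level environment, same subkey as above
      subkey = r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment'
      with OpenKey(HKEY_LOCAL_MACHINE, subkey, 0, KEY_READ) as key:
          for name in keys_white_list:
              try:
                  value, _type = QueryValueEx(key, name)
                  os.environ[name] = str(value)  # expose the value to this agent process
              except WindowsError:
                  pass  # variable not written yet, e.g. service not installed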
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
index 80e0a14..c765218 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
@@ -31,7 +31,7 @@ PROVIDERS = dict(
     Repository="resource_management.libraries.providers.repository.UbuntuRepositoryProvider",
   ),
   winsrv=dict(
-
+    Msi="resource_management.libraries.providers.msi.MsiProvider"
   ),
   default=dict(
     ExecuteHadoop="resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider",

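The winsrv entry above registers the new Msi resource against its Windows provider by dotted path. Resolving such an entry roughly amounts to the following (assuming Python 2.7's importlib; the loader is a sketch, not the actual resource_management lookup code):

  import importlib

  def load_provider(dotted_path):
      module_name, class_name = dotted_path.rsplit(".", 1)
      return getattr(importlib.import_module(module_name), class_name)

  # e.g. the entry registered above:
  MsiProvider = load_provider("resource_management.libraries.providers.msi.MsiProvider")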
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-common/src/main/python/resource_management/libraries/providers/msi.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/msi.py b/ambari-common/src/main/python/resource_management/libraries/providers/msi.py
new file mode 100644
index 0000000..7f4deeb
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/msi.py
@@ -0,0 +1,66 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+"""
+
+
+from resource_management import *
+import urlparse
+from ambari_commons.inet_utils import download_file
+import os
+
+class MsiProvider(Provider):
+  MSI_INSTALL_COMMAND = "cmd /C start /wait msiexec /qn /i {msi_file_path} /lv {log_file_path}{list_args_str}{dict_args_str}"
+
+  def action_install(self):
+    name = self.resource.msi_name
+    msi_file_path = name
+    dict_args = self.resource.dict_args
+    list_args = self.resource.list_args
+    working_dir = os.path.abspath(Script.get_config()["hostLevelParams"]["agentCacheDir"])
+    http_source = self.resource.http_source
+
+    # name can be a path to a file on the local file system
+    msi_filename = os.path.split(name)[1]
+    log_file_path = os.path.join(working_dir, msi_filename) + ".log"
+    marker_file = os.path.join(working_dir, msi_filename) + ".installed"
+
+    # build command-line argument strings from the arguments passed to the Msi resource
+    dict_args_str = ' ALLUSERS="1"'
+    for k, v in dict_args.iteritems():
+      dict_args_str += " " + str(k)+"="+str(v)
+    list_args_str = ''
+    for a in list_args:
+      list_args_str += " /" + str(a)
+
+    # if an HTTP source is present, download the MSI first, then execute it
+    if http_source:
+      download_url = urlparse.urljoin(http_source, name)
+      msi_file_path = os.path.join(working_dir, msi_filename)
+      download_file(download_url, msi_file_path)
+    if not os.path.exists(marker_file):
+      Execute(MsiProvider.MSI_INSTALL_COMMAND.format(msi_file_path=msi_file_path,
+                                                     log_file_path=log_file_path,
+                                                     dict_args_str=dict_args_str,
+                                                     list_args_str=list_args_str).rstrip())
+      # write a marker file so the MSI is not installed again later
+      open(marker_file,"w").close()
+
+  def action_uninstall(self):
+    pass
+

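To see what the provider ends up running, here is MSI_INSTALL_COMMAND filled in by hand (the paths and argument values below are made-up examples, not defaults):

  from resource_management.libraries.providers.msi import MsiProvider

  cmd = MsiProvider.MSI_INSTALL_COMMAND.format(
      msi_file_path=r"C:\var\lib\ambari-agent\cache\ambari-metrics-collector.msi",
      log_file_path=r"C:\var\lib\ambari-agent\cache\ambari-metrics-collector.msi.log",
      list_args_str=" /norestart",    # built from list_args
      dict_args_str=' ALLUSERS="1"')  # dict_args output always starts with ALLUSERS="1"
  # -> cmd /C start /wait msiexec /qn /i C:\...\ambari-metrics-collector.msi
  #    /lv C:\...\ambari-metrics-collector.msi.log /norestart ALLUSERS="1"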
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
index 24b497c..a0b533c 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
@@ -27,4 +27,5 @@ from resource_management.libraries.resources.properties_file import *
 from resource_management.libraries.resources.repository import *
 from resource_management.libraries.resources.monitor_webserver import *
 from resource_management.libraries.resources.hdfs_directory import *
-from resource_management.libraries.resources.copy_from_local import *
\ No newline at end of file
+from resource_management.libraries.resources.copy_from_local import *
+from resource_management.libraries.resources.msi import *
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-common/src/main/python/resource_management/libraries/resources/msi.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/msi.py b/ambari-common/src/main/python/resource_management/libraries/resources/msi.py
new file mode 100644
index 0000000..72d06c5
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/msi.py
@@ -0,0 +1,30 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+"""
+
+__all__ = ["Msi"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument
+
+class Msi(Resource):
+  action = ForcedListArgument(default="install")
+  msi_name = ResourceArgument(default=lambda obj: obj.name)
+  http_source = ResourceArgument(default=None)
+  dict_args = ResourceArgument(default={})
+  list_args = ResourceArgument(default=[])
+  actions = Resource.actions + ["install", "uninstall"]

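A minimal usage sketch for the new resource, mirroring how install_packages drives it further down (the server URL and the DESTINATION_DIR property are placeholders, not real defaults):

  from resource_management.libraries.resources.msi import Msi

  Msi("ambari-metrics-collector.msi",
      http_source="http://ambari.server:8080/resources/",  # placeholder URL
      list_args=["norestart"],                             # rendered as " /norestart"
      dict_args={"DESTINATION_DIR": '"C:\\hdp"'})          # rendered as ' DESTINATION_DIR="C:\hdp"'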
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index c14853d..9fe6957 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -26,7 +26,7 @@ import sys
 import json
 import logging
 import platform
-
+from ambari_commons.os_check import OSCheck
 from resource_management.libraries.resources import XmlConfig
 from resource_management.libraries.resources import PropertiesFile
 from resource_management.core.resources import File, Directory
@@ -39,11 +39,11 @@ from resource_management.libraries.functions.version_select_util import get_comp
 from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.script.config_dictionary import ConfigDictionary, UnknownConfiguration
 
-IS_WINDOWS = platform.system() == "Windows"
-if IS_WINDOWS:
+if OSCheck.is_windows_family():
   from resource_management.libraries.functions.install_hdp_msi import install_windows_msi
   from resource_management.libraries.functions.reload_windows_env import reload_windows_env
   from resource_management.libraries.functions.zip_archive import archive_dir
+  from resource_management.libraries.resources import Msi
 else:
   from resource_management.libraries.functions.tar_archive import archive_dir
 
@@ -160,7 +160,7 @@ class Script(object):
     # On Windows we need to reload some environment variables manually, because there are no default config paths
     # (like /etc/something/conf on Linux). When these env vars are created by one Script execution they cannot be
     # updated in the agent, so other Script executions would not be able to see the new values.
-    if platform.system() == "Windows":
+    if OSCheck.is_windows_family():
       reload_windows_env()
 
     try:
@@ -243,41 +243,37 @@ class Script(object):
     """
     self.install_packages(env)
 
-
-  if not IS_WINDOWS:
-    def install_packages(self, env, exclude_packages=[]):
-      """
-      List of packages that are required< by service is received from the server
-      as a command parameter. The method installs all packages
-      from this list
-      """
-      config = self.get_config()
-      try:
-        package_list_str = config['hostLevelParams']['package_list']
-        if isinstance(package_list_str, basestring) and len(package_list_str) > 0:
-          package_list = json.loads(package_list_str)
-          for package in package_list:
-            if not package['name'] in exclude_packages:
-              name = package['name']
+  def install_packages(self, env, exclude_packages=[]):
+    """
+    The list of packages required by the service is received from the server
+    as a command parameter. This method installs all packages
+    from that list.
+    """
+    config = self.get_config()
+    try:
+      package_list_str = config['hostLevelParams']['package_list']
+      if isinstance(package_list_str, basestring) and len(package_list_str) > 0:
+        package_list = json.loads(package_list_str)
+        for package in package_list:
+          if not package['name'] in exclude_packages:
+            name = package['name']
+            if OSCheck.is_windows_family():
+              if name[-4:] == ".msi":
+                # TODO: all MSIs must be located in the server's resource folder; change this to the repo later
+                Msi(name, http_source=os.path.join(config['hostLevelParams']['jdk_location']))
+            else:
               Package(name)
-      except KeyError:
-        pass  # No reason to worry
-
-        # RepoInstaller.remove_repos(config)
-      pass
-  else:
-    def install_packages(self, env, exclude_packages=[]):
-      """
-      List of packages that are required< by service is received from the server
-      as a command parameter. The method installs all packages
-      from this list
-      """
-      config = self.get_config()
+    except KeyError:
+      pass  # No reason to worry
 
+    if OSCheck.is_windows_family():
+      # TODO: hacky install of the Windows MSI; remove this, or move it to the old (2.1) stack definition, once component-based install is implemented
       install_windows_msi(os.path.join(config['hostLevelParams']['jdk_location'], "hdp.msi"),
                           config["hostLevelParams"]["agentCacheDir"], "hdp.msi", self.get_password("hadoop"),
                           str(config['hostLevelParams']['stack_version']))
-      pass
+      reload_windows_env()
+    # RepoInstaller.remove_repos(config)
+    pass
 
   def fail_with_error(self, message):
     """
@@ -287,6 +283,7 @@ class Script(object):
     sys.stderr.write("Error: " + message)
     sys.exit(1)
 
+
   def start(self, env, rolling_restart=False):
     """
     To be overridden by subclasses

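With this change install_packages is one code path for both OS families: hostLevelParams.package_list arrives as a JSON string, names ending in ".msi" are routed to the new Msi resource on Windows, and everything else stays a regular Package. A trimmed illustration of that branching (values are made up; is_windows stands in for OSCheck.is_windows_family()):

  import json

  is_windows = True
  package_list_str = '[{"name": "ambari-metrics-collector.msi"}, {"name": "hadoop"}]'

  for package in json.loads(package_list_str):
      name = package["name"]
      if is_windows:
          if name.endswith(".msi"):
              print("Msi(%s)" % name)      # installed via MsiProvider
      else:
          print("Package(%s)" % name)      # regular package install on Linux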
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-metrics/ambari-metrics-timelineservice/src/main/python/embedded_hbase_service.py
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/python/embedded_hbase_service.py b/ambari-metrics/ambari-metrics-timelineservice/src/main/python/embedded_hbase_service.py
index a699774..70cdd41 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/python/embedded_hbase_service.py
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/python/embedded_hbase_service.py
@@ -30,6 +30,7 @@ from ambari_metrics_collector.serviceConfiguration import EMBEDDED_HBASE_MASTER_
 
 MASTER_JVM_ARGS = '{0} ' \
   '"-XX:+UseConcMarkSweepGC" "-Djava.net.preferIPv4Stack=true" ' \
+  '-Djava.library.path="{6}" ' \
   '-Dhadoop.home.dir="{1}" -Dhbase.log.dir="{2}" -Dhbase.log.file={3} -Dhbase.home.dir="{1}" -Dhbase.id.str="{4}" ' \
   '-Dhbase.root.logger="INFO,DRFA" -Dhbase.security.logger="INFO,RFAS" ' \
   '-classpath "{5}" org.apache.hadoop.hbase.master.HMaster start'
@@ -44,7 +45,7 @@ def _build_master_java_args(username = None):
   hbase_log_dir = os.path.join(os.sep, "var", "log", EMBEDDED_HBASE_MASTER_SERVICE)
   hbase_log_file = "hbase.log"
   hbase_user_id = username if username else "SYSTEM"
-
+  java_library_path = os.path.join(hbase_home_dir, "bin")
   if not os.path.exists(hbase_log_dir):
     os.makedirs(hbase_log_dir)
 
@@ -53,7 +54,7 @@ def _build_master_java_args(username = None):
   java_class_path += os.pathsep + hbase_home_dir
   java_class_path += os.pathsep + os.path.join(hbase_home_dir, "lib", "*")
 
-  args = MASTER_JVM_ARGS.format(build_jvm_args(), hbase_home_dir, hbase_log_dir, hbase_log_file, hbase_user_id, java_class_path)
+  args = MASTER_JVM_ARGS.format(build_jvm_args(), hbase_home_dir, hbase_log_dir, hbase_log_file, hbase_user_id, java_class_path, java_library_path)
 
   return args
 

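The new "{6}" placeholder threads java_library_path (the embedded HBase bin directory, where the Windows native libraries live) into the HMaster JVM arguments. A toy check of the positional formatting (paths are made up):

  template = '-Djava.library.path="{6}" -Dhadoop.home.dir="{1}" -classpath "{5}"'
  print(template.format("jvmargs", r"C:\hbase", r"C:\var\log", "hbase.log",
                        "SYSTEM", r"C:\hbase\lib\*", r"C:\hbase\bin"))
  # -Djava.library.path="C:\hbase\bin" -Dhadoop.home.dir="C:\hbase" -classpath "C:\hbase\lib\*"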
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
index bf68986..9a7528d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
@@ -44,9 +44,6 @@ import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
-import org.apache.ambari.server.controller.sql.HostInfoProvider;
-import org.apache.ambari.server.controller.sql.SQLPropertyProvider;
-import org.apache.ambari.server.controller.sql.SinkConnectionFactory;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.controller.utilities.StreamProvider;
@@ -78,7 +75,7 @@ import static org.apache.ambari.server.controller.metrics.MetricsServiceProvider
  */
 public abstract class AbstractProviderModule implements ProviderModule,
     ResourceProviderObserver, JMXHostProvider, MetricHostProvider,
-    MetricsServiceProvider, HostInfoProvider {
+    MetricsServiceProvider {
 
   private static final int PROPERTY_REQUEST_CONNECT_TIMEOUT = 5000;
   private static final int PROPERTY_REQUEST_READ_TIMEOUT    = 10000;
@@ -546,43 +543,6 @@ public abstract class AbstractProviderModule implements ProviderModule,
     return value;
   }
 
-  // ----- HostInfoProvider -----------------------------------------------
-
-  @Override
-  public String getHostName(String id) throws SystemException {
-    return getClusterNodeName(id);
-  }
-
-  @Override
-  public String getHostAddress(String id) throws SystemException {
-    return getClusterHostAddress(id);
-  }
-
-
-  // get the hostname
-  private String getClusterNodeName(String hostname) throws SystemException {
-    try {
-      if (hostname.equalsIgnoreCase("localhost")) {
-        return InetAddress.getLocalHost().getCanonicalHostName();
-      }
-      return InetAddress.getByName(hostname).getCanonicalHostName();
-    } catch (Exception e) {
-      throw new SystemException("Error getting hostname.", e);
-    }
-  }
-
-  // get the hostname
-  private String getClusterHostAddress(String hostname) throws SystemException {
-    try {
-      if (hostname.equalsIgnoreCase("localhost")) {
-        return InetAddress.getLocalHost().getHostAddress();
-      }
-      return InetAddress.getByName(hostname).getHostAddress();
-    } catch (Exception e) {
-      throw new SystemException("Error getting ip address.", e);
-    }
-  }
-
   // ----- utility methods ---------------------------------------------------
 
   protected abstract ResourceProvider createResourceProvider(Resource.Type type);
@@ -656,15 +616,7 @@ public abstract class AbstractProviderModule implements ProviderModule,
               PropertyHelper.getPropertyId("ServiceComponentInfo", "component_name"),
               PropertyHelper.getPropertyId("ServiceComponentInfo", "state"));
           PropertyProvider gpp = null;
-          if (System.getProperty("os.name").contains("Windows")) {
-            gpp = createSQLComponentPropertyProvider(
-                type,
-                this,
-                PropertyHelper.getPropertyId("ServiceComponentInfo", "cluster_name"),
-                PropertyHelper.getPropertyId("ServiceComponentInfo", "component_name"),
-                PropertyHelper.getPropertyId("ServiceComponentInfo", "service_name"));
-          } else {
-            gpp = createMetricsComponentPropertyProvider(
+          gpp = createMetricsComponentPropertyProvider(
               type,
               streamProvider,
               ComponentSSLConfiguration.instance(),
@@ -672,7 +624,6 @@ public abstract class AbstractProviderModule implements ProviderModule,
               this,
               PropertyHelper.getPropertyId("ServiceComponentInfo", "cluster_name"),
               PropertyHelper.getPropertyId("ServiceComponentInfo", "component_name"));
-          }
           providers.add(new StackDefinedPropertyProvider(
               type,
               this,
@@ -699,25 +650,16 @@ public abstract class AbstractProviderModule implements ProviderModule,
               PropertyHelper.getPropertyId("HostRoles", "component_name"),
               PropertyHelper.getPropertyId("HostRoles", "state"));
           PropertyProvider gpp = null;
-          if (System.getProperty("os.name").contains("Windows")) {
-            gpp = createSQLHostComponentPropertyProvider(
-                type,
-                this,
-                PropertyHelper.getPropertyId("HostRoles", "cluster_name"),
-                PropertyHelper.getPropertyId("HostRoles", "host_name"),
-                PropertyHelper.getPropertyId("HostRoles", "component_name"),
-                PropertyHelper.getPropertyId("HostRoles", "service_name"));
-          } else {
-            gpp = createMetricsHostComponentPropertyProvider(
-              type,
-              streamProvider,
-              ComponentSSLConfiguration.instance(),
-              this,
-              this,
-              PropertyHelper.getPropertyId("HostRoles", "cluster_name"),
-              PropertyHelper.getPropertyId("HostRoles", "host_name"),
-              PropertyHelper.getPropertyId("HostRoles", "component_name"));
-          }
+          gpp = createMetricsHostComponentPropertyProvider(
+            type,
+            streamProvider,
+            ComponentSSLConfiguration.instance(),
+            this,
+            this,
+            PropertyHelper.getPropertyId("HostRoles", "cluster_name"),
+            PropertyHelper.getPropertyId("HostRoles", "host_name"),
+            PropertyHelper.getPropertyId("HostRoles", "component_name"));
+
           providers.add(new StackDefinedPropertyProvider(
               type,
               this,
@@ -1030,45 +972,6 @@ public abstract class AbstractProviderModule implements ProviderModule,
       componentNamePropertyId);
   }
 
-  /**
-   * Create the SQL component property provider for the given type.
-   */
-  private PropertyProvider createSQLComponentPropertyProvider(Resource.Type type,
-                                                              HostInfoProvider hostProvider,
-                                                              String clusterNamePropertyId,
-                                                              String componentNamePropertyId,
-                                                              String serviceNamePropertyId) {
-    return new SQLPropertyProvider(
-        PropertyHelper.getSQLServerPropertyIds(type),
-        hostProvider,
-        clusterNamePropertyId,
-        null,
-        componentNamePropertyId,
-        serviceNamePropertyId,
-        SinkConnectionFactory.instance());
-  }
-
-
-  /**
-   * Create the SQL host component property provider for the given type.
-   */
-  private PropertyProvider createSQLHostComponentPropertyProvider(Resource.Type type,
-                                                                  HostInfoProvider hostProvider,
-                                                                  String clusterNamePropertyId,
-                                                                  String hostNamePropertyId,
-                                                                  String componentNamePropertyId,
-                                                                  String serviceNamePropertyId) {
-
-    return new SQLPropertyProvider(
-        PropertyHelper.getSQLServerPropertyIds(type),
-        hostProvider,
-        clusterNamePropertyId,
-        hostNamePropertyId,
-        componentNamePropertyId,
-        serviceNamePropertyId,
-        SinkConnectionFactory.instance());
-  }
-
   @Override
   public String getJMXProtocol(String clusterName, String componentName) {
     String jmxProtocolString = clusterJmxProtocolMap.get(clusterName);

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/HostInfoProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/HostInfoProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/HostInfoProvider.java
deleted file mode 100644
index 08bbe71..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/HostInfoProvider.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.server.controller.sql;
-
-import org.apache.ambari.server.controller.spi.SystemException;
-
-/**
- * Provider of host information.
- */
-public interface HostInfoProvider {
-
-  /**
-   * Get the host name for the given cluster name and component name.
-   *
-   * @param clusterName    the cluster name
-   * @param componentName  the component name
-   *
-   * @return the host name
-   *
-   * @throws SystemException if unable to get the host name
-   */
-  public String getHostName(String clusterName, String componentName)
-      throws SystemException;
-
-  /**
-   * Get the host name.
-   *
-   * @param id  the host identifier
-   *
-   * @return the host name
-   *
-   * @throws SystemException if unable to get the host name
-   */
-  public String getHostName(String id)
-      throws SystemException;
-
-  /**
-   * Get the host ip address.
-   *
-   * @param id  the host identifier
-   *
-   * @return the host ip address
-   *
-   * @throws SystemException if unable to get the host address
-   */
-  public String getHostAddress(String id)
-      throws SystemException;
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/SQLPropertyProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/SQLPropertyProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/SQLPropertyProvider.java
deleted file mode 100644
index 30f82fe..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/SQLPropertyProvider.java
+++ /dev/null
@@ -1,572 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.controller.sql;
-
-import org.apache.ambari.server.controller.internal.AbstractPropertyProvider;
-import org.apache.ambari.server.controller.internal.PropertyInfo;
-import org.apache.ambari.server.controller.jdbc.ConnectionFactory;
-import org.apache.ambari.server.controller.spi.Predicate;
-import org.apache.ambari.server.controller.spi.Request;
-import org.apache.ambari.server.controller.spi.Resource;
-import org.apache.ambari.server.controller.spi.SystemException;
-import org.apache.ambari.server.controller.spi.TemporalInfo;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.Serializable;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.NumberFormat;
-import java.text.ParsePosition;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * SQL based property/metrics provider required for ambari-scom.
- */
-public class SQLPropertyProvider extends AbstractPropertyProvider {
-
-  private final HostInfoProvider hostProvider;
-
-  private final String clusterNamePropertyId;
-
-  private final String hostNamePropertyId;
-
-  private final String componentNamePropertyId;
-
-  private final String serviceNamePropertyId;
-
-  private final ConnectionFactory connectionFactory;
-
-
-  // ----- Constants ---------------------------------------------------------
-
-  private static final String GET_METRICS_STATEMENT =
-    "SELECT  s.RecordTypeContext, s.RecordTypeName, s.TagPairs, s.NodeName, s.ServiceName, mn.Name AS MetricName, s.RecordTimeStamp, mp.MetricValue\n" +
-      "FROM HadoopMetrics.dbo.MetricPair mp\n" +
-      "     INNER JOIN (\n" +
-      "         SELECT mr.RecordID AS RecordID, mr.RecordTimeStamp AS RecordTimeStamp, rt.Context AS RecordTypeContext, rt.Name AS RecordTypeName, ts.TagPairs AS TagPairs, nd.Name AS NodeName, sr.Name AS ServiceName\n" +
-      "         FROM HadoopMetrics.dbo.MetricRecord mr\n" +
-      "              INNER JOIN HadoopMetrics.dbo.RecordType rt ON (mr.RecordTypeId = rt.RecordTypeId)\n" +
-      "              INNER JOIN HadoopMetrics.dbo.TagSet ts ON (mr.TagSetID = ts.TagSetID)\n" +
-      "              INNER JOIN HadoopMetrics.dbo.Node nd ON (mr.NodeID = nd.NodeID)\n" +
-      "              INNER JOIN HadoopMetrics.dbo.Service sr ON (mr.ServiceID = sr.ServiceID)\n" +
-      "         WHERE rt.Context in (%s)\n" +
-      "               AND rt.Name in (%s)\n" +
-      "               AND (ts.TagPairs LIKE %s)\n" +
-      "               AND (nd.Name in (%s))\n" +
-      "               AND (sr.Name in (%s))\n" +
-      "               AND mr.RecordTimestamp >= %d\n" +
-      "               AND mr.RecordTimestamp <= %d\n" +
-      "     ) s ON (mp.RecordID = s.RecordID)\n" +
-      "     INNER JOIN HadoopMetrics.dbo.MetricName mn ON (mp.MetricID = mn.MetricID)\n" +
-      "WHERE (mn.Name in (%s))";
-
-  protected final static Logger LOG = LoggerFactory.getLogger(SQLPropertyProvider.class);
-
-
-  // ----- Constructors ------------------------------------------------------
-
-  public SQLPropertyProvider(
-    Map<String, Map<String, PropertyInfo>> componentPropertyInfoMap,
-    HostInfoProvider hostProvider,
-    String clusterNamePropertyId,
-    String hostNamePropertyId,
-    String componentNamePropertyId,
-    String serviceNamePropertyId,
-    ConnectionFactory connectionFactory) {
-    super(componentPropertyInfoMap);
-    this.hostProvider = hostProvider;
-    this.clusterNamePropertyId = clusterNamePropertyId;
-    this.hostNamePropertyId = hostNamePropertyId;
-    this.componentNamePropertyId = componentNamePropertyId;
-    this.serviceNamePropertyId = serviceNamePropertyId;
-    this.connectionFactory = connectionFactory;
-  }
-
-
-  // ----- PropertyProvider --------------------------------------------------
-
-  @Override
-  public Set<Resource> populateResources(Set<Resource> resources, Request request, Predicate predicate)
-    throws SystemException {
-    Set<Resource> keepers = new HashSet<Resource>();
-    try {
-      Connection connection = connectionFactory.getConnection();
-      try {
-        Statement statement = connection.createStatement();
-        try {
-          for (Resource resource : resources) {
-            if (populateResource(resource, request, predicate, statement)) {
-              keepers.add(resource);
-            }
-          }
-        } finally {
-          statement.close();
-        }
-      } finally {
-        connection.close();
-      }
-    } catch (SQLException e) {
-      if (LOG.isErrorEnabled()) {
-        LOG.error("Error during populateResources call.");
-        LOG.debug("Error during populateResources call : caught exception", e);
-      }
-    }
-    return keepers;
-  }
-
-
-  // ----- helper methods ----------------------------------------------------
-
-  // Populate the given resource
-  private boolean populateResource(Resource resource, Request request, Predicate predicate, Statement statement) throws SystemException {
-
-    Set<String> ids = getRequestPropertyIds(request, predicate);
-    if (ids.isEmpty()) {
-      // no properties requested ... nothing to do.
-      return true;
-    }
-
-    String componentName = (String) resource.getPropertyValue(componentNamePropertyId);
-    String serviceName = (String) resource.getPropertyValue(serviceNamePropertyId);
-
-    if (getComponentMetrics().get(componentName) == null) {
-      // no metrics defined for the given component ... nothing to do.
-      return true;
-    }
-
-    String clusterName = (String) resource.getPropertyValue(clusterNamePropertyId);
-    String hostName = getHost(resource, clusterName, componentName);
-
-    if (hostName == null) {
-      throw new SystemException(
-        "Unable to get metrics.  No host name for " + componentName, null);
-    }
-
-    Set<MetricDefinition> metricsDefinitionSet = new HashSet<MetricDefinition>();
-    for (String id : ids) {
-      Map<String, PropertyInfo> propertyInfoMap = getPropertyInfoMap(componentName, id);
-
-      for (Map.Entry<String, PropertyInfo> entry : propertyInfoMap.entrySet()) {
-        String propertyKey = entry.getKey();
-        PropertyInfo propertyInfo = entry.getValue();
-        if (containsArguments(propertyKey)) {
-          propertyInfo = updatePropertyInfo(propertyKey, id, propertyInfo);
-        }
-
-        String propertyId = propertyInfo.getPropertyId();
-        TemporalInfo temporalInfo = request.getTemporalInfo(id);
-
-        if ((propertyInfo.isPointInTime() && temporalInfo == null) ||
-          (propertyInfo.isTemporal() && temporalInfo != null)) {
-
-          long startTime;
-          long endTime;
-
-          if (temporalInfo != null) {
-            Long endTimeSeconds = temporalInfo.getEndTime();
-
-            endTime = endTimeSeconds != -1 ? endTimeSeconds * 1000 : Long.MAX_VALUE;
-            startTime = temporalInfo.getStartTime() * 1000;
-          } else {
-            startTime = 0L;
-            endTime = Long.MAX_VALUE;
-          }
-
-          String category = "";
-          String recordTypeContext = "";
-          String recordTypeName = "";
-          String metricName = "";
-          String tagPairsPattern = ",";
-          int dotIndex = propertyId.lastIndexOf('.');
-          if (dotIndex != -1) {
-            category = propertyId.substring(0, dotIndex);
-            metricName = propertyId.substring(dotIndex + 1);
-          }
-          String[] parts = category.split("\\.");
-          if (parts.length >= 2) {
-            recordTypeContext = parts[0];
-            recordTypeName = parts[1];
-            if (containsArguments(propertyKey) && parts.length > 2) {
-              tagPairsPattern = StringUtils.join(Arrays.copyOfRange(parts, 2, parts.length), ".");
-            }
-            metricsDefinitionSet.add(
-              new MetricDefinition(
-                startTime,
-                endTime,
-                recordTypeContext,
-                recordTypeName,
-                tagPairsPattern,
-                metricName,
-                serviceName != null && serviceName.toLowerCase().equals("hbase") ? serviceName.toLowerCase() : componentName.toLowerCase(),
-                hostName,
-                propertyKey,
-                id,
-                temporalInfo)
-            );
-          } else {
-            if (LOG.isWarnEnabled()) {
-              LOG.warn("Can't get metrics for " + id + " : " + propertyId);
-            }
-          }
-        }
-      }
-    }
-
-    Map<MetricDefinition, List<DataPoint>> results = getMetric(metricsDefinitionSet, statement);
-
-    for (MetricDefinition metricDefinition : metricsDefinitionSet) {
-      List<DataPoint> dataPoints = results.containsKey(metricDefinition) ? results.get(metricDefinition) : new ArrayList<DataPoint>();
-      TemporalInfo temporalInfo = metricDefinition.getTemporalInfo();
-      String propertyKey = metricDefinition.getPropertyKey();
-      String requestedPropertyKey = metricDefinition.getRequestedPropertyKey();
-      if (dataPoints != null) {
-        if (temporalInfo == null) {
-          // return the value of the last data point
-          int length = dataPoints.size();
-          Serializable value = length > 0 ? dataPoints.get(length - 1).getValue() : 0;
-          resource.setProperty(propertyKey, value);
-        } else {
-          Number[][] dp = new Number[dataPoints.size()][2];
-          for (int i = 0; i < dp.length; i++) {
-            dp[i][0] = dataPoints.get(i).getValue();
-            dp[i][1] = dataPoints.get(i).getTimestamp() / 1000;
-          }
-          if (containsArguments(propertyKey)) {
-            resource.setProperty(requestedPropertyKey, dp);
-          } else {
-            resource.setProperty(propertyKey, dp);
-          }
-        }
-      }
-    }
-
-    return true;
-  }
-
-  // get a metric from a sql connection
-  private Map<MetricDefinition, List<DataPoint>> getMetric(Set<MetricDefinition> metricDefinitionSet, Statement statement) throws SystemException {
-    Map<MetricDefinition, List<DataPoint>> results = new HashMap<MetricDefinition, List<DataPoint>>();
-    try {
-      StringBuilder query = new StringBuilder();
-      Set<String> recordTypeContexts = new HashSet<String>();
-      Set<String> recordTypeNamess = new HashSet<String>();
-      Set<String> tagPairsPatterns = new HashSet<String>();
-      Set<String> nodeNames = new HashSet<String>();
-      Set<String> serviceNames = new HashSet<String>();
-      Set<String> metricNames = new HashSet<String>();
-      long startTime = 0, endTime = 0;
-      for (MetricDefinition metricDefinition : metricDefinitionSet) {
-        if (metricDefinition.getRecordTypeContext() == null || metricDefinition.getRecordTypeName() == null || metricDefinition.getNodeName() == null) {
-          continue;
-        }
-
-        recordTypeContexts.add(metricDefinition.getRecordTypeContext());
-        recordTypeNamess.add(metricDefinition.getRecordTypeName());
-        tagPairsPatterns.add(metricDefinition.getTagPairsPattern());
-        nodeNames.add(metricDefinition.getNodeName());
-        serviceNames.add(metricDefinition.getServiceName());
-        metricNames.add(metricDefinition.getMetricName());
-        startTime = metricDefinition.getStartTime();
-        endTime = metricDefinition.getEndTime();
-      }
-
-      for (String tagPairsPattern : tagPairsPatterns) {
-        if (query.length() != 0) {
-          query.append("\nUNION\n");
-        }
-        query.append(String.format(GET_METRICS_STATEMENT,
-          "'" + StringUtils.join(recordTypeContexts, "','") + "'",
-          "'" + StringUtils.join(recordTypeNamess, "','") + "'",
-          "'%" + tagPairsPattern + "%'",
-          "'" + StringUtils.join(nodeNames, "','") + "'",
-          "'" + StringUtils.join(serviceNames, "','") + "'",
-          startTime,
-          endTime,
-          "'" + StringUtils.join(metricNames, "','") + "'"
-        ));
-      }
-
-      ResultSet rs = null;
-      if (query.length() != 0) {
-        rs = statement.executeQuery(query.toString());
-      }
-
-      if (rs != null) {
-        //(RecordTimeStamp bigint, MetricValue NVARCHAR(512))
-        while (rs.next()) {
-          MetricDefinition metricDefinition = null;
-          for (MetricDefinition md : metricDefinitionSet) {
-            if (md.getRecordTypeContext().equalsIgnoreCase(rs.getString("RecordTypeContext"))
-              && md.getRecordTypeName().equalsIgnoreCase(rs.getString("RecordTypeName"))
-              && md.getMetricName().equalsIgnoreCase(rs.getString("MetricName"))
-              && md.getServiceName().equalsIgnoreCase(rs.getString("ServiceName"))
-              && md.getNodeName().equalsIgnoreCase(rs.getString("NodeName"))
-              && rs.getString("TagPairs").contains(md.getTagPairsPattern())) {
-              metricDefinition = md;
-              break;
-            }
-          }
-          if (metricDefinition == null) {
-            LOG.error("Error during getMetric call : No metricdefinition found for  result");
-            continue;
-          }
-          ParsePosition parsePosition = new ParsePosition(0);
-          NumberFormat numberFormat = NumberFormat.getInstance();
-          Number parsedNumber = numberFormat.parse(rs.getString("MetricValue"), parsePosition);
-          if (results.containsKey(metricDefinition)) {
-            results.get(metricDefinition).add(new DataPoint(rs.getLong("RecordTimeStamp"), parsedNumber));
-          } else {
-            List<DataPoint> dataPoints = new ArrayList<DataPoint>();
-            dataPoints.add(new DataPoint(rs.getLong("RecordTimeStamp"), parsedNumber));
-            results.put(metricDefinition, dataPoints);
-          }
-        }
-      }
-    } catch (SQLException e) {
-      throw new SystemException("Error during getMetric call : caught exception - ", e);
-    }
-    return results;
-  }
-
-  // get the hostname for a given resource
-  private String getHost(Resource resource, String clusterName, String componentName) throws SystemException {
-    return hostNamePropertyId == null ?
-      hostProvider.getHostName(clusterName, componentName) :
-      hostProvider.getHostName((String) resource.getPropertyValue(hostNamePropertyId));
-  }
-
-
-  // ----- inner class : DataPoint -------------------------------------------
-
-  /**
-   * Structure to hold a single datapoint (value/timestamp pair) retrieved from the db.
-   */
-  private static class DataPoint {
-    private final long timestamp;
-    private final Number value;
-
-    // ----- Constructor -------------------------------------------------
-
-    /**
-     * Construct a data point from the given value and timestamp.
-     *
-     * @param timestamp the timestamp
-     * @param value     the value
-     */
-    private DataPoint(long timestamp, Number value) {
-      this.timestamp = timestamp;
-      this.value = value;
-    }
-
-    // ----- DataPoint ---------------------------------------------------
-
-    /**
-     * Get the timestamp value.
-     *
-     * @return the timestamp
-     */
-    public long getTimestamp() {
-      return timestamp;
-    }
-
-    /**
-     * Get the value.
-     *
-     * @return the value
-     */
-    public Number getValue() {
-      return value;
-    }
-
-    // ----- Object overrides --------------------------------------------
-
-    @Override
-    public String toString() {
-      return "{" + value + " : " + timestamp + "}";
-    }
-  }
-
-  private class MetricDefinition {
-    long startTime;
-    long endTime;
-
-    String recordTypeContext;
-    String recordTypeName;
-    String tagPairsPattern;
-    String metricName;
-    String serviceName;
-    String nodeName;
-
-    String propertyKey;
-    String requestedPropertyKey;
-    TemporalInfo temporalInfo;
-
-    private MetricDefinition(long startTime, long endTime, String recordTypeContext, String recordTypeName, String tagPairsPattern, String metricName, String serviceName, String nodeName, String propertyKey, String requestedPropertyKey, TemporalInfo temporalInfo) {
-      this.startTime = startTime;
-      this.endTime = endTime;
-      this.recordTypeContext = recordTypeContext;
-      this.recordTypeName = recordTypeName;
-      this.tagPairsPattern = tagPairsPattern;
-      this.metricName = metricName;
-      this.serviceName = serviceName;
-      this.nodeName = nodeName;
-      this.propertyKey = propertyKey;
-      this.requestedPropertyKey = requestedPropertyKey;
-      this.temporalInfo = temporalInfo;
-    }
-
-    private MetricDefinition(String recordTypeContext, String recordTypeName, String tagPairsPattern, String metricName, String serviceName, String nodeName) {
-      this.recordTypeContext = recordTypeContext;
-      this.recordTypeName = recordTypeName;
-      this.tagPairsPattern = tagPairsPattern;
-      this.metricName = metricName;
-      this.serviceName = serviceName;
-      this.nodeName = nodeName;
-    }
-
-    public long getStartTime() {
-      return startTime;
-    }
-
-    public void setStartTime(long startTime) {
-      this.startTime = startTime;
-    }
-
-    public long getEndTime() {
-      return endTime;
-    }
-
-    public void setEndTime(long endTime) {
-      this.endTime = endTime;
-    }
-
-    public String getRecordTypeContext() {
-      return recordTypeContext;
-    }
-
-    public void setRecordTypeContext(String recordTypeContext) {
-      this.recordTypeContext = recordTypeContext;
-    }
-
-    public String getRecordTypeName() {
-      return recordTypeName;
-    }
-
-    public void setRecordTypeName(String recordTypeName) {
-      this.recordTypeName = recordTypeName;
-    }
-
-    public String getTagPairsPattern() {
-      return tagPairsPattern;
-    }
-
-    public void getTagPairsPattern(String tagPairsPattern) {
-      this.tagPairsPattern = tagPairsPattern;
-    }
-
-    public String getMetricName() {
-      return metricName;
-    }
-
-    public void setMetricName(String metricName) {
-      this.metricName = metricName;
-    }
-
-    public String getServiceName() {
-      return serviceName;
-    }
-
-    public void setServiceName(String serviceName) {
-      this.serviceName = serviceName;
-    }
-
-    public String getNodeName() {
-      return nodeName;
-    }
-
-    public void setNodeName(String nodeName) {
-      this.nodeName = nodeName;
-    }
-
-    public String getPropertyKey() {
-      return propertyKey;
-    }
-
-    public void setPropertyKey(String propertyKey) {
-      this.propertyKey = propertyKey;
-    }
-
-    public String getRequestedPropertyKey() {
-      return requestedPropertyKey;
-    }
-
-    public void setRequestedPropertyKey(String requestedPropertyKey) {
-      this.requestedPropertyKey = requestedPropertyKey;
-    }
-
-    public TemporalInfo getTemporalInfo() {
-      return temporalInfo;
-    }
-
-    public void setTemporalInfo(TemporalInfo temporalInfo) {
-      this.temporalInfo = temporalInfo;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-      if (this == o) return true;
-      if (o == null || getClass() != o.getClass()) return false;
-
-      MetricDefinition that = (MetricDefinition) o;
-
-      if (metricName != null ? !metricName.equals(that.metricName) : that.metricName != null) return false;
-      if (nodeName != null ? !nodeName.equalsIgnoreCase(that.nodeName) : that.nodeName != null) return false;
-      if (recordTypeContext != null ? !recordTypeContext.equals(that.recordTypeContext) : that.recordTypeContext != null)
-        return false;
-      if (recordTypeName != null ? !recordTypeName.equals(that.recordTypeName) : that.recordTypeName != null)
-        return false;
-      if (serviceName != null ? !serviceName.equals(that.serviceName) : that.serviceName != null) return false;
-      if (tagPairsPattern != null ? !(tagPairsPattern.contains(that.tagPairsPattern) ||
-        that.tagPairsPattern.contains(tagPairsPattern)) : that.tagPairsPattern != null)
-        return false;
-
-      return true;
-    }
-
-    @Override
-    public int hashCode() {
-      int result = recordTypeContext != null ? recordTypeContext.hashCode() : 0;
-      result = 31 * result + (recordTypeName != null ? recordTypeName.hashCode() : 0);
-      result = 31 * result + (metricName != null ? metricName.hashCode() : 0);
-      result = 31 * result + (serviceName != null ? serviceName.hashCode() : 0);
-      result = 31 * result + (nodeName != null ? nodeName.toLowerCase().hashCode() : 0);
-      return result;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/SinkConnectionFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/SinkConnectionFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/SinkConnectionFactory.java
deleted file mode 100644
index 154926c..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/sql/SinkConnectionFactory.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.controller.sql;
-
-import com.mchange.v2.c3p0.ComboPooledDataSource;
-import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.controller.jdbc.ConnectionFactory;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-
-/**
- * Factory for the sink database connection.
- */
-public class SinkConnectionFactory implements ConnectionFactory {
-
-  /**
-   * The database URL.
-   */
-  private String databaseUrl;
-
-  /**
-   * The database driver.
-   */
-  private String databaseDriver;
-
-  private String databaseUser;
-
-  private String databasePassword;
-
-  private boolean useIntegratedAuth;
-
-  /**
-   * Indicates whether or not the driver has been initialized
-   */
-  private boolean connectionInitialized = false;
-
-  private ComboPooledDataSource cpds;
-  /**
-   * The singleton.
-   */
-  private static SinkConnectionFactory singleton = new SinkConnectionFactory();
-
-  // ----- Constructor -------------------------------------------------------
-
-  protected SinkConnectionFactory() {
-    Configuration config = new Configuration();
-    this.databaseUrl    = config.getSinkDatabaseUrl();
-    this.databaseDriver = config.getSinkDatabaseDriver();
-    this.useIntegratedAuth = config.getSinkUseIntegratedAuth();
-    this.databaseUser = config.getSinkDatabaseUser();
-    this.databasePassword =  config.getSinkDatabasePassword();
-  }
-
-
-  // ----- SinkConnectionFactory ---------------------------------------------
-
-  /**
-   * Initialize.
-   */
-  public void init() {
-    this.cpds = new ComboPooledDataSource();
-    this.cpds.setJdbcUrl(this.databaseUrl);
-    if(!useIntegratedAuth) {
-      this.cpds.setUser(this.databaseUser);
-      this.cpds.setPassword(this.databasePassword);
-    }
-    this.cpds.setMaxPoolSize(5);
-  }
-
-  /**
-   * Get the singleton instance.
-   *
-   * @return the singleton instance
-   */
-  public static SinkConnectionFactory instance() {
-    return singleton;
-  }
-
-  /**
-   * Get the database URL.
-   *
-   * @return the database URL
-   */
-  public String getDatabaseUrl() {
-    return databaseUrl;
-  }
-
-  /**
-   * Get the database driver.
-   *
-   * @return the database driver
-   */
-  public String getDatabaseDriver() {
-    return databaseDriver;
-  }
-
-// ----- ConnectionFactory -----------------------------------------------
-
-  @Override
-  public Connection getConnection() throws SQLException {
-    synchronized (this) {
-      if (!connectionInitialized) {
-        try {
-          Class.forName(databaseDriver);
-        } catch (Exception e) {
-          throw new SQLException("Can't load the driver class.", e);
-        }
-        init();
-        connectionInitialized = true;
-      }
-    }
-    return this.cpds.getConnection();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/common-services/AMS/0.1.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMS/0.1.0/metainfo.xml b/ambari-server/src/main/resources/common-services/AMS/0.1.0/metainfo.xml
index 3551da8..bcccb1d 100644
--- a/ambari-server/src/main/resources/common-services/AMS/0.1.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/metainfo.xml
@@ -92,6 +92,20 @@
             </package>
           </packages>
         </osSpecific>
+        <osSpecific>
+          <osFamily>winsrv6</osFamily>
+          <packages>
+            <package>
+              <name>ambari-metrics-collector.msi</name>
+            </package>
+            <package>
+              <name>ambari-metrics-monitor.msi</name>
+            </package>
+            <package>
+              <name>ambari-metrics-hadoop-sink.msi</name>
+            </package>
+          </packages>
+        </osSpecific>
       </osSpecifics>
 
       <commandScript>

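The winsrv6 <osSpecific> block above is what lets the common AMS service definition carry Windows MSI packages alongside the existing Linux ones. As a rough sketch of what that declaration implies (not Ambari's actual resolver; only the winsrv6 entries are taken from the metainfo above):

    # Map an OS family to the packages its <osSpecific> block declares.
    os_family_packages = {
        "winsrv6": ["ambari-metrics-collector.msi",
                    "ambari-metrics-monitor.msi",
                    "ambari-metrics-hadoop-sink.msi"],
    }

    def packages_for(os_family):
        # Families without a matching block contribute no extra packages.
        return os_family_packages.get(os_family, [])

    print(packages_for("winsrv6"))
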
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams.py b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams.py
index 81732b8..587f3f6 100644
--- a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams.py
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams.py
@@ -19,8 +19,89 @@ limitations under the License.
 """
 
 from resource_management import *
+from ambari_commons import OSConst
+from service_mapping import *
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+import glob
 
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def ams(name=None):
+  import params
+  if name == 'collector':
+    if not check_windows_service_exists(collector_win_service_name):
+      Execute(format("cmd /C cd {ams_collector_home_dir} & ambari-metrics-collector.cmd setup"))
+
+    Directory(params.ams_collector_conf_dir,
+              owner=params.ams_user,
+              recursive=True
+    )
+
+    Directory(params.ams_checkpoint_dir,
+              owner=params.ams_user,
+              recursive=True
+    )
+
+    XmlConfig("ams-site.xml",
+              conf_dir=params.ams_collector_conf_dir,
+              configurations=params.config['configurations']['ams-site'],
+              configuration_attributes=params.config['configuration_attributes']['ams-site'],
+              owner=params.ams_user,
+    )
+
+    XmlConfig( "hbase-site.xml",
+               conf_dir = params.ams_collector_conf_dir,
+               configurations = params.config['configurations']['ams-hbase-site'],
+               configuration_attributes=params.config['configuration_attributes']['ams-hbase-site'],
+               owner = params.ams_user,
+    )
+
+    if params.log4j_props is not None:
+      File(os.path.join(params.ams_collector_conf_dir, "log4j.properties"),
+           owner=params.ams_user,
+           content=params.log4j_props
+      )
+
+    File(os.path.join(params.ams_collector_conf_dir, "ams-env.cmd"),
+         owner=params.ams_user,
+         content=InlineTemplate(params.ams_env_sh_template)
+    )
+
+    pass
+
+  elif name == 'monitor':
+    if not check_windows_service_exists(monitor_win_service_name):
+      Execute(format("cmd /C cd {ams_monitor_home_dir} & ambari-metrics-monitor.cmd setup"))
+
+    # create symbolic links to the ams sink jars so other services can load them
+    links_pairs = [
+      ("%HADOOP_HOME%\\share\\hadoop\\common\\lib\\ambari-metrics-hadoop-sink-with-common.jar",
+       "%SINK_HOME%\\hadoop-sink\\ambari-metrics-hadoop-sink-with-common-*.jar"),
+      ("%HBASE_HOME%\\lib\\ambari-metrics-hadoop-sink-with-common.jar",
+       "%SINK_HOME%\\hadoop-sink\\ambari-metrics-hadoop-sink-with-common-*.jar"),
+    ]
+    for link_pair in links_pairs:
+      link, target = link_pair
+      target = glob.glob(os.path.expandvars(target))[0].replace("\\\\", "\\")
+      Execute('cmd /c mklink "{0}" "{1}"'.format(link, target))
+
+    Directory(params.ams_monitor_conf_dir,
+              owner=params.ams_user,
+              recursive=True
+    )
+
+    TemplateConfig(
+      os.path.join(params.ams_monitor_conf_dir, "metric_monitor.ini"),
+      owner=params.ams_user,
+      template_tag=None
+    )
+
+    TemplateConfig(
+      os.path.join(params.ams_monitor_conf_dir, "metric_groups.conf"),
+      owner=params.ams_user,
+      template_tag=None
+    )
 
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def ams(name=None):
   import params
 

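The monitor branch above resolves the versioned sink jar with a wildcard and shells out to mklink so other services can load it under a stable name. A standalone sketch of that step (Windows-only, since %VAR% expansion and mklink are Windows constructs; note it adds a guard the script above omits, where an empty glob would raise IndexError):

    import glob
    import os
    import subprocess

    link = os.path.expandvars(
        r"%HADOOP_HOME%\share\hadoop\common\lib\ambari-metrics-hadoop-sink-with-common.jar")
    pattern = os.path.expandvars(
        r"%SINK_HOME%\hadoop-sink\ambari-metrics-hadoop-sink-with-common-*.jar")

    matches = glob.glob(pattern)
    if matches:
        target = matches[0].replace("\\\\", "\\")
        # mklink is a cmd.exe builtin, so it must run through cmd /c.
        subprocess.check_call('cmd /c mklink "%s" "%s"' % (link, target))
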
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams_service.py b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams_service.py
index 2785c38..57d53e5 100644
--- a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams_service.py
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams_service.py
@@ -19,9 +19,19 @@ limitations under the License.
 """
 
 from resource_management import *
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def ams_service(name, action):
+  import service_mapping
+  if name == 'collector':
+    Service(service_mapping.collector_win_service_name, action=action)
+  elif name == 'monitor':
+    Service(service_mapping.monitor_win_service_name, action=action)
 
-def ams_service(name='collector', action='start'):
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
+def ams_service(name, action):
   import params
 
   if name == 'collector':

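ams_service is now defined twice and selected by OS family through the @OsFamilyFuncImpl decorator. A toy re-implementation of that dispatch idea, to show the mechanics (a sketch of the pattern only, not ambari_commons itself):

    import platform

    _impls = {}

    def os_family_func_impl(os_family):
        def register(func):
            _impls.setdefault(func.__name__, {})[os_family] = func
            def dispatch(*args, **kwargs):
                family = "winsrv" if platform.system() == "Windows" else "default"
                table = _impls[func.__name__]
                # Fall back to the default implementation for other families.
                return table.get(family, table["default"])(*args, **kwargs)
            dispatch.__name__ = func.__name__
            return dispatch
        return register

    @os_family_func_impl("default")
    def ams_service(name, action):
        print("linux impl: %s %s" % (action, name))

    @os_family_func_impl("winsrv")
    def ams_service(name, action):
        print("windows impl: %s %s" % (action, name))

    ams_service("collector", "start")
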
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/hbase.py
index d861338..1496035 100644
--- a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/hbase.py
@@ -18,10 +18,80 @@ limitations under the License.
 
 """
 import os
-
+from ambari_commons import OSConst
 from resource_management import *
-import sys
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def hbase(name=None):
+  import params
+  Directory(params.hbase_conf_dir,
+            owner = params.hadoop_user,
+            recursive = True
+  )
+  Directory(params.hbase_tmp_dir,
+             recursive = True,
+             owner = params.hadoop_user
+  )
 
+  Directory(os.path.join(params.local_dir, "jars"),
+             owner = params.hadoop_user,
+             recursive = True
+  )
+
+  XmlConfig("hbase-site.xml",
+            conf_dir = params.hbase_conf_dir,
+            configurations = params.config['configurations']['ams-hbase-site'],
+            configuration_attributes=params.config['configuration_attributes']['ams-hbase-site'],
+            owner = params.hadoop_user
+  )
+
+  if 'ams-hbase-policy' in params.config['configurations']:
+    XmlConfig("hbase-policy.xml",
+              conf_dir = params.hbase_conf_dir,
+              configurations = params.config['configurations']['ams-hbase-policy'],
+              configuration_attributes=params.config['configuration_attributes']['ams-hbase-policy'],
+              owner = params.hadoop_user
+    )
+  # Manually overriding ownership of file installed by hadoop package
+  else:
+    File(os.path.join(params.hbase_conf_dir, "hbase-policy.xml"),
+          owner = params.hadoop_user
+    )
+
+  # File(format("{hbase_conf_dir}/hbase-env.cmd"),
+  #      owner = params.hadoop_user,
+  #      content=InlineTemplate(params.hbase_env_sh_template)
+  # )
+
+  # Metrics properties
+  # File(os.path.join(params.hbase_conf_dir, "hadoop-metrics2-hbase.properties"),
+  #      owner = params.hadoop_user,
+  #      content=Template("hadoop-metrics2-hbase.properties.j2")
+  # )
+
+  hbase_TemplateConfig('regionservers', user=params.hadoop_user)
+
+  if params.security_enabled:
+    hbase_TemplateConfig(format("hbase_{name}_jaas.conf"), user=params.hadoop_user)
+
+  if name != "client":
+    Directory(params.hbase_log_dir,
+               owner = params.hadoop_user,
+               recursive = True
+    )
+
+  if params.hbase_log4j_props is not None:
+    File(os.path.join(params.hbase_conf_dir, "log4j.properties"),
+         owner=params.hadoop_user,
+         content=params.hbase_log4j_props
+    )
+  elif os.path.exists(os.path.join(params.hbase_conf_dir, "log4j.properties")):
+    File(os.path.join(params.hbase_conf_dir,"log4j.properties"),
+         owner=params.hadoop_user
+    )
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hbase(name=None # 'master' or 'regionserver' or 'client'
               ):
   import params
@@ -83,10 +153,10 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
   #   tag = 'GANGLIA-MASTER' if name == 'master' else 'GANGLIA-RS'
   # )
 
-  hbase_TemplateConfig('regionservers')
+  hbase_TemplateConfig('regionservers', user=params.hbase_user)
 
   if params.security_enabled:
-    hbase_TemplateConfig( format("hbase_{name}_jaas.conf"))
+    hbase_TemplateConfig( format("hbase_{name}_jaas.conf"), user=params.hbase_user)
   
   if name != "client":
     Directory( params.hbase_pid_dir,
@@ -113,11 +183,10 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
       owner=params.hbase_user
     )
 
-
-def hbase_TemplateConfig(name, tag=None):
+def hbase_TemplateConfig(name, tag=None, user=None):
   import params
 
-  TemplateConfig( format("{hbase_conf_dir}/{name}"),
-      owner = params.hbase_user,
+  TemplateConfig( os.path.join(params.hbase_conf_dir, name),
+      owner = user,
       template_tag = tag
   )

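Two small but deliberate changes in the shared helper: hbase_TemplateConfig now takes the owning user explicitly (hadoop_user on Windows, hbase_user on Linux), and it builds the target path with os.path.join instead of format("{hbase_conf_dir}/{name}"), so the separator matches the platform. For example:

    import ntpath
    import posixpath

    # os.path.join behaves like one of these depending on the platform:
    print(posixpath.join("/etc/ams-hbase/conf", "regionservers"))
    # -> /etc/ams-hbase/conf/regionservers
    print(ntpath.join(r"C:\ams\hbase\conf", "regionservers"))
    # -> C:\ams\hbase\conf\regionservers
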
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/metric_collector.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/metric_collector.py b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/metric_collector.py
index ae71aa1..2cba771 100644
--- a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/metric_collector.py
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/metric_collector.py
@@ -22,6 +22,7 @@ from resource_management import *
 from ams import ams
 from ams_service import ams_service
 from hbase import hbase
+from status import check_service_status
 
 class AmsCollector(Script):
     def install(self, env):
@@ -55,8 +56,7 @@ class AmsCollector(Script):
     def status(self, env):
         import status_params
         env.set_params(status_params)
-        pid_file = format("{ams_collector_pid_dir}/ambari-metrics-collector.pid")
-        check_process_status(pid_file)
+        check_service_status(name='collector')
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/metric_monitor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/metric_monitor.py b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/metric_monitor.py
index 23bdf39..d53a834 100644
--- a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/metric_monitor.py
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/metric_monitor.py
@@ -21,7 +21,7 @@ limitations under the License.
 from resource_management import *
 from ams import ams
 from ams_service import ams_service
-from hbase import hbase
+from status import check_service_status
 
 class AmsMonitor(Script):
   def install(self, env):
@@ -52,8 +52,7 @@ class AmsMonitor(Script):
   def status(self, env):
     import status_params
     env.set_params(status_params)
-    pid_file = format("{ams_monitor_pid_dir}/ambari-metrics-monitor.pid")
-    check_process_status(pid_file)
+    check_service_status(name='monitor')
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params.py
index db08fea..84c2dce 100644
--- a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params.py
@@ -21,17 +21,20 @@ limitations under the License.
 from functions import calc_xmn_from_xms
 from resource_management import *
 import status_params
+from ambari_commons import OSCheck
 
+if OSCheck.is_windows_family():
+  from params_windows import *
+else:
+  from params_linux import *
 # server configurations
 config = Script.get_config()
 exec_tmp_dir = Script.get_tmp_dir()
 
 #AMS data
-ams_user = status_params.ams_user
 ams_pid_dir = status_params.ams_collector_pid_dir
 
 ams_collector_script = "/usr/sbin/ambari-metrics-collector"
-ams_collector_conf_dir = "/etc/ambari-metrics-collector/conf"
 ams_collector_pid_dir = status_params.ams_collector_pid_dir
 ams_collector_hosts = default("/clusterHostInfo/metric_collector_hosts", [])
 ams_collector_host_single = ams_collector_hosts[0] #TODO cardinality is 1+ so we can have more than one host
@@ -43,38 +46,14 @@ pass
 ams_collector_log_dir = config['configurations']['ams-env']['ams_collector_log_dir']
 ams_monitor_log_dir = config['configurations']['ams-env']['ams_monitor_log_dir']
 
-ams_monitor_conf_dir = "/etc/ambari-metrics-monitor/conf/"
 ams_monitor_dir = "/usr/lib/python2.6/site-packages/resource_monitoring"
 ams_monitor_pid_dir = status_params.ams_monitor_pid_dir
 ams_monitor_script = "/usr/sbin/ambari-metrics-monitor"
 
-#RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
-#hadoop params
-if rpm_version is not None:
-#RPM versioning support
-  rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
-  hadoop_native_lib = format("/usr/hdp/current/hadoop-client/lib/native/")
-  hadoop_bin_dir = format("/usr/hdp/current/hadoop/bin")
-  daemon_script = format('/usr/hdp/current/hbase/bin/hbase-daemon.sh')
-  region_mover = format('/usr/hdp/current/hbase/bin/region_mover.rb')
-  region_drainer = format('/usr/hdp/current/hbase/bin/draining_servers.rb')
-  hbase_cmd = format('/usr/hdp/current/hbase/bin/hbase')
-else:
-  hadoop_native_lib = format("/usr/lib/hadoop/lib/native")
-  hadoop_bin_dir = "/usr/bin"
-  daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
-  region_mover = "/usr/lib/hbase/bin/region_mover.rb"
-  region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
-  hbase_cmd = "/usr/lib/hbase/bin/hbase"
-
-hadoop_conf_dir = "/etc/hadoop/conf"
-#hbase_conf_dir = "/etc/ams-hbase/conf"
-hbase_conf_dir = "/etc/ams-hbase/conf"
+
 hbase_excluded_hosts = config['commandParams']['excluded_hosts']
 hbase_drain_only = config['commandParams']['mark_draining_only']
 hbase_included_hosts = config['commandParams']['included_hosts']
@@ -120,6 +99,7 @@ smoke_test_user = config['configurations']['cluster-env']['smokeuser']
 smokeuser_permissions = "RWXCA"
 service_check_data = functions.get_unique_id_and_date()
 user_group = config['configurations']['cluster-env']["user_group"]
+hadoop_user = "hadoop"
 
 if security_enabled:
   _hostname_lowercase = config['hostname'].lower()

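params.py now branches on OSCheck at import time, so the OS-specific values (conf dirs, ams_user, hadoop/hbase paths) live in params_windows.py and params_linux.py while everything shared stays here. Stripped to its shape, the pattern is (platform.system() standing in for the Ambari-internal OSCheck, and the Windows path being illustrative):

    import platform

    if platform.system() == "Windows":
        # from params_windows import *   (real values come from MSI-set env vars)
        ams_collector_conf_dir = r"C:\ams\collector\conf"
    else:
        # from params_linux import *
        ams_collector_conf_dir = "/etc/ambari-metrics-collector/conf"

    print(ams_collector_conf_dir)
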
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params_linux.py
new file mode 100644
index 0000000..fbb1e75
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params_linux.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from ambari_commons import OSCheck
+import status_params
+config = Script.get_config()
+
+ams_collector_conf_dir = "/etc/ambari-metrics-collector/conf"
+ams_monitor_conf_dir = "/etc/ambari-metrics-monitor/conf/"
+ams_user = status_params.ams_user
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_native_lib = format("/usr/hdp/current/hadoop-client/lib/native/")
+  hadoop_bin_dir = format("/usr/hdp/current/hadoop/bin")
+  daemon_script = format('/usr/hdp/current/hbase/bin/hbase-daemon.sh')
+  region_mover = format('/usr/hdp/current/hbase/bin/region_mover.rb')
+  region_drainer = format('/usr/hdp/current/hbase/bin/draining_servers.rb')
+  hbase_cmd = format('/usr/hdp/current/hbase/bin/hbase')
+else:
+  hadoop_native_lib = format("/usr/lib/hadoop/lib/native")
+  hadoop_bin_dir = "/usr/bin"
+  daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
+  region_mover = "/usr/lib/hbase/bin/region_mover.rb"
+  region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
+  hbase_cmd = "/usr/lib/hbase/bin/hbase"
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hbase_conf_dir = "/etc/ams-hbase/conf"

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params_windows.py b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params_windows.py
new file mode 100644
index 0000000..1b3509e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/params_windows.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+
+from resource_management import *
+
+config = Script.get_config()
+
+ams_user = "hadoop"
+ams_collector_conf_dir = os.environ["COLLECTOR_CONF_DIR"]
+ams_collector_home_dir = os.environ["COLLECTOR_HOME"]
+ams_monitor_conf_dir = os.environ["MONITOR_CONF_DIR"]
+ams_monitor_home_dir = os.environ["MONITOR_HOME"]
+hadoop_native_lib = os.path.join(os.environ["HADOOP_HOME"], "bin")
+hadoop_bin_dir = os.path.join(os.environ["HADOOP_HOME"], "bin")
+hbase_cmd = os.path.join(os.environ["COLLECTOR_HOME"], "hbase", "bin", "hbase.cmd")
+hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf")
+hbase_conf_dir = os.path.join(os.environ["COLLECTOR_HOME"], "hbase", "conf")

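One caveat in the new params_windows.py: every path comes from a bare os.environ[...] lookup, which raises KeyError if the MSI setup step has not exported the variable yet. A defensive wrapper along these lines (an assumption about possible hardening, not something this commit adds) would surface a clearer error:

    import os

    def required_env(name):
        # Fail with a pointed message instead of a bare KeyError.
        try:
            return os.environ[name]
        except KeyError:
            raise RuntimeError("%s is not set; has the AMS MSI setup run?" % name)

    # e.g.: ams_collector_conf_dir = required_env("COLLECTOR_CONF_DIR")
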
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/service_mapping.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/service_mapping.py b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/service_mapping.py
new file mode 100644
index 0000000..6a96c28
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/service_mapping.py
@@ -0,0 +1,21 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+collector_win_service_name = "AmbariMetricsCollector"
+monitor_win_service_name = "AmbariMetricsHostMonitoring"

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/status.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/status.py b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/status.py
new file mode 100644
index 0000000..99ad1ab
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/status.py
@@ -0,0 +1,38 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
+def check_service_status(name):
+  if name == 'collector':
+    pid_file = format("{ams_collector_pid_dir}/ambari-metrics-collector.pid")
+  elif name == 'monitor':
+    pid_file = format("{ams_monitor_pid_dir}/ambari-metrics-monitor.pid")
+  check_process_status(pid_file)
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def check_service_status(name):
+  import service_mapping
+  if name == 'collector':
+    check_windows_service_status(service_mapping.collector_win_service_name)
+  elif name == 'monitor':
+    check_windows_service_status(service_mapping.monitor_win_service_name)

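On the default (Linux) branch, check_service_status still reduces to a pid-file probe via check_process_status. Roughly, that check amounts to the following (a minimal sketch; the real helper lives in resource_management and raises its own component-not-running error):

    import os

    def check_pid_file(pid_file):
        # Read the recorded pid and probe it with signal 0; an OSError
        # means the process is gone (or is not ours to signal).
        with open(pid_file) as f:
            pid = int(f.read().strip())
        os.kill(pid, 0)
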
http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/stacks/HDPWIN/2.1/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/configuration/cluster-env.xml
index de38931..b8f052b 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/configuration/cluster-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/configuration/cluster-env.xml
@@ -21,61 +21,10 @@
 -->
 
 <configuration>
-    <!-- metrics sink properties -->
-    <property>
-      <name>sink_database</name>
-      <value>Existing MSSQL Server database with sql auth</value>
-    </property>
-
-    <property>
-      <name>sink.dbservername</name>
-      <value/>
-    </property>
-
-    <property>
-      <name>sink.dblogin</name>
-      <value>hadoop</value>
-      <description>
-        DB user name.
-      </description>
-    </property>
-
-    <property>
-      <name>sink.dbpassword</name>
-      <value/>
-      <description>
-        DB password.
-      </description>
-    </property>
-
-    <property>
-      <name>sink.db.schema.name</name>
-      <value>HadoopMetrics</value>
-    </property>
-
-    <property>
-      <name>sink.jdbc.driver</name>
-      <value>com.microsoft.sqlserver.jdbc.SQLServerDriver</value>
-      <description>
-        JDBC driver class.
-      </description>
-    </property>
-
-    <property>
-      <name>sink.jdbc.url</name>
-      <value/>
-      <description>
-        JDBC URL.
-      </description>
-    </property>
-
     <property>
       <name>hadoop.user.name</name>
       <value>hadoop</value>
     </property>
-
-
-
     <property>
         <name>security_enabled</name>
         <value>false</value>
@@ -108,4 +57,4 @@
         <property-type>GROUP</property-type>
         <description>Hadoop user group.</description>
     </property>
-</configuration>
\ No newline at end of file
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/after-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/after-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/after-INSTALL/scripts/hook.py
index 2c0adc4..07a3a00 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/after-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/after-INSTALL/scripts/hook.py
@@ -38,24 +38,6 @@ class AfterInstallHook(Hook):
               mode="f",
               configuration_attributes=params.config['configuration_attributes']['core-site']
     )
-    download_file(os.path.join(params.config['hostLevelParams']['jdk_location'], "sqljdbc4.jar"),
-                  os.path.join(params.hadoop_common_dir, "sqljdbc4.jar")
-    )
-    download_file(os.path.join(params.config['hostLevelParams']['jdk_location'], "sqljdbc_auth.dll"),
-                  os.path.join(params.hadoop_common_dir, "sqljdbc_auth.dll")
-    )
-    download_file(os.path.join(params.config['hostLevelParams']['jdk_location'], "sqljdbc4.jar"),
-                  os.path.join(params.hbase_lib_dir, "sqljdbc4.jar")
-    )
-    download_file(os.path.join(params.config['hostLevelParams']['jdk_location'], "sqljdbc_auth.dll"),
-                  os.path.join(params.hadoop_common_bin, "sqljdbc_auth.dll")
-    )
-    download_file(os.path.join(params.config['hostLevelParams']['jdk_location'], "metrics-sink-1.0.0.jar"),
-                  os.path.join(params.hadoop_common_dir, "metrics-sink-1.0.0.jar")
-    )
-    download_file(os.path.join(params.config['hostLevelParams']['jdk_location'], "metrics-sink-1.0.0.jar"),
-                  os.path.join(params.hbase_lib_dir, "metrics-sink-1.0.0.jar")
-    )
 
     File(format("{params.hadoop_install_root}/cluster.properties"),
            content=Template("cluster.properties.j2"),

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/before-START/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/before-START/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/before-START/scripts/hook.py
index a81a09a..00fc27f 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/before-START/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/before-START/scripts/hook.py
@@ -28,6 +28,15 @@ class BeforeStartHook(Hook):
     self.run_custom_hook('before-ANY')
     self.run_custom_hook('after-INSTALL')
     env.set_params(params)
+    if params.has_metric_collector:
+      File(os.path.join(params.hadoop_conf_dir, "hadoop-metrics2.properties"),
+           owner=params.hadoop_user,
+           content=Template("hadoop-metrics2.properties.j2")
+      )
+      File(os.path.join(params.hbase_conf_dir, "hadoop-metrics2-hbase.properties"),
+           owner=params.hadoop_user,
+           content=Template("hadoop-metrics2-hbase.properties.j2")
+      )
 
 if __name__ == "__main__":
   BeforeStartHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c202998/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/before-START/scripts/params.py
index eee2c2e..ab960b1 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/before-START/scripts/params.py
@@ -18,5 +18,21 @@ limitations under the License.
 """
 
 from resource_management import *
-
+import nturl2path
 config = Script.get_config()
+ams_collector_hosts = default("/clusterHostInfo/metric_collector_hosts", [])
+has_metric_collector = len(ams_collector_hosts) > 0
+if has_metric_collector:
+  metric_collector_host = ams_collector_hosts[0]
+  metric_collector_port = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "0.0.0.0:8188")
+  if metric_collector_port and metric_collector_port.find(':') != -1:
+    metric_collector_port = metric_collector_port.split(':')[1]
+  pass
+  sink_home = os.environ["SINK_HOME"]
+  timeline_plugin_url = "file:"+nturl2path.pathname2url(os.path.join(sink_home, "hadoop-sink", "ambari-metrics-hadoop-sink.jar"))
+
+hadoop_conf_dir = os.environ["HADOOP_CONF_DIR"]
+hbase_conf_dir = os.environ["HBASE_CONF_DIR"]
+hadoop_user = "hadoop"
\ No newline at end of file
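
For reference, the timeline_plugin_url expression above turns a Windows jar path into a file: URL. With an illustrative SINK_HOME of C:\sink:

    import nturl2path

    jar = r"C:\sink\hadoop-sink\ambari-metrics-hadoop-sink.jar"
    print("file:" + nturl2path.pathname2url(jar))
    # -> file:///C:/sink/hadoop-sink/ambari-metrics-hadoop-sink.jar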