Posted to commits@ambari.apache.org by ol...@apache.org on 2018/09/27 09:28:37 UTC

[ambari] branch trunk updated: [AMBARI-24695] Remove ambari-metrics from ambari repository. (#2388)

This is an automated email from the ASF dual-hosted git repository.

oleewere pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new ea728ac  [AMBARI-24695] Remove ambari-metrics from ambari repository. (#2388)
ea728ac is described below

commit ea728ace0a0054359c54476a986fdda15433e1af
Author: avijayanhwx <av...@hortonworks.com>
AuthorDate: Thu Sep 27 02:28:32 2018 -0700

    [AMBARI-24695] Remove ambari-metrics from ambari repository. (#2388)
    
    * [AMBARI-24695] Remove ambari-metrics from ambari repository.
    
    * [AMBARI-24695] Remove ambari-metrics from ambari repository (2).
---
 .../src/main/resources/ui/admin-web/bower.json     |    2 +-
 ambari-metrics/ambari-metrics-assembly/pom.xml     | 1338 --------
 .../src/main/assembly/collector-windows-choco.xml  |   51 -
 .../src/main/assembly/collector-windows.xml        |  112 -
 .../src/main/assembly/collector.xml                |   84 -
 .../src/main/assembly/grafana.xml                  |   59 -
 .../src/main/assembly/monitor-windows-choco.xml    |   51 -
 .../src/main/assembly/monitor-windows.xml          |   89 -
 .../src/main/assembly/monitor.xml                  |   78 -
 .../src/main/assembly/sink-windows-choco.xml       |   51 -
 .../src/main/assembly/sink-windows.xml             |   69 -
 .../src/main/assembly/sink.xml                     |   73 -
 .../collector/ambari-metrics-collector.nuspec      |   26 -
 .../package/choco/collector/chocolateyinstall.ps1  |   94 -
 .../choco/collector/chocolateyuninstall.ps1        |   69 -
 .../choco/monitor/ambari-metrics-monitor.nuspec    |   26 -
 .../package/choco/monitor/chocolateyinstall.ps1    |   93 -
 .../package/choco/monitor/chocolateyuninstall.ps1  |   69 -
 .../choco/sink/ambari-metrics-hadoop-sink.nuspec   |   26 -
 .../main/package/choco/sink/chocolateyinstall.ps1  |   91 -
 .../package/choco/sink/chocolateyuninstall.ps1     |   69 -
 .../src/main/package/deb/control/control           |   22 -
 .../src/main/package/deb/control/postinst          |   36 -
 .../src/main/package/deb/control/preinst           |   45 -
 .../src/main/package/deb/control/prerm             |   28 -
 .../src/main/package/rpm/sink/postinstall.sh       |   36 -
 .../src/main/package/rpm/sink/preinstall.sh        |   45 -
 ambari-metrics/ambari-metrics-common/pom.xml       |  231 --
 .../sink/timeline/AbstractTimelineMetricsSink.java |  754 -----
 .../metrics2/sink/timeline/AggregationResult.java  |   60 -
 .../metrics2/sink/timeline/AppCookieManager.java   |  219 --
 .../metrics2/sink/timeline/ContainerMetric.java    |  218 --
 .../metrics2/sink/timeline/MetadataException.java  |   28 -
 .../metrics2/sink/timeline/MetricAggregate.java    |  110 -
 .../sink/timeline/MetricClusterAggregate.java      |   73 -
 .../sink/timeline/MetricHostAggregate.java         |   81 -
 .../MetricsSinkInitializationException.java        |   25 -
 .../metrics2/sink/timeline/PostProcessingUtil.java |  164 -
 .../hadoop/metrics2/sink/timeline/Precision.java   |   78 -
 .../timeline/PrecisionLimitExceededException.java  |   36 -
 .../sink/timeline/SingleValuedTimelineMetric.java  |   93 -
 .../metrics2/sink/timeline/TimelineMetric.java     |  224 --
 .../sink/timeline/TimelineMetricMetadata.java      |  201 --
 .../sink/timeline/TimelineMetricUtils.java         |   65 -
 .../TimelineMetricWithAggregatedValues.java        |   65 -
 .../metrics2/sink/timeline/TimelineMetrics.java    |  123 -
 .../hadoop/metrics2/sink/timeline/TopNConfig.java  |   70 -
 .../sink/timeline/UnableToConnectException.java    |   46 -
 .../availability/MetricCollectorHAHelper.java      |  109 -
 .../MetricCollectorUnavailableException.java       |   24 -
 ...etricSinkWriteShardHostnameHashingStrategy.java |   60 -
 .../availability/MetricSinkWriteShardStrategy.java |   24 -
 .../sink/timeline/cache/TimelineMetricsCache.java  |  219 --
 .../sink/timeline/configuration/Configuration.java |   62 -
 .../apache/hadoop/metrics2/sink/util/Servers.java  |  111 -
 .../timeline/AbstractTimelineMetricSinkTest.java   |  240 --
 .../sink/timeline/AppCookieManagerTest.java        |   52 -
 .../availability/MetricCollectorHATest.java        |  211 --
 .../availability/ShardingStrategyTest.java         |   64 -
 .../timeline/cache/HandleConnectExceptionTest.java |  243 --
 .../timeline/cache/PostProcessingUtilTest.java     |  113 -
 .../timeline/cache/TimelineMetricsCacheTest.java   |  145 -
 ambari-metrics/ambari-metrics-flume-sink/pom.xml   |  175 --
 .../src/main/assemblies/empty.xml                  |   21 -
 .../src/main/assemblies/jar-with-common.xml        |   35 -
 .../src/main/conf/flume-metrics2.properties.j2     |   31 -
 .../sink/flume/FlumeTimelineMetricsSink.java       |  272 --
 .../sink/flume/FlumeTimelineMetricsSinkTest.java   |  172 --
 ambari-metrics/ambari-metrics-grafana/README.md    |  281 --
 .../ambari-metrics/datasource.js                   | 1092 -------
 .../ambari-metrics/directives.js                   |   36 -
 .../ambari-metrics/partials/config.html            |   19 -
 .../ambari-metrics/partials/query.editor.html      |  176 --
 .../ambari-metrics/partials/query.options.html     |   42 -
 .../ambari-metrics/plugin.json                     |   14 -
 .../ambari-metrics/queryCtrl.js                    |  160 -
 .../conf/unix/ambari-metrics-grafana               |  191 --
 .../conf/unix/ams-grafana-env.sh                   |   29 -
 .../conf/unix/ams-grafana.ini                      |  255 --
 ambari-metrics/ambari-metrics-grafana/pom.xml      |  142 -
 .../screenshots/1-add-datasource.png               |  Bin 108602 -> 0 bytes
 .../screenshots/10-choose-hostname.png             |  Bin 298654 -> 0 bytes
 .../screenshots/11-choose-agg-rate-precision.png   |  Bin 276486 -> 0 bytes
 .../screenshots/12-change-panel-title.png          |  Bin 258642 -> 0 bytes
 .../screenshots/13-save-dashboard.png              |  Bin 262005 -> 0 bytes
 .../screenshots/14-change-timerange.png            |  Bin 310766 -> 0 bytes
 .../screenshots/15-change-units.png                |  Bin 322069 -> 0 bytes
 .../screenshots/16-display-style-graph-1.png       |  Bin 285467 -> 0 bytes
 .../screenshots/17-series-specific-override.png    |  Bin 302825 -> 0 bytes
 .../screenshots/18-override-time.png               |  Bin 255655 -> 0 bytes
 .../screenshots/19-edit-graph.png                  |  Bin 191904 -> 0 bytes
 .../screenshots/2-datasource-details.png           |  Bin 125313 -> 0 bytes
 .../screenshots/20-templating.png                  |  Bin 694376 -> 0 bytes
 .../screenshots/21-multi-templating.png            |  Bin 92034 -> 0 bytes
 .../screenshots/3-test-datasource.png              |  Bin 136121 -> 0 bytes
 .../screenshots/4-dashboard-dropdown.png           |  Bin 126964 -> 0 bytes
 .../screenshots/5-dashboard-graph-menu.png         |  Bin 146851 -> 0 bytes
 .../screenshots/6-graph-panels.png                 |  Bin 105383 -> 0 bytes
 .../screenshots/7-choose-datasource.png            |  Bin 246860 -> 0 bytes
 .../screenshots/8-choose-component.png             |  Bin 199123 -> 0 bytes
 .../screenshots/9-choose-metric.png                |  Bin 216473 -> 0 bytes
 .../screenshots/add-dashboard.png                  |  Bin 107965 -> 0 bytes
 .../screenshots/full-dashboard.png                 |  Bin 161956 -> 0 bytes
 .../src/main/assemblies/empty.xml                  |   21 -
 ambari-metrics/ambari-metrics-hadoop-sink/pom.xml  |  205 --
 .../src/main/assemblies/empty.xml                  |   21 -
 .../src/main/assemblies/jar-with-common.xml        |   38 -
 .../sink/timeline/HadoopTimelineMetricsSink.java   |  518 ----
 .../timeline/HadoopTimelineMetricsSinkTest.java    |  450 ---
 .../conf/unix/log4j.properties                     |   31 -
 .../conf/windows/log4j.properties                  |   29 -
 .../ambari-metrics-host-aggregator/pom.xml         |  169 -
 .../host/aggregator/AggregatorApplication.java     |  256 --
 .../host/aggregator/AggregatorWebService.java      |   56 -
 .../host/aggregator/TimelineMetricsHolder.java     |  123 -
 .../sink/timeline/AbstractMetricPublisher.java     |  169 -
 .../sink/timeline/AggregatedMetricsPublisher.java  |  108 -
 .../sink/timeline/RawMetricsPublisher.java         |   70 -
 .../host/aggregator/AggregatorApplicationTest.java |   55 -
 .../host/aggregator/AggregatorWebServiceTest.java  |  135 -
 .../host/aggregator/TimelineMetricsHolderTest.java |  108 -
 .../sink/timeline/AbstractMetricPublisherTest.java |   82 -
 .../timeline/AggregatedMetricsPublisherTest.java   |  155 -
 .../sink/timeline/RawMetricsPublisherTest.java     |  153 -
 .../conf/unix/ambari-metrics-monitor               |  226 --
 .../conf/unix/metric_groups.conf                   |   37 -
 .../conf/unix/metric_monitor.ini                   |   40 -
 .../conf/windows/ambari-metrics-monitor.cmd        |   17 -
 .../conf/windows/metric_groups.conf                |   19 -
 .../conf/windows/metric_monitor.ini                |   33 -
 .../ambari-metrics-host-monitoring/pom.xml         |  252 --
 .../src/main/package/rpm/preremove.sh              |   28 -
 .../src/main/python/__init__.py                    |   21 -
 .../src/main/python/amhm_service.py                |  189 --
 .../src/main/python/core/__init__.py               |   37 -
 .../src/main/python/core/aggregator.py             |  112 -
 .../src/main/python/core/application_metric_map.py |  189 --
 .../src/main/python/core/blacklisted_set.py        |   73 -
 .../src/main/python/core/config_reader.py          |  317 --
 .../src/main/python/core/controller.py             |  145 -
 .../src/main/python/core/emitter.py                |  222 --
 .../src/main/python/core/event_definition.py       |   84 -
 .../src/main/python/core/host_info.py              |  374 ---
 .../src/main/python/core/krberr.py                 |   42 -
 .../src/main/python/core/metric_collector.py       |   95 -
 .../src/main/python/core/security.py               |   98 -
 .../src/main/python/core/spnego_kerberos_auth.py   |  164 -
 .../src/main/python/core/stop_handler.py           |  139 -
 .../src/main/python/main.py                        |  108 -
 .../src/main/python/psutil/LICENSE                 |   27 -
 .../src/main/python/psutil/MANIFEST.in             |   14 -
 .../src/main/python/psutil/Makefile                |   77 -
 .../src/main/python/psutil/README                  |  270 --
 .../src/main/python/psutil/build.py                |   57 -
 .../src/main/python/psutil/docs/Makefile           |  177 --
 .../src/main/python/psutil/docs/README             |   15 -
 .../main/python/psutil/docs/_static/copybutton.js  |   57 -
 .../src/main/python/psutil/docs/_static/sidebar.js |  161 -
 .../python/psutil/docs/_template/globaltoc.html    |   12 -
 .../python/psutil/docs/_template/indexcontent.html |    4 -
 .../python/psutil/docs/_template/indexsidebar.html |   16 -
 .../main/python/psutil/docs/_template/page.html    |   66 -
 .../docs/_themes/pydoctheme/static/pydoctheme.css  |  187 --
 .../psutil/docs/_themes/pydoctheme/theme.conf      |   23 -
 .../src/main/python/psutil/docs/conf.py            |  253 --
 .../src/main/python/psutil/docs/index.rst          | 1247 --------
 .../src/main/python/psutil/docs/make.bat           |  242 --
 .../src/main/python/psutil/examples/disk_usage.py  |   63 -
 .../src/main/python/psutil/examples/free.py        |   42 -
 .../src/main/python/psutil/examples/iotop.py       |  178 --
 .../src/main/python/psutil/examples/killall.py     |   32 -
 .../src/main/python/psutil/examples/meminfo.py     |   69 -
 .../src/main/python/psutil/examples/netstat.py     |   65 -
 .../src/main/python/psutil/examples/nettop.py      |  165 -
 .../src/main/python/psutil/examples/pmap.py        |   58 -
 .../main/python/psutil/examples/process_detail.py  |  162 -
 .../src/main/python/psutil/examples/top.py         |  232 --
 .../src/main/python/psutil/examples/who.py         |   34 -
 .../src/main/python/psutil/make.bat                |  176 --
 .../src/main/python/psutil/psutil/__init__.py      | 1987 ------------
 .../src/main/python/psutil/psutil/_common.py       |  258 --
 .../src/main/python/psutil/psutil/_compat.py       |  433 ---
 .../src/main/python/psutil/psutil/_psbsd.py        |  389 ---
 .../src/main/python/psutil/psutil/_pslinux.py      | 1225 --------
 .../src/main/python/psutil/psutil/_psosx.py        |  341 --
 .../src/main/python/psutil/psutil/_psposix.py      |  157 -
 .../src/main/python/psutil/psutil/_pssunos.py      |  533 ----
 .../src/main/python/psutil/psutil/_psutil_bsd.c    | 2212 -------------
 .../src/main/python/psutil/psutil/_psutil_bsd.h    |   51 -
 .../src/main/python/psutil/psutil/_psutil_common.c |   37 -
 .../src/main/python/psutil/psutil/_psutil_common.h |   10 -
 .../src/main/python/psutil/psutil/_psutil_linux.c  |  510 ---
 .../src/main/python/psutil/psutil/_psutil_linux.h  |   20 -
 .../src/main/python/psutil/psutil/_psutil_osx.c    | 1881 ------------
 .../src/main/python/psutil/psutil/_psutil_osx.h    |   41 -
 .../src/main/python/psutil/psutil/_psutil_posix.c  |  128 -
 .../src/main/python/psutil/psutil/_psutil_posix.h  |   10 -
 .../src/main/python/psutil/psutil/_psutil_sunos.c  | 1290 --------
 .../src/main/python/psutil/psutil/_psutil_sunos.h  |   27 -
 .../main/python/psutil/psutil/_psutil_windows.c    | 3241 --------------------
 .../main/python/psutil/psutil/_psutil_windows.h    |   70 -
 .../src/main/python/psutil/psutil/_pswindows.py    |  485 ---
 .../python/psutil/psutil/arch/bsd/process_info.c   |  285 --
 .../python/psutil/psutil/arch/bsd/process_info.h   |   15 -
 .../python/psutil/psutil/arch/osx/process_info.c   |  293 --
 .../python/psutil/psutil/arch/osx/process_info.h   |   16 -
 .../main/python/psutil/psutil/arch/windows/glpi.h  |   41 -
 .../python/psutil/psutil/arch/windows/ntextapi.h   |  287 --
 .../psutil/psutil/arch/windows/process_handles.c   |  336 --
 .../psutil/psutil/arch/windows/process_handles.h   |   10 -
 .../psutil/psutil/arch/windows/process_info.c      |  443 ---
 .../psutil/psutil/arch/windows/process_info.h      |   17 -
 .../python/psutil/psutil/arch/windows/security.c   |  238 --
 .../python/psutil/psutil/arch/windows/security.h   |   17 -
 .../src/main/python/psutil/setup.py                |  198 --
 .../test/python/core/TestApplicationMetricMap.py   |  108 -
 .../src/test/python/core/TestEmitter.py            |  145 -
 .../src/test/python/core/TestHostInfo.py           |  348 ---
 .../src/test/python/core/TestMetricCollector.py    |   47 -
 .../src/test/python/unitTests.py                   |  140 -
 ambari-metrics/ambari-metrics-kafka-sink/pom.xml   |  219 --
 .../src/main/assemblies/empty.xml                  |   21 -
 .../src/main/assemblies/jar-with-common.xml        |   35 -
 .../sink/kafka/KafkaTimelineMetricsReporter.java   |  515 ----
 .../kafka/KafkaTimelineMetricsReporterMBean.java   |   25 -
 .../metrics2/sink/kafka/ScheduledReporter.java     |  218 --
 .../kafka/KafkaTimelineMetricsReporterTest.java    |  160 -
 .../metrics2/sink/kafka/ScheduledReporterTest.java |  104 -
 ambari-metrics/ambari-metrics-storm-sink/pom.xml   |  206 --
 .../src/main/assemblies/empty.xml                  |   21 -
 .../hadoop/metrics2/sink/storm/NumberUtil.java     |   38 -
 .../sink/storm/StormTimelineMetricsReporter.java   |  284 --
 .../sink/storm/StormTimelineMetricsSink.java       |  422 ---
 .../sink/storm/StormTimelineMetricsSinkTest.java   |  186 --
 .../conf/unix/ambari-metrics-collector             |  496 ---
 .../conf/unix/ams-env.sh                           |   33 -
 .../conf/unix/ams-site.xml                         |  385 ---
 .../conf/unix/amshbase_metrics_whitelist           |  162 -
 .../conf/unix/hbase-site.xml                       |  280 --
 .../conf/unix/log4j.properties                     |   31 -
 .../conf/unix/metrics_whitelist                    |  654 ----
 .../conf/unix/sqlline/log4j.properties             |   76 -
 .../conf/unix/sqlline/phoenix_utils.py             |  192 --
 .../conf/unix/sqlline/sqlline.py                   |  105 -
 .../conf/windows/ambari-metrics-collector.cmd      |   17 -
 .../conf/windows/ams-env.cmd                       |   16 -
 .../conf/windows/ams-site.xml                      |   25 -
 .../conf/windows/ams.properties                    |   17 -
 .../conf/windows/amshbase_metrics_whitelist        |  162 -
 .../conf/windows/log4j.properties                  |   29 -
 .../conf/windows/metrics_whitelist                 |  654 ----
 .../ambari-metrics-timelineservice/pom.xml         | 1016 ------
 .../src/main/assemblies/empty.xml                  |   21 -
 .../src/main/assemblies/simulator.xml              |   70 -
 .../src/main/conf/hbase-site-metrics-service.xml   |   80 -
 .../src/main/conf/simulator-log4j.xml              |   45 -
 .../ambari/metrics/AMSApplicationServer.java       |  143 -
 .../metrics/core/loadsimulator/LoadRunner.java     |  154 -
 .../core/loadsimulator/MetricsLoadSimulator.java   |  138 -
 .../core/loadsimulator/MetricsSenderWorker.java    |   60 -
 .../metrics/core/loadsimulator/data/AppID.java     |   45 -
 .../core/loadsimulator/data/AppMetrics.java        |   47 -
 .../loadsimulator/data/ApplicationInstance.java    |   58 -
 .../loadsimulator/data/HostMetricsGenerator.java   |   61 -
 .../metrics/core/loadsimulator/data/Metric.java    |   71 -
 .../data/MetricsGeneratorConfigurer.java           |   93 -
 .../core/loadsimulator/net/MetricsSender.java      |   31 -
 .../core/loadsimulator/net/RestMetricsSender.java  |   92 -
 .../loadsimulator/net/StdOutMetricsSender.java     |   56 -
 .../metrics/core/loadsimulator/net/UrlService.java |  100 -
 .../metrics/core/loadsimulator/util/Json.java      |   62 -
 .../loadsimulator/util/RandomMetricsProvider.java  |   39 -
 .../core/loadsimulator/util/TimeStampProvider.java |   51 -
 .../metrics/core/timeline/FunctionUtils.java       |   47 -
 .../core/timeline/HBaseTimelineMetricsService.java |  573 ----
 .../core/timeline/MetricsCacheCommitterThread.java |   38 -
 .../MetricsSystemInitializationException.java      |   41 -
 .../core/timeline/PhoenixHBaseAccessor.java        | 2054 -------------
 .../core/timeline/TimelineMetricConfiguration.java |  743 -----
 .../timeline/TimelineMetricDistributedCache.java   |   32 -
 .../timeline/TimelineMetricServiceSummary.java     |   74 -
 .../timeline/TimelineMetricSplitPointComputer.java |  240 --
 .../metrics/core/timeline/TimelineMetricStore.java |  125 -
 .../core/timeline/TimelineMetricStoreWatcher.java  |  126 -
 .../timeline/TimelineMetricsAggregatorSink.java    |   60 -
 .../core/timeline/TimelineMetricsFilter.java       |  198 --
 .../core/timeline/TimelineMetricsIgniteCache.java  |  326 --
 .../core/timeline/TransientMetricReadHelper.java   |  139 -
 .../aggregators/AbstractTimelineAggregator.java    |  474 ---
 .../core/timeline/aggregators/AggregatorUtils.java |  255 --
 .../timeline/aggregators/CustomDownSampler.java    |   44 -
 .../timeline/aggregators/DownSamplerUtils.java     |  122 -
 .../aggregators/EventMetricDownSampler.java        |   84 -
 .../core/timeline/aggregators/Function.java        |  213 --
 .../aggregators/TimelineClusterMetric.java         |   97 -
 .../aggregators/TimelineMetricAggregator.java      |   59 -
 .../TimelineMetricAggregatorFactory.java           |  529 ----
 .../aggregators/TimelineMetricAppAggregator.java   |  192 --
 .../TimelineMetricClusterAggregator.java           |  152 -
 .../TimelineMetricClusterAggregatorSecond.java     |  295 --
 ...tricClusterAggregatorSecondWithCacheSource.java |  104 -
 .../TimelineMetricFilteringHostAggregator.java     |   94 -
 .../aggregators/TimelineMetricHostAggregator.java  |  125 -
 .../aggregators/TimelineMetricReadHelper.java      |  184 --
 .../core/timeline/aggregators/TopNDownSampler.java |  112 -
 .../v2/TimelineMetricClusterAggregator.java        |   93 -
 .../v2/TimelineMetricFilteringHostAggregator.java  |  119 -
 .../v2/TimelineMetricHostAggregator.java           |   77 -
 .../availability/AggregationTaskRunner.java        |  141 -
 .../timeline/availability/CheckpointManager.java   |   95 -
 .../availability/MetricCollectorHAController.java  |  330 --
 .../OnlineOfflineStateModelFactory.java            |   69 -
 .../discovery/TimelineMetricHostMetadata.java      |   60 -
 .../discovery/TimelineMetricMetadataKey.java       |   80 -
 .../discovery/TimelineMetricMetadataManager.java   |  863 ------
 .../discovery/TimelineMetricMetadataSync.java      |  227 --
 ...ractTimelineMetricsSeriesAggregateFunction.java |   98 -
 .../timeline/function/SeriesAggregateFunction.java |   42 -
 .../TimelineMetricsSeriesAggregateFunction.java    |   25 -
 ...elineMetricsSeriesAggregateFunctionFactory.java |   41 -
 .../TimelineMetricsSeriesAvgAggregateFunction.java |   39 -
 .../TimelineMetricsSeriesMaxAggregateFunction.java |   41 -
 .../TimelineMetricsSeriesMinAggregateFunction.java |   41 -
 .../TimelineMetricsSeriesSumAggregateFunction.java |   39 -
 .../metrics/core/timeline/query/Condition.java     |   52 -
 .../core/timeline/query/ConditionBuilder.java      |  156 -
 .../core/timeline/query/ConnectionProvider.java    |   29 -
 .../core/timeline/query/DefaultCondition.java      |  322 --
 .../timeline/query/DefaultPhoenixDataSource.java   |   90 -
 .../core/timeline/query/EmptyCondition.java        |  174 --
 .../timeline/query/PhoenixConnectionProvider.java  |   31 -
 .../core/timeline/query/PhoenixTransactSQL.java    | 1106 -------
 .../query/SplitByMetricNamesCondition.java         |  194 --
 .../metrics/core/timeline/query/TopNCondition.java |  162 -
 .../timeline/query/TransientMetricCondition.java   |  206 --
 .../core/timeline/sink/DefaultFSSinkProvider.java  |  153 -
 .../core/timeline/sink/ExternalMetricsSink.java    |   48 -
 .../core/timeline/sink/ExternalSinkProvider.java   |   35 -
 .../core/timeline/sink/HttpSinkProvider.java       |  231 --
 .../core/timeline/sink/KafkaSinkProvider.java      |  118 -
 .../DefaultInternalMetricsSourceProvider.java      |   42 -
 .../timeline/source/InternalMetricsSource.java     |   30 -
 .../timeline/source/InternalSourceProvider.java    |   39 -
 .../core/timeline/source/RawMetricsSource.java     |   85 -
 .../source/cache/InternalMetricCacheKey.java       |  109 -
 .../source/cache/InternalMetricCacheValue.java     |   37 -
 .../source/cache/InternalMetricsCache.java         |  229 --
 .../source/cache/InternalMetricsCacheProvider.java |   48 -
 .../cache/InternalMetricsCacheSizeOfEngine.java    |  148 -
 .../upgrade/core/AbstractPhoenixMetricsCopier.java |  164 -
 .../upgrade/core/MetricsDataMigrationLauncher.java |  328 --
 .../upgrade/core/PhoenixClusterMetricsCopier.java  |   74 -
 .../upgrade/core/PhoenixHostMetricsCopier.java     |   77 -
 .../timeline/uuid/HashBasedUuidGenStrategy.java    |  225 --
 .../core/timeline/uuid/MD5UuidGenStrategy.java     |   60 -
 .../core/timeline/uuid/MetricUuidGenStrategy.java  |   40 -
 .../timeline/uuid/Murmur3HashUuidGenStrategy.java  |   54 -
 .../core/timeline/uuid/TimelineMetricUuid.java     |   55 -
 .../records/ApplicationAttemptFinishData.java      |   95 -
 .../records/ApplicationAttemptHistoryData.java     |  171 --
 .../records/ApplicationAttemptStartData.java       |   82 -
 .../metrics/records/ApplicationFinishData.java     |   94 -
 .../metrics/records/ApplicationHistoryData.java    |  213 --
 .../metrics/records/ApplicationStartData.java      |  106 -
 .../metrics/records/ContainerFinishData.java       |   90 -
 .../metrics/records/ContainerHistoryData.java      |  182 --
 .../ambari/metrics/records/ContainerStartData.java |   92 -
 .../pb/ApplicationAttemptFinishDataPBImpl.java     |  239 --
 .../impl/pb/ApplicationAttemptStartDataPBImpl.java |  208 --
 .../impl/pb/ApplicationFinishDataPBImpl.java       |  226 --
 .../impl/pb/ApplicationStartDataPBImpl.java        |  229 --
 .../records/impl/pb/ContainerFinishDataPBImpl.java |  204 --
 .../records/impl/pb/ContainerStartDataPBImpl.java  |  258 --
 .../metrics/timeline/GenericObjectMapper.java      |  135 -
 .../ambari/metrics/timeline/NameValuePair.java     |   59 -
 .../ambari/metrics/timeline/TimelineReader.java    |  155 -
 .../ambari/metrics/timeline/TimelineStore.java     |   29 -
 .../ambari/metrics/timeline/TimelineWriter.java    |   46 -
 .../ambari/metrics/webapp/AMSController.java       |   37 -
 .../apache/ambari/metrics/webapp/AMSWebApp.java    |   42 -
 .../ambari/metrics/webapp/JAXBContextResolver.java |   64 -
 .../ambari/metrics/webapp/TimelineWebServices.java |  539 ----
 .../python/ambari_metrics_collector/__init__.py    |   21 -
 .../python/ambari_metrics_collector/properties.py  |  223 --
 .../serviceConfiguration.py                        |  152 -
 .../src/main/python/amc_service.py                 |  171 --
 .../src/main/python/embedded_hbase_service.py      |  202 --
 .../src/main/python/main.py                        |  214 --
 .../main/resources/metrics_def/AMBARI_SERVER.dat   |   40 -
 .../src/main/resources/metrics_def/AMS-HBASE.dat   |  245 --
 .../resources/metrics_def/AMSSMOKETESTFAKE.DAT     |    1 -
 .../resources/metrics_def/AMSSMOKETESTFAKE.dat     |    1 -
 .../src/main/resources/metrics_def/DATANODE.dat    |  161 -
 .../main/resources/metrics_def/FLUME_HANDLER.dat   |   17 -
 .../main/resources/metrics_def/HBASE_MASTER.dat    |  253 --
 .../resources/metrics_def/HBASE_REGIONSERVER.dat   |  600 ----
 .../main/resources/metrics_def/HIVEMETASTORE.dat   |  181 --
 .../src/main/resources/metrics_def/HIVESERVER2.dat |  117 -
 .../src/main/resources/metrics_def/HOST.dat        |   61 -
 .../resources/metrics_def/JOBHISTORYSERVER.dat     |   58 -
 .../main/resources/metrics_def/KAFKA_BROKER.dat    | 1103 -------
 .../src/main/resources/metrics_def/NAMENODE.dat    |  398 ---
 .../src/main/resources/metrics_def/NIMBUS.dat      |    7 -
 .../src/main/resources/metrics_def/NODEMANAGER.dat |   83 -
 .../main/resources/metrics_def/RESOURCEMANAGER.dat |  159 -
 .../metrics_def/TIMELINE_METRIC_STORE_WATCHER.DAT  |    1 -
 .../metrics_def/TIMELINE_METRIC_STORE_WATCHER.dat  |    1 -
 .../src/main/resources/scripts/ams_query.py        |  209 --
 .../src/main/resources/scripts/start.sh            |   30 -
 .../src/main/resources/scripts/start_slaves.sh     |   27 -
 .../src/main/resources/scripts/status_slaves.sh    |   22 -
 .../src/main/resources/scripts/stop.sh             |   32 -
 .../src/main/resources/scripts/stop_slaves.sh      |   26 -
 .../src/test/conf/ams-site.xml                     |   29 -
 .../src/test/conf/hadoop-policy.xml                |  134 -
 .../src/test/conf/hbase-site.xml                   |  245 --
 .../core/loadsimulator/data/TestAppMetrics.java    |  134 -
 .../core/loadsimulator/data/TestMetric.java        |   80 -
 .../jmetertest/jmetertest/AMSJMeterLoadTest.java   |  198 --
 .../jmetertest/jmetertest/AppGetMetric.java        |   57 -
 .../jmetertest/GetMetricRequestInfo.java           |   61 -
 .../jmetertest/jmetertest/JmeterTestPlanTask.java  |  276 --
 .../loadsimulator/net/TestRestMetricsSender.java   |   75 -
 .../loadsimulator/net/TestStdOutMetricsSender.java |   37 -
 .../util/TestRandomMetricsProvider.java            |   36 -
 .../loadsimulator/util/TestTimeStampProvider.java  |   51 -
 .../timeline/AbstractMiniHBaseClusterTest.java     |  367 ---
 .../AbstractPhoenixConnectionlessTest.java         |  111 -
 .../ambari/metrics/core/timeline/FunctionTest.java |   62 -
 .../timeline/HBaseTimelineMetricsServiceTest.java  |  136 -
 .../core/timeline/ITPhoenixHBaseAccessor.java      |  553 ----
 .../metrics/core/timeline/MetricTestHelper.java    |  130 -
 .../core/timeline/PhoenixHBaseAccessorTest.java    |  292 --
 .../timeline/StandaloneHBaseTestingUtility.java    |   39 -
 .../metrics/core/timeline/TestClusterSuite.java    |   34 -
 .../core/timeline/TestMetricHostAggregate.java     |   65 -
 .../core/timeline/TestPhoenixTransactSQL.java      |  659 ----
 .../core/timeline/TestTimelineMetricStore.java     |  133 -
 .../TimelineMetricSplitPointComputerTest.java      |  141 -
 .../timeline/TimelineMetricStoreWatcherTest.java   |  108 -
 .../TimelineMetricsAggregatorMemorySink.java       |  141 -
 .../core/timeline/TimelineMetricsFilterTest.java   |  259 --
 .../timeline/TimelineMetricsIgniteCacheTest.java   |  237 --
 .../metrics/core/timeline/TopNConditionTest.java   |  105 -
 .../AbstractTimelineAggregatorTest.java            |  181 --
 .../core/timeline/aggregators/DownSamplerTest.java |  117 -
 .../timeline/aggregators/ITClusterAggregator.java  |  761 -----
 .../timeline/aggregators/ITMetricAggregator.java   |  389 ---
 .../TimelineMetricClusterAggregatorSecondTest.java |  405 ---
 ...ClusterAggregatorSecondWithCacheSourceTest.java |  115 -
 .../MetricCollectorHAControllerTest.java           |  106 -
 .../timeline/discovery/TestMetadataManager.java    |  281 --
 .../core/timeline/discovery/TestMetadataSync.java  |  126 -
 ...TimelineMetricsSeriesAggregateFunctionTest.java |  188 --
 .../core/timeline/query/DefaultConditionTest.java  |  172 --
 .../core/timeline/source/RawMetricsSourceTest.java |  142 -
 .../timeline/uuid/MetricUuidGenStrategyTest.java   |  216 --
 .../metrics/timeline/TestGenericObjectMapper.java  |  102 -
 .../metrics/webapp/TestTimelineWebServices.java    |  118 -
 .../src/test/resources/hbase-default.xml           |   36 -
 .../src/test/resources/loadsimulator/README        |   65 -
 .../resources/loadsimulator/ams-jmeter.properties  |   56 -
 .../resources/loadsimulator/amsJmeterGraph.jmx     |  104 -
 .../test/resources/loadsimulator/jmeter.properties | 1172 -------
 .../resources/loadsimulator/saveservice.properties |  381 ---
 .../test/resources/test_data/full_whitelist.dat    | 1615 ----------
 .../test/resources/test_data/metric_blacklist.dat  |    2 -
 .../test/resources/test_data/metric_whitelist.dat  |    8 -
 .../test/resources/ui_metrics_def/AMS-HBASE.dat    |   26 -
 .../src/test/resources/ui_metrics_def/DATANODE.dat |    4 -
 .../resources/ui_metrics_def/FLUME_HANDLER.dat     |   63 -
 .../src/test/resources/ui_metrics_def/HBASE.dat    |   47 -
 .../src/test/resources/ui_metrics_def/HOST.dat     |   80 -
 .../test/resources/ui_metrics_def/KAFKA_BROKER.dat |   16 -
 .../src/test/resources/ui_metrics_def/NAMENODE.dat |   30 -
 .../src/test/resources/ui_metrics_def/NIMBUS.dat   |   28 -
 .../test/resources/ui_metrics_def/NODEMANAGER.dat  |   33 -
 .../resources/ui_metrics_def/RESOURCEMANAGER.dat   |   11 -
 ambari-metrics/pom.xml                             |  358 ---
 .../src/main/package/deb/control/control           |   21 -
 .../src/main/package/deb/control/postinst          |   15 -
 ambari-metrics/src/main/package/deb/control/postrm |   15 -
 .../src/main/package/deb/control/posttrm           |   15 -
 .../src/main/package/deb/control/preinst           |   15 -
 ambari-metrics/src/main/package/deb/control/prerm  |   15 -
 ambari-server/pom.xml                              |    3 +-
 pom.xml                                            |   24 -
 487 files changed, 3 insertions(+), 83943 deletions(-)
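
Not reproduced in full below is the root pom.xml hunk (24 deletions per the
diffstat above), which is the piece that actually detaches the module tree
from the build. As a minimal, hypothetical sketch of that kind of edit —
assuming the usual Maven aggregator layout; the sibling module names here are
illustrative, not copied from Ambari's real root pom — removing a module
comes down to dropping its <module> entry:

    <!-- root pom.xml, hypothetical excerpt: only the ambari-metrics entry is
         known from the diffstat; the sibling entries are illustrative -->
    <modules>
      <module>ambari-admin</module>
      <module>ambari-server</module>
      <!-- <module>ambari-metrics</module>  entry removed by this commit -->
    </modules>
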

diff --git a/ambari-admin/src/main/resources/ui/admin-web/bower.json b/ambari-admin/src/main/resources/ui/admin-web/bower.json
index 5bbada9..8e2a6f5 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/bower.json
+++ b/ambari-admin/src/main/resources/ui/admin-web/bower.json
@@ -23,4 +23,4 @@
   "resolutions": {
     "angular": "1.5.11"
   }
-}
+}
\ No newline at end of file
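
Note on the bower.json hunk above: the closing brace itself is unchanged; the
only effective change is that the rewritten file now ends without a trailing
newline, which is what the "\ No newline at end of file" marker after the
"+}" line records.
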
diff --git a/ambari-metrics/ambari-metrics-assembly/pom.xml b/ambari-metrics/ambari-metrics-assembly/pom.xml
deleted file mode 100644
index 4f36e5a..0000000
--- a/ambari-metrics/ambari-metrics-assembly/pom.xml
+++ /dev/null
@@ -1,1338 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <parent>
-    <artifactId>ambari-metrics</artifactId>
-    <groupId>org.apache.ambari</groupId>
-    <version>2.0.0.0-SNAPSHOT</version>
-  </parent>
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>ambari-metrics-assembly</artifactId>
-  <name>Ambari Metrics Assembly</name>
-  <packaging>pom</packaging>
-  <version>2.0.0.0-SNAPSHOT</version>
-  <description>Ambari Metrics Assembly</description>
-
-  <properties>
-    <collector.dir>${project.basedir}/../ambari-metrics-timelineservice</collector.dir>
-    <monitor.dir>${project.basedir}/../ambari-metrics-host-monitoring</monitor.dir>
-    <aggregator.dir>${project.basedir}/../ambari-metrics-host-aggregator</aggregator.dir>
-    <grafana.dir>${project.basedir}/../ambari-metrics-grafana</grafana.dir>
-    <hadoop-sink.dir>${project.basedir}/../ambari-metrics-hadoop-sink</hadoop-sink.dir>
-    <storm-sink.dir>${project.basedir}/../ambari-metrics-storm-sink</storm-sink.dir>
-    <flume-sink.dir>${project.basedir}/../ambari-metrics-flume-sink</flume-sink.dir>
-    <kafka-sink.dir>${project.basedir}/../ambari-metrics-kafka-sink</kafka-sink.dir>
-    <python.ver>python &gt;= 2.6</python.ver>
-    <python.devel>python-devel</python.devel>
-    <deb.publisher>Apache</deb.publisher>
-    <deb.section>universe/admin</deb.section>
-    <deb.architecture>i386 amd64</deb.architecture>
-    <deb.priority>extra</deb.priority>
-    <deb.python.ver>python (&gt;= 2.6)</deb.python.ver>
-    <deb.architecture>amd64</deb.architecture>
-    <deb.dependency.list>${deb.python.ver},python-dev,gcc</deb.dependency.list>
-    <hadoop.sink.jar>ambari-metrics-hadoop-sink-with-common-${project.version}.jar</hadoop.sink.jar>
-    <storm.sink.jar>ambari-metrics-storm-sink-with-common-${project.version}.jar</storm.sink.jar>
-    <flume.sink.jar>ambari-metrics-flume-sink-with-common-${project.version}.jar</flume.sink.jar>
-    <kafka.sink.jar>ambari-metrics-kafka-sink-with-common-${project.version}.jar</kafka.sink.jar>
-  </properties>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>build-helper-maven-plugin</artifactId>
-        <version>1.8</version>
-        <executions>
-          <execution>
-            <id>parse-version</id>
-            <phase>validate</phase>
-            <goals>
-              <goal>parse-version</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>regex-property</id>
-            <goals>
-              <goal>regex-property</goal>
-            </goals>
-            <configuration>
-              <name>ambariVersion</name>
-              <value>${project.version}</value>
-              <regex>^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-).*</regex>
-              <replacement>$1.$2.$3.$4</replacement>
-              <failIfNoMatch>false</failIfNoMatch>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>collector</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>single</goal>
-            </goals>
-            <configuration>
-              <attach>false</attach>
-              <finalName>ambari-metrics-collector-${project.version}</finalName>
-              <appendAssemblyId>false</appendAssemblyId>
-              <descriptors>
-                <descriptor>${assemblydescriptor.collector}</descriptor>
-              </descriptors>
-              <tarLongFileMode>gnu</tarLongFileMode>
-            </configuration>
-          </execution>
-          <execution>
-            <id>monitor</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>single</goal>
-            </goals>
-            <configuration>
-              <attach>false</attach>
-              <finalName>ambari-metrics-monitor-${project.version}</finalName>
-              <appendAssemblyId>false</appendAssemblyId>
-              <descriptors>
-                <descriptor>${assemblydescriptor.monitor}</descriptor>
-              </descriptors>
-              <tarLongFileMode>gnu</tarLongFileMode>
-            </configuration>
-          </execution>
-          <execution>
-            <id>grafana</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>single</goal>
-            </goals>
-            <configuration>
-              <attach>false</attach>
-              <finalName>ambari-metrics-grafana-${project.version}</finalName>
-              <appendAssemblyId>false</appendAssemblyId>
-              <descriptors>
-                <descriptor>${assemblydescriptor.grafana}</descriptor>
-              </descriptors>
-              <tarLongFileMode>gnu</tarLongFileMode>
-            </configuration>
-          </execution>
-          <execution>
-            <id>hadoop-sink</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>single</goal>
-            </goals>
-            <configuration>
-              <attach>false</attach>
-              <finalName>ambari-metrics-hadoop-sink-${project.version}</finalName>
-              <appendAssemblyId>false</appendAssemblyId>
-              <descriptors>
-                <descriptor>${assemblydescriptor.sink}</descriptor>
-              </descriptors>
-              <tarLongFileMode>gnu</tarLongFileMode>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-
-  <profiles>
-    <profile>
-      <id>rpm</id>
-
-      <activation>
-        <property>
-          <name>build-rpm</name>
-        </property>
-      </activation>
-
-      <build>
-        <plugins>
-          <plugin>
-            <artifactId>maven-resources-plugin</artifactId>
-            <version>2.7</version>
-
-            <executions>
-              <execution>
-                <id>copy-resources</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>copy-resources</goal>
-                </goals>
-                <configuration>
-                  <outputDirectory>${project.build.directory}/resources/rpm</outputDirectory>
-                  <resources>
-                    <resource>
-                      <directory>${project.basedir}/src/main/package/rpm</directory>
-                      <filtering>true</filtering>
-                    </resource>
-                  </resources>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>rpm-maven-plugin</artifactId>
-            <version>2.0.1</version>
-            <configuration>
-              <group>Development</group>
-              <needarch>x86_64</needarch>
-              <copyright>2012, Apache Software Foundation</copyright>
-              <version>${package-version}</version>
-              <release>${package-release}</release>
-
-              <defaultFilemode>644</defaultFilemode>
-              <defaultDirmode>755</defaultDirmode>
-              <defaultUsername>root</defaultUsername>
-              <defaultGroupname>root</defaultGroupname>
-            </configuration>
-            <executions>
-
-              <!--ambari-metrics-collector-->
-              <execution>
-                <id>ambari-metrics-collector</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>rpm</goal>
-                </goals>
-                <configuration>
-                  <name>ambari-metrics-collector</name>
-                  <copyright>2012, Apache Software Foundation</copyright>
-                  <group>Development</group>
-                  <description>Maven Recipe: RPM Package.</description>
-                  <autoRequires>false</autoRequires>
-                  <requires>
-                    <require>${python.ver}</require>
-                  </requires>
-
-                  <defaultFilemode>644</defaultFilemode>
-                  <defaultDirmode>755</defaultDirmode>
-                  <defaultUsername>root</defaultUsername>
-                  <defaultGroupname>root</defaultGroupname>
-
-                  <mappings>
-                    <mapping>
-                      <!--jars-->
-                      <directory>/usr/lib/ambari-metrics-collector/</directory>
-                      <sources>
-                        <source>
-                          <location>${collector.dir}/target/lib</location>
-                          <excludes>
-                            <exclude>*tests.jar</exclude>
-                            <exclude>findbugs*.jar</exclude>
-                            <exclude>jdk.tools*.jar</exclude>
-                          </excludes>
-                        </source>
-                        <source>
-                          <location>
-                            ${collector.dir}/target/ambari-metrics-timelineservice-${project.version}.jar
-                          </location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <!--embedded applications-->
-                      <directory>/usr/lib/ams-hbase/</directory>
-                      <sources>
-                        <source>
-                          <location>${collector.dir}/target/embedded/${hbase.folder}</location>
-                          <excludes>
-                            <exclude>bin/**</exclude>
-                            <exclude>bin/*</exclude>
-                            <exclude>lib/*tests.jar</exclude>
-                            <exclude>lib/findbugs*.jar</exclude>
-                            <exclude>lib/jdk.tools*.jar</exclude>
-                          </excludes>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/lib/ams-hbase/bin</directory>
-                      <filemode>755</filemode>
-                      <sources>
-                        <source>
-                          <location>${collector.dir}/target/embedded/${hbase.folder}/bin</location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/lib/ams-hbase/lib/</directory>
-                      <sources>
-                        <source>
-                          <location>${collector.dir}/target/lib</location>
-                          <includes>
-                            <include>phoenix*.jar</include>
-                            <include>antlr*.jar</include>
-                          </includes>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/lib/ams-hbase/lib/hadoop-native/</directory>
-                      <sources>
-                        <source>
-                          <location>${project.build.directory}/ambari-metrics-collector-${project.version}/ambari-metrics-collector-${project.version}/hbase/lib/hadoop-native</location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/sbin</directory>
-                      <filemode>755</filemode>
-                      <username>root</username>
-                      <groupname>root</groupname>
-                      <directoryIncluded>false</directoryIncluded>
-                      <sources>
-                        <source>
-                          <location>${collector.dir}/conf/unix/ambari-metrics-collector</location>
-                          <filter>false</filter>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/lib/ambari-metrics-collector/bin</directory>
-                      <filemode>755</filemode>
-                      <username>root</username>
-                      <groupname>root</groupname>
-                      <directoryIncluded>false</directoryIncluded>
-                      <sources>
-                        <source>
-                          <location>${collector.dir}/conf/unix/sqlline</location>
-                          <filter>false</filter>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/etc/ambari-metrics-collector/conf</directory>
-                      <configuration>true</configuration>
-                      <sources>
-                        <source>
-                          <location>${collector.dir}/conf/unix/ams-env.sh</location>
-                        </source>
-                        <source>
-                          <location>${collector.dir}/conf/unix/ams-site.xml</location>
-                        </source>
-                        <source>
-                          <location>${collector.dir}/conf/unix/log4j.properties</location>
-                        </source>
-                        <source>
-                          <location>${collector.dir}/conf/unix/metrics_whitelist</location>
-                        </source>
-                        <source>
-                          <location>${collector.dir}/conf/unix/amshbase_metrics_whitelist</location>
-                        </source>
-                        <source>
-                          <location>${collector.dir}/conf/unix/hbase-site.xml</location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/etc/ams-hbase/conf</directory>
-                      <configuration>true</configuration>
-                      <sources>
-                        <source>
-                          <location>${collector.dir}/target/embedded/${hbase.folder}/conf</location>
-                          <includes>
-                            <include>*.*</include>
-                          </includes>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/var/run/ams-hbase</directory>
-                    </mapping>
-                    <mapping>
-                      <directory>/var/lib/ambari-metrics-collector</directory>
-                    </mapping>
-                  </mappings>
-                </configuration>
-              </execution>
-
-              <!--hadoop-sink-->
-              <execution>
-                <id>ambari-metrics-hadoop-sink</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>rpm</goal>
-                </goals>
-
-                <configuration>
-                  <name>ambari-metrics-hadoop-sink</name>
-                  <copyright>2012, Apache Software Foundation</copyright>
-                  <group>Development</group>
-                  <description>Maven Recipe: RPM Package.</description>
-
-                  <defaultDirmode>755</defaultDirmode>
-                  <defaultFilemode>644</defaultFilemode>
-                  <defaultUsername>root</defaultUsername>
-                  <defaultGroupname>root</defaultGroupname>
-                  <preinstallScriptlet>
-                    <scriptFile>${project.build.directory}/resources/rpm/sink/preinstall.sh</scriptFile>
-                    <fileEncoding>utf-8</fileEncoding>
-                  </preinstallScriptlet>
-                  <postinstallScriptlet>
-                    <scriptFile>${project.build.directory}/resources/rpm/sink/postinstall.sh</scriptFile>
-                    <fileEncoding>utf-8</fileEncoding>
-                  </postinstallScriptlet>
-
-                  <mappings>
-                    <mapping>
-                      <directory>/usr/lib/ambari-metrics-hadoop-sink</directory>
-                      <sources>
-                        <source>
-                          <location>${hadoop-sink.dir}/target/ambari-metrics-hadoop-sink-with-common-${project.version}.jar</location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/lib/flume/lib</directory>
-                      <sources>
-                        <source>
-                          <location>${flume-sink.dir}/target/ambari-metrics-flume-sink-with-common-${project.version}.jar</location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/lib/storm/lib</directory>
-                      <sources>
-                        <source>
-                          <location>${storm-sink.dir}/target/ambari-metrics-storm-sink-with-common-${project.version}.jar</location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/lib/ambari-metrics-kafka-sink</directory>
-                      <sources>
-                        <source>
-                          <location>${kafka-sink.dir}/target/${kafka.sink.jar}</location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/lib/ambari-metrics-kafka-sink/lib</directory>
-                      <sources>
-                        <source>
-                          <location>${kafka-sink.dir}/target/lib</location>
-                        </source>
-                      </sources>
-                    </mapping>
-                  </mappings>
-                </configuration>
-              </execution>
-
-              <!--ambari-metrics-grafana-->
-              <execution>
-                <id>ambari-metrics-grafana</id>
-                <!-- unbinds rpm creation from maven lifecycle -->
-                <phase>package</phase>
-                <goals>
-                  <goal>rpm</goal>
-                </goals>
-                <configuration>
-                  <name>ambari-metrics-grafana</name>
-                  <group>Development</group>
-                  <needarch>x86_64</needarch>
-                  <autoRequires>false</autoRequires>
-                  <mappings>
-                    <mapping>
-                      <!--grafana-->
-                      <directory>/usr/lib/ambari-metrics-grafana/</directory>
-                      <sources>
-                        <source>
-                          <location>${grafana.dir}/target/grafana/${grafana.folder}</location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/lib/ambari-metrics-grafana/bin</directory>
-                      <filemode>755</filemode>
-                      <sources>
-                        <source>
-                          <location>${grafana.dir}/target/grafana/${grafana.folder}/bin</location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/sbin</directory>
-                      <filemode>755</filemode>
-                      <username>root</username>
-                      <groupname>root</groupname>
-                      <directoryIncluded>false</directoryIncluded>
-                      <sources>
-                        <source>
-                          <location>${grafana.dir}/conf/unix/ambari-metrics-grafana</location>
-                          <filter>false</filter>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/etc/ambari-metrics-grafana/conf</directory>
-                      <filemode>755</filemode>
-                      <username>root</username>
-                      <groupname>root</groupname>
-                      <directoryIncluded>false</directoryIncluded>
-                      <sources>
-                        <source>
-                          <location>${grafana.dir}/conf/unix/ams-grafana-env.sh</location>
-                          <filter>false</filter>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/etc/ambari-metrics-grafana/conf</directory>
-                      <configuration>true</configuration>
-                      <sources>
-                        <source>
-                          <location>${grafana.dir}/conf/unix/ams-grafana.ini</location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/var/run/ambari-metrics-grafana</directory>
-                    </mapping>
-                    <mapping>
-                      <directory>/var/lib/ambari-metrics-grafana</directory>
-                    </mapping>
-                    <mapping>
-                      <directory>/var/log/ambari-metrics-grafana</directory>
-                    </mapping>
-                  </mappings>
-                </configuration>
-              </execution>
-
-              <!--ambari-metrics-monitor-->
-              <execution>
-                <id>ambari-metrics-monitor</id>
-                <!-- unbinds rpm creation from maven lifecycle -->
-                <phase>package</phase>
-                <goals>
-                  <goal>rpm</goal>
-                </goals>
-                <configuration>
-                  <name>ambari-metrics-monitor</name>
-                  <group>Development</group>
-                  <needarch>x86_64</needarch>
-                  <autoRequires>false</autoRequires>
-                  <requires>
-                    <require>${python.ver}</require>
-                    <require>gcc</require>
-                    <require>${python.devel}</require>
-                  </requires>
-                  <preremoveScriptlet>
-                    <scriptFile>src/main/package/rpm/preremove.sh</scriptFile>
-                    <fileEncoding>utf-8</fileEncoding>
-                  </preremoveScriptlet>
-                  <mappings>
-                    <mapping>
-                      <directory>${resmonitor.install.dir}</directory>
-                      <username>root</username>
-                      <groupname>root</groupname>
-                      <sources>
-                        <source>
-                          <location>
-                            ${monitor.dir}/src/main/python/__init__.py
-                          </location>
-                        </source>
-                        <source>
-                          <location>
-                            ${monitor.dir}/src/main/python/main.py
-                          </location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>${resmonitor.install.dir}/core</directory>
-                      <sources>
-                        <source>
-                          <location>
-                            ${monitor.dir}/src/main/python/core
-                          </location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>${resmonitor.install.dir}/psutil</directory>
-                      <sources>
-                        <source>
-                          <location>
-                            ${monitor.dir}/src/main/python/psutil
-                          </location>
-                          <excludes>
-                            <exclude>build/**</exclude>
-                            <exclude>build/*</exclude>
-                          </excludes>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>${resmonitor.install.dir}/ambari_commons</directory>
-                      <sources>
-                        <source>
-                          <location>
-                            ${project.basedir}/../../ambari-common/src/main/python/ambari_commons
-                          </location>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/var/lib/ambari-metrics-monitor/lib</directory>
-                      <sources>
-                        <source>
-                          <location>
-                            ${aggregator.dir}/target/
-                          </location>
-                          <includes>
-                            <include>ambari-metrics-host-aggregator-${project.version}.jar</include>
-                          </includes>
-                        </source>
-                      </sources>
-                    </mapping>
-                    <mapping>
-                      <directory>/etc/ambari-metrics-monitor/conf</directory>
-                      <configuration>true</configuration>
-                    </mapping>
-                    <mapping>
-                      <directory>/usr/sbin</directory>
-                      <filemode>755</filemode>
-                      <username>root</username>
-                      <groupname>root</groupname>
-                      <directoryIncluded>false</directoryIncluded>
-                      <sources>
-                        <source>
-                          <location>
-                            ${monitor.dir}/conf/unix/ambari-metrics-monitor
-                          </location>
-                          <filter>true</filter>
-                        </source>
-                      </sources>
-                    </mapping>
-                  </mappings>
-                </configuration>
-              </execution>
-
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <profile>
-      <id>deb</id>
-
-      <activation>
-        <property>
-          <name>build-deb</name>
-        </property>
-      </activation>
-
-      <build>
-        <plugins>
-          <plugin>
-            <artifactId>maven-resources-plugin</artifactId>
-            <version>2.7</version>
-
-            <executions>
-              <execution>
-                <id>copy-resources</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>copy-resources</goal>
-                </goals>
-                <configuration>
-                  <outputDirectory>${project.build.directory}/resources/deb/control</outputDirectory>
-                  <resources>
-                    <resource>
-                      <directory>${project.basedir}/src/main/package/deb/control</directory>
-                      <excludes>
-                        <exclude>postinst</exclude>
-                      </excludes>
-                      <filtering>false</filtering>
-                    </resource>
-                    <resource>
-                      <directory>${project.basedir}/src/main/package/deb/control</directory>
-                      <includes>
-                        <include>postinst</include>
-                      </includes>
-                      <filtering>true</filtering>
-                    </resource>
-                  </resources>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <artifactId>jdeb</artifactId>
-            <groupId>org.vafer</groupId>
-            <version>1.0.1</version>
-            <executions>
-              <execution>
-                <phase>package</phase>
-                <goals>
-                  <goal>jdeb</goal>
-                </goals>
-              </execution>
-            </executions>
-            <configuration>
-              <controlDir>${project.build.directory}/resources/deb/control</controlDir>
-              <deb>${basedir}/target/${project.artifactId}_${package-version}-${package-release}.deb</deb>
-              <dataSet>
-                <data>
-                  <type>file</type>
-                  <src>${monitor.dir}/src/main/python/__init__.py</src>
-                  <mapper>
-                    <type>perm</type>
-                    <prefix>${resmonitor.install.dir}</prefix>
-                    <!-- TODO: Figure out if file perms should be set -->
-                    <!--user>root</user>
-                    <group>root</group-->
-                    <filemode>755</filemode>
-                  </mapper>
-                </data>
-                <data>
-                  <type>file</type>
-                  <src>${monitor.dir}/src/main/python/main.py</src>
-                  <mapper>
-                    <type>perm</type>
-                    <prefix>${resmonitor.install.dir}</prefix>
-                    <filemode>755</filemode>
-                  </mapper>
-                </data>
-                <data>
-                  <type>directory</type>
-                  <src>${monitor.dir}/src/main/python/core</src>
-                  <mapper>
-                    <type>perm</type>
-                    <prefix>${resmonitor.install.dir}/core</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <type>directory</type>
-                  <src>${monitor.dir}/src/main/python/psutil</src>
-                  <excludes>build/**</excludes>
-                  <mapper>
-                    <type>perm</type>
-                    <prefix>${resmonitor.install.dir}/psutil</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <type>directory</type>
-                  <src>${project.basedir}/../../ambari-common/src/main/python/ambari_commons</src>
-                  <mapper>
-                    <type>perm</type>
-                    <prefix>${resmonitor.install.dir}/ambari_commons</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <type>template</type>
-                  <paths>
-                    <path>/etc/ambari-metrics-monitor/conf</path>
-                    <path>/etc/ambari-metrics-collector/conf</path>
-                    <path>/etc/ambari-metrics-grafana/conf</path>
-                    <path>/etc/ams-hbase/conf</path>
-                    <path>/var/run/ams-hbase</path>
-                    <path>/var/run/ambari-metrics-grafana</path>
-                    <path>/var/log/ambari-metrics-grafana</path>
-                    <path>/var/lib/ambari-metrics-collector</path>
-                    <path>/var/lib/ambari-metrics-monitor/lib</path>
-                    <path>/var/lib/ambari-metrics-grafana</path>
-                    <path>/usr/lib/ambari-metrics-hadoop-sink</path>
-                    <path>/usr/lib/ambari-metrics-kafka-sink</path>
-                    <path>/usr/lib/ambari-metrics-kafka-sink/lib</path>
-                    <path>/usr/lib/flume/lib</path>
-                    <path>/usr/lib/storm/lib</path>
-                  </paths>
-                </data>
-                <data>
-                  <src>${monitor.dir}/conf/unix/metric_groups.conf</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <prefix>/etc/ambari-metrics-monitor/conf</prefix>
-                    <filemode>644</filemode>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${monitor.dir}/conf/unix/metric_monitor.ini</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <prefix>/etc/ambari-metrics-monitor/conf</prefix>
-                    <filemode>644</filemode>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${monitor.dir}/conf/unix/ambari-metrics-monitor</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <prefix>/usr/sbin</prefix>
-                    <filemode>755</filemode>
-                  </mapper>
-                </data>
-
-                <!-- Metric collector -->
-
-                <data>
-                  <src>${collector.dir}/target/ambari-metrics-timelineservice-${project.version}.jar</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <dirmode>644</dirmode>
-                    <prefix>/usr/lib/ambari-metrics-collector</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${collector.dir}/target/lib</src>
-                  <type>directory</type>
-                  <excludes>*tests.jar</excludes>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <prefix>/usr/lib/ambari-metrics-collector</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${collector.dir}/target/embedded/${hbase.folder}</src>
-                  <type>directory</type>
-                  <excludes>bin/**,bin/*,lib/*tests.jar</excludes>
-                  <mapper>
-                    <type>perm</type>
-                    <prefix>/usr/lib/ams-hbase</prefix>
-                    <filemode>644</filemode>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${collector.dir}/target/embedded/${hbase.folder}/bin</src>
-                  <type>directory</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>755</filemode>
-                    <prefix>/usr/lib/ams-hbase/bin</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${project.build.directory}/ambari-metrics-collector-${project.version}/ambari-metrics-collector-${project.version}/hbase/lib/hadoop-native</src>
-                  <type>directory</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>755</filemode>
-                    <prefix>/usr/lib/ams-hbase/lib/hadoop-native</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${collector.dir}/target/lib</src>
-                  <type>directory</type>
-                  <includes>phoenix*.jar,antlr*.jar</includes>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <prefix>/usr/lib/ams-hbase/lib</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${collector.dir}/conf/unix/ambari-metrics-collector</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>755</filemode>
-                    <prefix>/usr/sbin</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${collector.dir}/conf/unix/ams-env.sh</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>755</filemode>
-                    <prefix>/etc/ambari-metrics-collector/conf</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${collector.dir}/conf/unix/ams-site.xml</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <prefix>/etc/ambari-metrics-collector/conf</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${collector.dir}/conf/unix/log4j.properties</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <prefix>/etc/ambari-metrics-collector/conf</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${collector.dir}/conf/unix/metrics_whitelist</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>755</filemode>
-                    <prefix>/etc/ambari-metrics-collector/conf</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${collector.dir}/conf/unix/amshbase_metrics_whitelist</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>755</filemode>
-                    <prefix>/etc/ambari-metrics-collector/conf</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${collector.dir}/conf/unix/hbase-site.xml</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <prefix>/etc/ambari-metrics-collector/conf</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <type>directory</type>
-                  <src>${collector.dir}/target/embedded/${hbase.folder}/conf</src>
-                  <mapper>
-                    <type>perm</type>
-                    <dirmode>755</dirmode>
-                    <filemode>644</filemode>
-                    <prefix>/etc/ams-hbase/conf</prefix>
-                  </mapper>
-                </data>
-
-                <!-- Metric Grafana -->
-
-                <data>
-                  <src>${grafana.dir}/target/grafana/${grafana.folder}</src>
-                  <type>directory</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <prefix>/usr/lib/ambari-metrics-grafana</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${grafana.dir}/target/grafana/${grafana.folder}/bin</src>
-                  <type>directory</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>755</filemode>
-                    <prefix>/usr/lib/ambari-metrics-grafana/bin</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${grafana.dir}/conf/unix/ambari-metrics-grafana</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>755</filemode>
-                    <prefix>/usr/sbin</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${grafana.dir}/conf/unix/ams-grafana-env.sh</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>755</filemode>
-                    <prefix>/etc/ambari-metrics-grafana/conf</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${grafana.dir}/conf/unix/ams-grafana.ini</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <prefix>/etc/ambari-metrics-grafana/conf</prefix>
-                  </mapper>
-                </data>
-
-                <!-- hadoop sink -->
-
-                <data>
-                  <src>${hadoop-sink.dir}/target/ambari-metrics-hadoop-sink-with-common-${project.version}.jar</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <dirmode>755</dirmode>
-                    <prefix>/usr/lib/ambari-metrics-hadoop-sink</prefix>
-                  </mapper>
-                </data>
-
-                <!-- flume sink -->
-
-                <data>
-                  <src>${flume-sink.dir}/target/${flume.sink.jar}</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <dirmode>755</dirmode>
-                    <prefix>/usr/lib/flume/lib</prefix>
-                  </mapper>
-                </data>
-
-                <!-- storm sinks -->
-
-                <data>
-                  <src>${storm-sink.dir}/target/${storm.sink.jar}</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <dirmode>755</dirmode>
-                    <prefix>/usr/lib/storm/lib</prefix>
-                  </mapper>
-                </data>
-
-                <!-- kafka sink -->
-
-                <data>
-                  <src>${kafka-sink.dir}/target/${kafka.sink.jar}</src>
-                  <type>file</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <dirmode>755</dirmode>
-                    <prefix>/usr/lib/ambari-metrics-kafka-sink</prefix>
-                  </mapper>
-                </data>
-                <data>
-                  <src>${kafka-sink.dir}/target/lib</src>
-                  <type>directory</type>
-                  <mapper>
-                    <type>perm</type>
-                    <filemode>644</filemode>
-                    <dirmode>755</dirmode>
-                    <prefix>/usr/lib/ambari-metrics-kafka-sink/lib</prefix>
-                  </mapper>
-                </data>
-              </dataSet>
-            </configuration>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <profile>
-      <id>linux</id>
-      <activation>
-        <os>
-          <family>unix</family>
-        </os>
-      </activation>
-      <properties>
-        <envClassifier>linux</envClassifier>
-        <dirsep>/</dirsep>
-        <pathsep>:</pathsep>
-        <executable.python>${project.basedir}/../ambari-common/src/main/unix/ambari-python-wrap</executable.python>
-        <executable.shell>sh</executable.shell>
-        <fileextension.shell>sh</fileextension.shell>
-        <fileextension.dot.shell-default></fileextension.dot.shell-default>
-        <assemblydescriptor.collector>src/main/assembly/collector.xml</assemblydescriptor.collector>
-        <assemblydescriptor.monitor>src/main/assembly/monitor.xml</assemblydescriptor.monitor>
-        <assemblydescriptor.sink>src/main/assembly/sink.xml</assemblydescriptor.sink>
-        <assemblydescriptor.grafana>src/main/assembly/grafana.xml</assemblydescriptor.grafana>
-
-        <packagingFormat>jar</packagingFormat>
-      </properties>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <version>1.7</version>
-            <executions>
-              <execution>
-                <id>download-hadoop</id>
-                <phase>generate-resources</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target name="Downloading Hadoop">
-                    <mkdir dir="${project.build.directory}/embedded" />
-                    <get
-                      src="${hadoop.tar}"
-                      dest="${project.build.directory}/embedded/hadoop.tar.gz"
-                      usetimestamp="true"
-                      />
-                    <untar
-                      src="${project.build.directory}/embedded/hadoop.tar.gz"
-                      dest="${project.build.directory}/embedded"
-                      compression="gzip"
-                      />
-                    <!-- Resolving symlinks-->
-                    <move todir="${project.build.directory}/embedded/${hadoop.folder}/lib/native/">
-                      <fileset dir="${project.build.directory}/embedded/${hadoop.folder}/lib/native/"/>
-                      <mapper type="regexp" from="libsnappy.so.1.*.*" to="libsnappy.so.1"/>
-                    </move>
-                    <move
-                      file="${project.build.directory}/embedded/${hadoop.folder}/lib/native/libhdfs.so.0.0.0"
-                      tofile="${project.build.directory}/embedded/${hadoop.folder}/lib/native/libhdfs.so"
-                      />
-                    <move
-                      file="${project.build.directory}/embedded/${hadoop.folder}/lib/native/libhadoop.so.1.0.0"
-                      tofile="${project.build.directory}/embedded/${hadoop.folder}/lib/native/libhadoop.so"
-                      />
-                    <delete file="${project.build.directory}/embedded/${hadoop.folder}/lib/native/libsnappy.so"/>
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <profile>
-      <id>windows</id>
-      <activation>
-        <os>
-          <family>win</family>
-        </os>
-      </activation>
-      <properties>
-        <envClassifier>win</envClassifier>
-        <dirsep>\</dirsep>
-        <pathsep>;</pathsep>
-        <executable.python>python</executable.python>
-        <executable.shell>cmd</executable.shell>
-        <fileextension.shell>cmd</fileextension.shell>
-        <fileextension.dot.shell-default>.cmd</fileextension.dot.shell-default>
-        <assemblydescriptor.collector>src/main/assembly/collector-windows.xml</assemblydescriptor.collector>
-        <assemblydescriptor.monitor>src/main/assembly/monitor-windows.xml</assemblydescriptor.monitor>
-        <assemblydescriptor.sink>src/main/assembly/sink-windows.xml</assemblydescriptor.sink>
-        <assemblydescriptor.collector.choco>src/main/assembly/collector-windows-choco.xml</assemblydescriptor.collector.choco>
-        <assemblydescriptor.monitor.choco>src/main/assembly/monitor-windows-choco.xml</assemblydescriptor.monitor.choco>
-        <assemblydescriptor.sink.choco>src/main/assembly/sink-windows-choco.xml</assemblydescriptor.sink.choco>
-        <packagingFormat>jar</packagingFormat>
-        <python.build.version>2.7</python.build.version>
-      </properties>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <version>1.7</version>
-            <executions>
-              <execution>
-                <id>download-hadoop</id>
-                <phase>generate-resources</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target name="Downloading Hadoop">
-                    <mkdir dir="${project.build.directory}/embedded" />
-                    <get
-                      src="${hadoop.winpkg.zip}"
-                      dest="${project.build.directory}/embedded/hadoop.zip"
-                      usetimestamp="true"
-                      />
-                    <unzip
-                      src="${project.build.directory}/embedded/hadoop.zip"
-                      dest="${project.build.directory}/embedded/hadoop.temp"
-                      />
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <profile>
-      <id>choco</id>
-      <activation>
-        <os>
-          <family>Windows</family>
-        </os>
-      </activation>
-      <build>
-        <plugins>
-          <!-- choco package creation -->
-          <plugin>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>collector-choco-dir</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <attach>false</attach>
-                  <finalName>ambari-metrics-collector-${project.version}-choco</finalName>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <descriptors>
-                    <descriptor>${assemblydescriptor.collector.choco}</descriptor>
-                  </descriptors>
-                  <tarLongFileMode>gnu</tarLongFileMode>
-                </configuration>
-              </execution>
-              <execution>
-                <id>monitor-choco-dir</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <attach>false</attach>
-                  <finalName>ambari-metrics-monitor-${project.version}-choco</finalName>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <descriptors>
-                    <descriptor>${assemblydescriptor.monitor.choco}</descriptor>
-                  </descriptors>
-                  <tarLongFileMode>gnu</tarLongFileMode>
-                </configuration>
-              </execution>
-              <execution>
-                <id>hadoop-sink-choco-dir</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <attach>false</attach>
-                  <finalName>ambari-metrics-hadoop-sink-${project.version}-choco</finalName>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <descriptors>
-                    <descriptor>${assemblydescriptor.sink.choco}</descriptor>
-                  </descriptors>
-                  <tarLongFileMode>gnu</tarLongFileMode>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>exec-maven-plugin</artifactId>
-            <version>1.2.1</version>
-            <executions>
-              <execution>
-                <id>collector-choco-package</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>exec</goal>
-                </goals>
-                <configuration>
-                  <executable>choco.exe</executable>
-                  <arguments>
-                    <argument>pack</argument>
-                    <argument>--version=${project.version}</argument>
-                    <argument>${basedir}/target/ambari-metrics-collector-${project.version}-choco/ambari-metrics-collector.nuspec</argument>
-                  </arguments>
-                  <workingDirectory>target/ambari-metrics-collector-${project.version}-choco</workingDirectory>
-                </configuration>
-              </execution>
-              <execution>
-                <id>monitor-choco-package</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>exec</goal>
-                </goals>
-                <configuration>
-                  <executable>choco.exe</executable>
-                  <arguments>
-                    <argument>pack</argument>
-                    <argument>--version=${project.version}</argument>
-                    <argument>${basedir}/target/ambari-metrics-monitor-${project.version}-choco/ambari-metrics-monitor.nuspec</argument>
-                  </arguments>
-                  <workingDirectory>target/ambari-metrics-monitor-${project.version}-choco</workingDirectory>
-                </configuration>
-              </execution>
-              <execution>
-                <id>hadoop-sink-choco-package</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>exec</goal>
-                </goals>
-                <configuration>
-                  <executable>choco.exe</executable>
-                  <arguments>
-                    <argument>pack</argument>
-                    <argument>--version=${project.version}</argument>
-                    <argument>${basedir}/target/ambari-metrics-hadoop-sink-${project.version}-choco/ambari-metrics-hadoop-sink.nuspec</argument>
-                  </arguments>
-                  <workingDirectory>target/ambari-metrics-hadoop-sink-${project.version}-choco</workingDirectory>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-  </profiles>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-metrics-timelineservice</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-metrics-common</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-metrics-hadoop-sink</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-metrics-flume-sink</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-metrics-storm-sink</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-metrics-kafka-sink</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-metrics-host-monitoring</artifactId>
-      <version>${project.version}</version>
-      <type>pom</type>
-      <optional>true</optional>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-metrics-host-aggregator</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-  </dependencies>
-
-</project>
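
The per-OS assemblydescriptor.* properties set in the linux and windows profiles above were consumed by maven-assembly-plugin executions elsewhere in this pom. A minimal sketch of that wiring, assuming the plugin's standard "single" goal bound to the package phase (the execution id and exact shape here are illustrative, not the removed pom's verbatim configuration):

    <plugin>
      <artifactId>maven-assembly-plugin</artifactId>
      <executions>
        <execution>
          <id>collector-dir</id>  <!-- illustrative id -->
          <phase>package</phase>
          <goals>
            <goal>single</goal>
          </goals>
          <configuration>
            <descriptors>
              <!-- resolves to collector.xml on unix, collector-windows.xml on windows -->
              <descriptor>${assemblydescriptor.collector}</descriptor>
            </descriptors>
            <tarLongFileMode>gnu</tarLongFileMode>
          </configuration>
        </execution>
      </executions>
    </plugin>
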
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector-windows-choco.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector-windows-choco.xml
deleted file mode 100644
index af09d3f..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector-windows-choco.xml
+++ /dev/null
@@ -1,51 +0,0 @@
-<?xml version="1.0"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<assembly>
-  <id>choco</id>
-  <formats>
-    <format>dir</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <files>
-    <file>
-      <source>${project.build.directory}/ambari-metrics-collector-${artifact.version}.zip</source>
-      <outputDirectory>content</outputDirectory>
-    </file>
-    <file>
-      <source>${basedir}/src/main/package/choco/collector/ambari-metrics-collector.nuspec</source>
-    </file>
-    <file>
-      <source>${basedir}/src/main/package/choco/collector/chocolateyinstall.ps1</source>
-      <outputDirectory>tools</outputDirectory>
-    </file>
-    <file>
-      <source>${basedir}/src/main/package/choco/collector/chocolateyuninstall.ps1</source>
-      <outputDirectory>tools</outputDirectory>
-    </file>
-  </files>
-  <fileSets>
-    <fileSet>
-      <directory>${basedir}/../../ambari-common/src/main/windows</directory>
-      <outputDirectory>modules</outputDirectory>
-      <includes>
-        <include>*.psm1</include>
-      </includes>
-    </fileSet>
-  </fileSets>
-</assembly>
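
The choco assembly above stages the nuspec at the package root and the install/uninstall scripts under tools/, which is the layout "choco pack" expects (the pack invocation itself appears in the pom's choco profile earlier in this diff). A skeletal nuspec of the kind being staged; the field values here are illustrative, since the real file is removed later in this diff under src/main/package/choco/collector:

    <?xml version="1.0"?>
    <package>
      <metadata>
        <id>ambari-metrics-collector</id>
        <version>0.0.0</version>  <!-- superseded by the version passed to choco pack -->
        <authors>Apache Ambari</authors>
        <description>Ambari Metrics Collector (illustrative)</description>
      </metadata>
      <!-- with no files element, the content/, tools/ and modules/
           directories beside the nuspec are packed by convention -->
    </package>
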
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector-windows.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector-windows.xml
deleted file mode 100644
index 8b7a021..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector-windows.xml
+++ /dev/null
@@ -1,112 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1 http://maven.apache.org/xsd/assembly-1.1.1.xsd">
-  <id>collector</id>
-  <formats>
-    <format>dir</format>
-    <format>zip</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <fileSets>
-    <fileSet>
-      <directory>${collector.dir}/target/embedded/${hbase.winpkg.folder}</directory>
-      <outputDirectory>hbase</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${collector.dir}/conf/windows</directory>
-      <outputDirectory>/</outputDirectory>
-      <includes>
-        <include>ambari-metrics-collector.cmd</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${collector.dir}/conf/windows</directory>
-      <outputDirectory>conf</outputDirectory>
-      <includes>
-        <include>ams.properties</include>
-        <include>ams-env.cmd</include>
-        <include>ams-site.xml</include>
-        <include>log4j.properties</include>
-        <include>metrics_whitelist</include>
-        <include>amshbase_metrics_whitelist</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${collector.dir}/target/lib</directory>
-      <outputDirectory>lib</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${collector.dir}/src/main/python</directory>
-      <outputDirectory>/sbin</outputDirectory>
-      <includes>
-        <include>*.py</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${collector.dir}/src/main/python/ambari_metrics_collector</directory>
-      <outputDirectory>/sbin/ambari_metrics_collector</outputDirectory>
-      <includes>
-        <include>*.py</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${project.basedir}/../../ambari-common/src/main/python/ambari_commons</directory>
-      <outputDirectory>/sbin/ambari_commons</outputDirectory>
-      <includes>
-        <include>*.py</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${project.basedir}/../../ambari-common/src/main/python/ambari_commons/resources</directory>
-      <outputDirectory>/sbin/ambari_commons/resources</outputDirectory>
-      <includes>
-        <include>*.json</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${project.build.directory}/embedded/hadoop.temp/resources/${hadoop.winpkg.folder}/bin</directory>
-      <outputDirectory>hbase/bin</outputDirectory>
-      <includes>
-        <include>*.dll</include>
-        <include>*.exe</include>
-      </includes>
-    </fileSet>
-  </fileSets>
-  <dependencySets>
-    <dependencySet>
-      <unpack>false</unpack>
-      <outputDirectory>hbase/lib</outputDirectory>
-      <useProjectArtifact>false</useProjectArtifact>
-      <includes>
-        <include>org.antlr:antlr*</include>
-        <include>org.apache.phoenix:phoenix-core</include>
-      </includes>
-    </dependencySet>
-    <dependencySet>
-      <unpack>false</unpack>
-      <outputDirectory>lib</outputDirectory>
-      <useProjectArtifact>false</useProjectArtifact>
-      <includes>
-        <include>org.apache.ambari:ambari-metrics-timelineservice</include>
-      </includes>
-    </dependencySet>
-  </dependencySets>
-</assembly>
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector.xml
deleted file mode 100644
index 2b94106..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector.xml
+++ /dev/null
@@ -1,84 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1 http://maven.apache.org/xsd/assembly-1.1.1.xsd">
-  <id>collector</id>
-  <formats>
-    <format>dir</format>
-    <format>tar.gz</format>
-  </formats>
-
-  <fileSets>
-    <fileSet>
-      <directory>${collector.dir}/target/embedded/${hbase.folder}</directory>
-      <outputDirectory>hbase</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${collector.dir}/conf/unix</directory>
-      <outputDirectory>bin</outputDirectory>
-      <includes>
-        <include>ambari-metrics-collector</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${collector.dir}/conf/unix</directory>
-      <outputDirectory>conf</outputDirectory>
-      <includes>
-        <include>ams-env.sh</include>
-        <include>ams-site.xml</include>
-        <include>hbase-site.xml</include>
-        <include>log4j.properties</include>
-        <include>metrics_whitelist</include>
-        <include>amshbase_metrics_whitelist</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${collector.dir}/target/lib</directory>
-      <outputDirectory>lib</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${project.build.directory}/embedded/${hadoop.folder}/lib/native</directory>
-      <outputDirectory>hbase/lib/hadoop-native</outputDirectory>
-    </fileSet>
-  </fileSets>
-
-  <dependencySets>
-    <dependencySet>
-      <fileMode>755</fileMode>
-      <unpack>false</unpack>
-      <outputDirectory>hbase/lib</outputDirectory>
-      <useProjectArtifact>false</useProjectArtifact>
-      <includes>
-        <include>org.antlr:antlr*</include>
-        <include>org.apache.phoenix:phoenix-core</include>
-      </includes>
-    </dependencySet>
-    <dependencySet>
-      <fileMode>755</fileMode>
-      <unpack>false</unpack>
-      <outputDirectory>lib</outputDirectory>
-      <useProjectArtifact>false</useProjectArtifact>
-      <includes>
-        <include>org.apache.ambari:ambari-metrics-timelineservice</include>
-      </includes>
-    </dependencySet>
-  </dependencySets>
-
-</assembly>
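
When an assembly like the one above is attached to the build, Maven addresses it by the descriptor id as classifier plus the format as type. A sketch of how a downstream pom could consume the collector tarball under that assumption (the dir-format choco executions earlier explicitly set attach to false, so this applies only to attached formats such as the tar.gz):

    <dependency>
      <groupId>org.apache.ambari</groupId>
      <artifactId>ambari-metrics-assembly</artifactId>
      <version>${project.version}</version>
      <classifier>collector</classifier>
      <type>tar.gz</type>
    </dependency>
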
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/grafana.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/grafana.xml
deleted file mode 100644
index fe6da7f..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/grafana.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1 http://maven.apache.org/xsd/assembly-1.1.1.xsd">
-  <id>grafana</id>
-  <formats>
-    <format>dir</format>
-    <format>tar.gz</format>
-  </formats>
-
-  <fileSets>
-    <fileSet>
-      <directory>${grafana.dir}/target/grafana/${grafana.folder}</directory>
-      <outputDirectory>lib</outputDirectory>
-    </fileSet>
-  </fileSets>
-
-  <files>
-    <file>
-      <source>${grafana.dir}/conf/unix/ams-grafana.ini</source>
-      <outputDirectory>conf</outputDirectory>
-    </file>
-    <file>
-      <source>${grafana.dir}/conf/unix/ams-grafana-env.sh</source>
-      <outputDirectory>conf</outputDirectory>
-    </file>
-    <file>
-      <source>${grafana.dir}/conf/unix/ambari-metrics-grafana</source>
-      <outputDirectory>bin</outputDirectory>
-    </file>
-  </files>
-
-  <dependencySets>
-    <dependencySet>
-      <useProjectArtifact>false</useProjectArtifact>
-      <excludes>
-        <exclude>*</exclude>
-      </excludes>
-    </dependencySet>
-  </dependencySets>
-
-</assembly>
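
The trailing dependencySet that excludes * is a deliberate maven-assembly-plugin idiom rather than a leftover: it declares a dependency set that matches no artifacts, so the grafana assembly ships only the fileSets and files listed above. An equivalent, more explicit spelling of the same idiom:

    <dependencySets>
      <dependencySet>
        <useProjectArtifact>false</useProjectArtifact>
        <excludes>
          <exclude>*:*</exclude>  <!-- matches every groupId:artifactId -->
        </excludes>
      </dependencySet>
    </dependencySets>
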
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor-windows-choco.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor-windows-choco.xml
deleted file mode 100644
index fe65dab..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor-windows-choco.xml
+++ /dev/null
@@ -1,51 +0,0 @@
-<?xml version="1.0"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<assembly>
-  <id>choco</id>
-  <formats>
-    <format>dir</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <files>
-    <file>
-      <source>${project.build.directory}/ambari-metrics-monitor-${artifact.version}.zip</source>
-      <outputDirectory>content</outputDirectory>
-    </file>
-    <file>
-      <source>${basedir}/src/main/package/choco/monitor/ambari-metrics-monitor.nuspec</source>
-    </file>
-    <file>
-      <source>${basedir}/src/main/package/choco/monitor/chocolateyinstall.ps1</source>
-      <outputDirectory>tools</outputDirectory>
-    </file>
-    <file>
-      <source>${basedir}/src/main/package/choco/monitor/chocolateyuninstall.ps1</source>
-      <outputDirectory>tools</outputDirectory>
-    </file>
-  </files>
-  <fileSets>
-    <fileSet>
-      <directory>${basedir}/../../ambari-common/src/main/windows</directory>
-      <outputDirectory>modules</outputDirectory>
-      <includes>
-        <include>*.psm1</include>
-      </includes>
-    </fileSet>
-  </fileSets>
-</assembly>
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor-windows.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor-windows.xml
deleted file mode 100644
index d015d31..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor-windows.xml
+++ /dev/null
@@ -1,89 +0,0 @@
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<assembly>
-  <id>windows-dist</id>
-  <formats>
-    <format>dir</format>
-    <format>zip</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <fileSets>
-    <fileSet>
-      <directory>${monitor.dir}/src/main/python</directory>
-      <outputDirectory>/sbin</outputDirectory>
-      <includes>
-        <include>*.py</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${project.basedir}/../../ambari-common/src/main/python/ambari_commons</directory>
-      <outputDirectory>/sbin/ambari_commons</outputDirectory>
-      <includes>
-        <include>*.py</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${project.basedir}/../../ambari-common/src/main/python/ambari_commons/resources</directory>
-      <outputDirectory>/sbin/ambari_commons/resources</outputDirectory>
-      <includes>
-        <include>*.json</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${monitor.dir}/src/main/python/core</directory>
-      <outputDirectory>/sbin/core</outputDirectory>
-      <includes>
-        <include>*.py</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${monitor.dir}/target/psutil_build/lib.win-amd64-${python.build.version}</directory>
-      <outputDirectory>/sbin</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${monitor.dir}/conf/windows</directory>
-      <outputDirectory>conf</outputDirectory>
-      <includes>
-        <include>metric_groups.conf</include>
-        <include>metric_monitor.ini</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${aggregator.dir}/conf/windows</directory>
-      <outputDirectory>conf</outputDirectory>
-      <includes>
-        <include>log4j.properties</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${monitor.dir}/conf/windows</directory>
-      <outputDirectory>/</outputDirectory>
-      <includes>
-        <include>ambari-metrics-monitor.cmd</include>
-      </includes>
-    </fileSet>
-  </fileSets>
-  <dependencySets>
-    <dependencySet>
-      <useProjectArtifact>false</useProjectArtifact>
-      <excludes>
-        <exclude>*</exclude>
-      </excludes>
-    </dependencySet>
-  </dependencySets>
-</assembly>
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor.xml
deleted file mode 100644
index 448fe62..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor.xml
+++ /dev/null
@@ -1,78 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1 http://maven.apache.org/xsd/assembly-1.1.1.xsd">
-  <id>monitor</id>
-  <formats>
-    <format>dir</format>
-    <format>tar.gz</format>
-  </formats>
-
-  <fileSets>
-    <fileSet>
-      <directory>${monitor.dir}/src/main/python</directory>
-      <outputDirectory>site-packages/resource_monitoring</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${project.basedir}/../../ambari-common/src/main/python/ambari_commons</directory>
-      <outputDirectory>site-packages/resource_monitoring/ambari_commons</outputDirectory>
-      <includes>
-        <include>**/*</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${monitor.dir}/conf/unix</directory>
-      <outputDirectory>conf</outputDirectory>
-      <includes>
-        <include>metric_groups.conf</include>
-        <include>metric_monitor.ini</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${aggregator.dir}/conf/unix</directory>
-      <outputDirectory>conf</outputDirectory>
-      <includes>
-        <include>log4j.properties</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${monitor.dir}/conf/unix</directory>
-      <outputDirectory>bin</outputDirectory>
-      <includes>
-        <include>ambari-metrics-monitor</include>
-      </includes>
-    </fileSet>
-  </fileSets>
-
-  <dependencySets>
-    <dependencySet>
-      <useProjectArtifact>false</useProjectArtifact>
-      <excludes>
-        <exclude>*</exclude>
-      </excludes>
-    </dependencySet>
-  </dependencySets>
-
-</assembly>
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/sink-windows-choco.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/sink-windows-choco.xml
deleted file mode 100644
index b7dcb0f..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/sink-windows-choco.xml
+++ /dev/null
@@ -1,51 +0,0 @@
-<?xml version="1.0"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<assembly>
-  <id>choco</id>
-  <formats>
-    <format>dir</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <files>
-    <file>
-      <source>${project.build.directory}/ambari-metrics-hadoop-sink-${artifact.version}.zip</source>
-      <outputDirectory>content</outputDirectory>
-    </file>
-    <file>
-      <source>${basedir}/src/main/package/choco/sink/ambari-metrics-hadoop-sink.nuspec</source>
-    </file>
-    <file>
-      <source>${basedir}/src/main/package/choco/sink/chocolateyinstall.ps1</source>
-      <outputDirectory>tools</outputDirectory>
-    </file>
-    <file>
-      <source>${basedir}/src/main/package/choco/sink/chocolateyuninstall.ps1</source>
-      <outputDirectory>tools</outputDirectory>
-    </file>
-  </files>
-  <fileSets>
-    <fileSet>
-      <directory>${basedir}/../../ambari-common/src/main/windows</directory>
-      <outputDirectory>modules</outputDirectory>
-      <includes>
-        <include>*.psm1</include>
-      </includes>
-    </fileSet>
-  </fileSets>
-</assembly>
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/sink-windows.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/sink-windows.xml
deleted file mode 100644
index e82d2d4..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/sink-windows.xml
+++ /dev/null
@@ -1,69 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1 http://maven.apache.org/xsd/assembly-1.1.1.xsd">
-  <id>hadoop-sink</id>
-  <formats>
-    <format>dir</format>
-    <format>zip</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <fileSets>
-    <fileSet>
-      <directory>${hadoop-sink.dir}/src/main/conf</directory>
-      <outputDirectory>hadoop-sink/conf</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${flume-sink.dir}/src/main/conf</directory>
-      <outputDirectory>hadoop-sink/conf</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${storm-sink.dir}/src/main/conf</directory>
-      <outputDirectory>hadoop-sink/conf</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${kafka-sink.dir}/target/lib</directory>
-      <outputDirectory>hadoop-sink/lib</outputDirectory>
-    </fileSet>
-  </fileSets>
-
-  <files>
-    <file>
-      <source>${hadoop-sink.dir}/target/ambari-metrics-hadoop-sink-with-common-${project.version}.jar</source>
-      <outputDirectory>hadoop-sink</outputDirectory>
-    </file>
-    <file>
-      <source>${flume-sink.dir}/target/ambari-metrics-flume-sink-with-common-${project.version}.jar</source>
-      <outputDirectory>hadoop-sink</outputDirectory>
-    </file>
-    <file>
-      <source>${storm-sink.dir}/target/ambari-metrics-storm-sink-with-common-${project.version}.jar</source>
-      <outputDirectory>hadoop-sink</outputDirectory>
-    </file>
-    <file>
-      <source>${kafka-sink.dir}/target/ambari-metrics-kafka-sink-with-common-${project.version}.jar</source>
-      <outputDirectory>hadoop-sink</outputDirectory>
-    </file>
-  </files>
-
-
-
-
-</assembly>
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/sink.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/sink.xml
deleted file mode 100644
index 1400c7b..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/sink.xml
+++ /dev/null
@@ -1,73 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.1 http://maven.apache.org/xsd/assembly-1.1.1.xsd">
-  <id>hadoop-sink</id>
-  <formats>
-    <format>dir</format>
-    <format>tar.gz</format>
-  </formats>
-
-  <fileSets>
-    <fileSet>
-      <directory>${hadoop-sink.dir}/src/main/conf</directory>
-      <outputDirectory>hadoop-sink/conf</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${flume-sink.dir}/src/main/conf</directory>
-      <outputDirectory>hadoop-sink/conf</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${storm-sink.dir}/src/main/conf</directory>
-      <outputDirectory>hadoop-sink/conf</outputDirectory>
-    </fileSet>
-    <fileSet>
-      <directory>${kafka-sink.dir}/target/lib</directory>
-      <outputDirectory>hadoop-sink/lib</outputDirectory>
-    </fileSet>
-  </fileSets>
-
-  <files>
-    <file>
-      <fileMode>644</fileMode>
-      <source>${hadoop-sink.dir}/target/ambari-metrics-hadoop-sink-with-common-${project.version}.jar</source>
-      <outputDirectory>hadoop-sink</outputDirectory>
-    </file>
-    <file>
-      <fileMode>644</fileMode>
-      <source>${flume-sink.dir}/target/ambari-metrics-flume-sink-with-common-${project.version}.jar</source>
-      <outputDirectory>hadoop-sink</outputDirectory>
-    </file>
-    <file>
-      <fileMode>644</fileMode>
-      <source>${storm-sink.dir}/target/ambari-metrics-storm-sink-with-common-${project.version}.jar</source>
-      <outputDirectory>hadoop-sink</outputDirectory>
-    </file>
-    <file>
-      <fileMode>644</fileMode>
-      <source>${kafka-sink.dir}/target/ambari-metrics-kafka-sink-with-common-${project.version}.jar</source>
-      <outputDirectory>hadoop-sink</outputDirectory>
-    </file>
-  </files>
-
-
-
-
-</assembly>
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/collector/ambari-metrics-collector.nuspec b/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/collector/ambari-metrics-collector.nuspec
deleted file mode 100644
index a6bb211..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/collector/ambari-metrics-collector.nuspec
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Do not remove this test for UTF-8: if “Ω” doesn’t appear as greek uppercase omega letter enclosed in quotation marks, you should use an editor that supports UTF-8, not this one. -->
-<package xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
-  <metadata>
-    <!-- Read this before publishing packages to chocolatey.org: https://github.com/chocolatey/chocolatey/wiki/CreatePackages -->
-    <id>ambari-metrics-collector</id>
-    <title>Ambari Metrics Collector</title>
-    <version>1.0</version>
-    <authors>Apache Ambari</authors>
-    <owners>Apache Ambari</owners>
-    <summary>Ambari Metrics Collector</summary>
-    <description>Ambari Metrics Collector
-    </description>
-    <projectUrl>http://ambari.apache.org</projectUrl>
-    <tags>ambari-metrics-collector</tags>
-    <copyright>https://github.com/apache/ambari/blob/trunk/NOTICE.txt</copyright>
-    <licenseUrl>https://github.com/apache/ambari/blob/trunk/LICENSE.txt</licenseUrl>
-    <requireLicenseAcceptance>false</requireLicenseAcceptance>
-    <releaseNotes></releaseNotes>
-  </metadata>
-  <files>
-    <file src="tools\**" target="tools" />
-    <file src="content\**" target="content" />
-    <file src="modules\**" target="modules" />
-  </files>
-</package>
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/collector/chocolateyinstall.ps1 b/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/collector/chocolateyinstall.ps1
deleted file mode 100644
index 9436582..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/collector/chocolateyinstall.ps1
+++ /dev/null
@@ -1,94 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-# Stop on all errors
-$ErrorActionPreference = 'Stop';
-
-# Package Name
-$packageName = $Env:chocolateyPackageName
-# Package Version
-$packageVersion = $Env:chocolateyPackageVersion
-# Package Folder
-$packageFolder = $Env:chocolateyPackageFolder
-# Package Parameters
-$packageParameters = $env:chocolateyPackageParameters
-
-$arguments = @{}
-$ambariRoot = "C:\ambari"
-$retries = 5
-$lockTimeout = 60000
-
-# Parse the packageParameters
-#   /AmbariRoot:C:\ambari /Retries:5
-if ($packageParameters) {
-  $match_pattern = "\/(?<option>([a-zA-Z]+)):(?<value>([`"'])?([a-zA-Z0-9- _\\:\.]+)([`"'])?)|\/(?<option>([a-zA-Z]+))"
-  $option_name = 'option'
-  $value_name = 'value'
-
-  if ($packageParameters -match $match_pattern ){
-    $results = $packageParameters | Select-String $match_pattern -AllMatches
-    $results.matches | % {
-      $arguments.Add(
-        $_.Groups[$option_name].Value.Trim(),
-        $_.Groups[$value_name].Value.Trim())
-    }
-  } else {
-    Throw "Package Parameters were found but were invalid (REGEX Failure)"
-  }
-  if ($arguments.ContainsKey("AmbariRoot")) {
-    Write-Debug "AmbariRoot Argument Found"
-    $ambariRoot = $arguments["AmbariRoot"]
-  }
-  if ($arguments.ContainsKey("Retries")) {
-    Write-Debug "Retries Argument Found"
-    $retries = $arguments["Retries"]
-  }
-  if ($arguments.ContainsKey("LockTimeout")) {
-    Write-Debug "LockTimeout Argument Found"
-    $lockTimeout = $arguments["LockTimeout"]
-  }
-} else {
-  Write-Debug "No Package Parameters Passed in"
-}
-
-$modulesFolder = "$(Join-Path $packageFolder modules)"
-$contentFolder = "$(Join-Path $packageFolder content)"
-$zipFile = "$(Join-Path $contentFolder $packageName-$packageVersion.zip)"
-$specificFolder = ""
-$link = "$ambariRoot\$packageName"
-$target = "$ambariRoot\$packageName-$packageVersion"
-$collectorHome = $link
-$collectorConfDir = "$link\conf"
-
-Import-Module "$modulesFolder\link.psm1"
-Import-Module "$modulesFolder\retry.psm1"
-
-$collectorMutex = New-Object System.Threading.Mutex $false, "Global\$packageName"
-if($collectorMutex.WaitOne($lockTimeout)) {
-  try {
-    Retry-Command -Command "Get-ChocolateyUnzip" -Arguments @{ FileFullPath = $zipFile; Destination = $target; SpecificFolder = $specificFolder; PackageName = $packageName} -Retries $retries
-    Retry-Command -Command "Remove-Symlink-IfExists" -Arguments @{Link = $link} -Retries $retries
-    Retry-Command -Command "New-Symlink" -Arguments @{ Link = $link; Target = $target } -Retries $retries
-
-    [Environment]::SetEnvironmentVariable("COLLECTOR_HOME", $collectorHome, "Machine")
-    [Environment]::SetEnvironmentVariable("COLLECTOR_CONF_DIR", $collectorConfDir, "Machine")
-  } finally {
-    $collectorMutex.ReleaseMutex()
-  }
-} else {
-  Write-Host ("Failed to acquire lock [$packageName] within [$lockTimeout] ms. Installation failed!")
-  throw
-}
-
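
For readers tracing the removed Windows packaging: every chocolateyinstall.ps1 deleted by this commit parses its /Option:value package parameters with the same regex before falling back to built-in defaults. A minimal Java sketch of that parsing convention follows; the class name and the simplified value pattern (no quoted values) are illustrative only, not part of the removed sources.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Illustrative re-statement of the "/AmbariRoot:C:\ambari /Retries:5"
    // parameter parsing done by the removed chocolateyinstall.ps1 scripts.
    // The value pattern is simplified here and, unlike the original,
    // does not accept quoted values.
    public final class PackageParams {
      private static final Pattern OPTION =
          Pattern.compile("/(?<option>[a-zA-Z]+):(?<value>\\S+)");

      public static Map<String, String> parse(String packageParameters) {
        Map<String, String> arguments = new HashMap<>();
        if (packageParameters == null || packageParameters.isEmpty()) {
          return arguments; // the "No Package Parameters Passed in" branch
        }
        Matcher m = OPTION.matcher(packageParameters);
        while (m.find()) {
          arguments.put(m.group("option").trim(), m.group("value").trim());
        }
        if (arguments.isEmpty()) {
          throw new IllegalArgumentException(
              "Package Parameters were found but were invalid (REGEX Failure)");
        }
        return arguments;
      }
    }

With that in hand, parse("/AmbariRoot:D:\\ambari /Retries:3") yields {AmbariRoot=D:\ambari, Retries=3}, after which the script would override its $ambariRoot and $retries defaults.
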
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/collector/chocolateyuninstall.ps1 b/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/collector/chocolateyuninstall.ps1
deleted file mode 100644
index fe6bdbd..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/collector/chocolateyuninstall.ps1
+++ /dev/null
@@ -1,69 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-# Stop on all errors
-$ErrorActionPreference = 'Stop';
-
-# Package Name
-$packageName = $Env:chocolateyPackageName
-# Package Version
-$packageVersion = $Env:chocolateyPackageVersion
-# Package Folder
-$packageFolder = $Env:chocolateyPackageFolder
-# Package Parameters
-$packageParameters = $env:chocolateyPackageParameters
-
-$arguments = @{}
-$ambariRoot = "C:\ambari"
-$retries = 5
-# Parse the packageParameters
-#   /AmbariRoot:C:\ambari /Retries:5
-if ($packageParameters) {
-  $match_pattern = "\/(?<option>([a-zA-Z]+)):(?<value>([`"'])?([a-zA-Z0-9- _\\:\.]+)([`"'])?)|\/(?<option>([a-zA-Z]+))"
-  $option_name = 'option'
-  $value_name = 'value'
-
-  if ($packageParameters -match $match_pattern ){
-    $results = $packageParameters | Select-String $match_pattern -AllMatches
-    $results.matches | % {
-      $arguments.Add(
-        $_.Groups[$option_name].Value.Trim(),
-        $_.Groups[$value_name].Value.Trim())
-    }
-  } else {
-    Throw "Package Parameters were found but were invalid (REGEX Failure)"
-  }
-  if ($arguments.ContainsKey("AmbariRoot")) {
-    Write-Debug "AmbariRoot Argument Found"
-    $ambariRoot = $arguments["AmbariRoot"]
-  }
-  if ($arguments.ContainsKey("Retries")) {
-    Write-Debug "Retries Argument Found"
-    $retries = $arguments["Retries"]
-  }
-} else {
-  Write-Debug "No Package Parameters Passed in"
-}
-
-$modulesFolder = "$(Join-Path $packageFolder modules)"
-$contentFolder = "$(Join-Path $packageFolder content)"
-$link = "$ambariRoot\$packageName"
-$target = "$ambariRoot\$packageName-$packageVersion"
-
-Import-Module "$modulesFolder\link.psm1"
-Import-Module "$modulesFolder\retry.psm1"
-
-Retry-Command -Command "Remove-Symlink-IfExists" -Arguments @{Link = $link} -Retries $retries
-Retry-Command -Command "Remove-Item" -Arguments @{ Path = $target; Recurse = $true; Force = $true }
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/monitor/ambari-metrics-monitor.nuspec b/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/monitor/ambari-metrics-monitor.nuspec
deleted file mode 100644
index 3500cab..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/monitor/ambari-metrics-monitor.nuspec
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Do not remove this test for UTF-8: if “Ω” doesn’t appear as greek uppercase omega letter enclosed in quotation marks, you should use an editor that supports UTF-8, not this one. -->
-<package xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
-  <metadata>
-    <!-- Read this before publishing packages to chocolatey.org: https://github.com/chocolatey/chocolatey/wiki/CreatePackages -->
-    <id>ambari-metrics-monitor</id>
-    <title>Ambari Metrics Monitor</title>
-    <version>1.0</version>
-    <authors>Apache Ambari</authors>
-    <owners>Apache Ambari</owners>
-    <summary>Ambari Metrics Monitor</summary>
-    <description>Ambari Metrics Monitor
-    </description>
-    <projectUrl>http://ambari.apache.org</projectUrl>
-    <tags>ambari-metrics-monitor</tags>
-    <copyright>https://github.com/apache/ambari/blob/trunk/NOTICE.txt</copyright>
-    <licenseUrl>https://github.com/apache/ambari/blob/trunk/LICENSE.txt</licenseUrl>
-    <requireLicenseAcceptance>false</requireLicenseAcceptance>
-    <releaseNotes></releaseNotes>
-  </metadata>
-  <files>
-    <file src="tools\**" target="tools" />
-    <file src="content\**" target="content" />
-    <file src="modules\**" target="modules" />
-  </files>
-</package>
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/monitor/chocolateyinstall.ps1 b/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/monitor/chocolateyinstall.ps1
deleted file mode 100644
index c1ad8c3..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/monitor/chocolateyinstall.ps1
+++ /dev/null
@@ -1,93 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-# Stop on all errors
-$ErrorActionPreference = 'Stop';
-
-# Package Name
-$packageName = $Env:chocolateyPackageName
-# Package Version
-$packageVersion = $Env:chocolateyPackageVersion
-# Package Folder
-$packageFolder = $Env:chocolateyPackageFolder
-# Package Parameters
-$packageParameters = $env:chocolateyPackageParameters
-
-$arguments = @{}
-$ambariRoot = "C:\ambari"
-$retries = 5
-$lockTimeout = 60000
-
-# Parse the packageParameters
-#   /AmbariRoot:C:\ambari /Retries:5
-if ($packageParameters) {
-  $match_pattern = "\/(?<option>([a-zA-Z]+)):(?<value>([`"'])?([a-zA-Z0-9- _\\:\.]+)([`"'])?)|\/(?<option>([a-zA-Z]+))"
-  $option_name = 'option'
-  $value_name = 'value'
-
-  if ($packageParameters -match $match_pattern ){
-    $results = $packageParameters | Select-String $match_pattern -AllMatches
-    $results.matches | % {
-      $arguments.Add(
-        $_.Groups[$option_name].Value.Trim(),
-        $_.Groups[$value_name].Value.Trim())
-    }
-  } else {
-    Throw "Package Parameters were found but were invalid (REGEX Failure)"
-  }
-  if ($arguments.ContainsKey("AmbariRoot")) {
-    Write-Debug "AmbariRoot Argument Found"
-    $ambariRoot = $arguments["AmbariRoot"]
-  }
-  if ($arguments.ContainsKey("Retries")) {
-    Write-Debug "Retries Argument Found"
-    $retries = $arguments["Retries"]
-  }
-  if ($arguments.ContainsKey("LockTimeout")) {
-    Write-Debug "LockTimeout Argument Found"
-    $lockTimeout = $arguments["LockTimeout"]
-  }
-} else {
-  Write-Debug "No Package Parameters Passed in"
-}
-
-$modulesFolder = "$(Join-Path $packageFolder modules)"
-$contentFolder = "$(Join-Path $packageFolder content)"
-$zipFile = "$(Join-Path $contentFolder $packageName-$packageVersion.zip)"
-$specificFolder = ""
-$link = "$ambariRoot\$packageName"
-$target = "$ambariRoot\$packageName-$packageVersion"
-$monitorHome = $link
-$monitorConfDir = "$link\conf"
-
-Import-Module "$modulesFolder\link.psm1"
-Import-Module "$modulesFolder\retry.psm1"
-
-$monitorMutex = New-Object System.Threading.Mutex $false, "Global\$packageName"
-if($monitorMutex.WaitOne($lockTimeout)) {
-  try {
-    Retry-Command -Command "Get-ChocolateyUnzip" -Arguments @{ FileFullPath = $zipFile; Destination = $target; SpecificFolder = $specificFolder; PackageName = $packageName} -Retries $retries
-    Retry-Command -Command "Remove-Symlink-IfExists" -Arguments @{Link = $link} -Retries $retries
-    Retry-Command -Command "New-Symlink" -Arguments @{ Link = $link; Target = $target } -Retries $retries
-
-    [Environment]::SetEnvironmentVariable("MONITOR_HOME", $monitorHome, "Machine")
-    [Environment]::SetEnvironmentVariable("MONITOR_CONF_DIR", $monitorConfDir, "Machine")
-  } finally {
-    $monitorMutex.ReleaseMutex()
-  }
-} else {
-  Write-Host ("Failed to acquire lock [$packageName] within [$lockTimeout] ms. Installation failed!")
-  throw
-}
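
Note how each installer serializes concurrent runs with a global named mutex (System.Threading.Mutex plus WaitOne) and a configurable LockTimeout, 60000 ms by default. Java has no machine-wide named mutex, so the closest sketch uses an advisory file lock with a polling deadline; this is an analogy under that substitution, not the scripts' actual mechanism.

    import java.io.IOException;
    import java.nio.channels.FileChannel;
    import java.nio.channels.FileLock;
    import java.nio.file.Path;
    import java.nio.file.StandardOpenOption;

    // Sketch: emulate the scripts' Global\<packageName> mutex with an
    // advisory file lock (the file-lock substitution is an assumption).
    public final class InstallLock {
      public static void withLock(Path lockFile, long timeoutMillis, Runnable body)
          throws IOException, InterruptedException {
        try (FileChannel ch = FileChannel.open(lockFile,
            StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
          long deadline = System.currentTimeMillis() + timeoutMillis;
          FileLock lock = null;
          while (lock == null && System.currentTimeMillis() < deadline) {
            lock = ch.tryLock();            // non-blocking acquire attempt
            if (lock == null) Thread.sleep(250);
          }
          if (lock == null) {
            throw new IOException(
                "Failed to acquire lock within [" + timeoutMillis + "] ms");
          }
          try {
            body.run();                     // unzip, swap symlink, set env vars
          } finally {
            lock.release();
          }
        }
      }
    }
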
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/monitor/chocolateyuninstall.ps1 b/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/monitor/chocolateyuninstall.ps1
deleted file mode 100644
index fe6bdbd..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/monitor/chocolateyuninstall.ps1
+++ /dev/null
@@ -1,69 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-# Stop on all errors
-$ErrorActionPreference = 'Stop';
-
-# Package Name
-$packageName = $Env:chocolateyPackageName
-# Package Version
-$packageVersion = $Env:chocolateyPackageVersion
-# Package Folder
-$packageFolder = $Env:chocolateyPackageFolder
-# Package Parameters
-$packageParameters = $env:chocolateyPackageParameters
-
-$arguments = @{}
-$ambariRoot = "C:\ambari"
-$retries = 5
-# Parse the packageParameters
-#   /AmbariRoot:C:\ambari /Retries:5
-if ($packageParameters) {
-  $match_pattern = "\/(?<option>([a-zA-Z]+)):(?<value>([`"'])?([a-zA-Z0-9- _\\:\.]+)([`"'])?)|\/(?<option>([a-zA-Z]+))"
-  $option_name = 'option'
-  $value_name = 'value'
-
-  if ($packageParameters -match $match_pattern ){
-    $results = $packageParameters | Select-String $match_pattern -AllMatches
-    $results.matches | % {
-      $arguments.Add(
-        $_.Groups[$option_name].Value.Trim(),
-        $_.Groups[$value_name].Value.Trim())
-    }
-  } else {
-    Throw "Package Parameters were found but were invalid (REGEX Failure)"
-  }
-  if ($arguments.ContainsKey("AmbariRoot")) {
-    Write-Debug "AmbariRoot Argument Found"
-    $ambariRoot = $arguments["AmbariRoot"]
-  }
-  if ($arguments.ContainsKey("Retries")) {
-    Write-Debug "Retries Argument Found"
-    $retries = $arguments["Retries"]
-  }
-} else {
-  Write-Debug "No Package Parameters Passed in"
-}
-
-$modulesFolder = "$(Join-Path $packageFolder modules)"
-$contentFolder = "$(Join-Path $packageFolder content)"
-$link = "$ambariRoot\$packageName"
-$target = "$ambariRoot\$packageName-$packageVersion"
-
-Import-Module "$modulesFolder\link.psm1"
-Import-Module "$modulesFolder\retry.psm1"
-
-Retry-Command -Command "Remove-Symlink-IfExists" -Arguments @{Link = $link} -Retries $retries
-Retry-Command -Command "Remove-Item" -Arguments @{ Path = $target; Recurse = $true; Force = $true }
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/sink/ambari-metrics-hadoop-sink.nuspec b/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/sink/ambari-metrics-hadoop-sink.nuspec
deleted file mode 100644
index 27dec8f..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/sink/ambari-metrics-hadoop-sink.nuspec
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Do not remove this test for UTF-8: if “Ω” doesn’t appear as greek uppercase omega letter enclosed in quotation marks, you should use an editor that supports UTF-8, not this one. -->
-<package xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
-  <metadata>
-    <!-- Read this before publishing packages to chocolatey.org: https://github.com/chocolatey/chocolatey/wiki/CreatePackages -->
-    <id>ambari-metrics-hadoop-sink</id>
-    <title>Ambari Metrics Hadoop Sink</title>
-    <version>1.0</version>
-    <authors>Apache Ambari</authors>
-    <owners>Apache Ambari</owners>
-    <summary>Ambari Metrics Hadoop Sink</summary>
-    <description>Ambari Metrics Hadoop Sink
-    </description>
-    <projectUrl>http://ambari.apache.org</projectUrl>
-    <tags>ambari-metrics-hadoop-sink</tags>
-    <copyright>https://github.com/apache/ambari/blob/trunk/NOTICE.txt</copyright>
-    <licenseUrl>https://github.com/apache/ambari/blob/trunk/LICENSE.txt</licenseUrl>
-    <requireLicenseAcceptance>false</requireLicenseAcceptance>
-    <releaseNotes></releaseNotes>
-  </metadata>
-  <files>
-    <file src="tools\**" target="tools" />
-    <file src="content\**" target="content" />
-    <file src="modules\**" target="modules" />
-  </files>
-</package>
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/sink/chocolateyinstall.ps1 b/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/sink/chocolateyinstall.ps1
deleted file mode 100644
index 898f7d7..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/sink/chocolateyinstall.ps1
+++ /dev/null
@@ -1,91 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-# Stop on all errors
-$ErrorActionPreference = 'Stop';
-
-# Package Name
-$packageName = $Env:chocolateyPackageName
-# Package Version
-$packageVersion = $Env:chocolateyPackageVersion
-# Package Folder
-$packageFolder = $Env:chocolateyPackageFolder
-# Package Parameters
-$packageParameters = $env:chocolateyPackageParameters
-
-$arguments = @{}
-$ambariRoot = "C:\ambari"
-$retries = 5
-$lockTimeout = 60000
-
-# Parse the packageParameters
-#   /AmbariRoot:C:\ambari /Retries:5
-if ($packageParameters) {
-  $match_pattern = "\/(?<option>([a-zA-Z]+)):(?<value>([`"'])?([a-zA-Z0-9- _\\:\.]+)([`"'])?)|\/(?<option>([a-zA-Z]+))"
-  $option_name = 'option'
-  $value_name = 'value'
-
-  if ($packageParameters -match $match_pattern ){
-    $results = $packageParameters | Select-String $match_pattern -AllMatches
-    $results.matches | % {
-      $arguments.Add(
-        $_.Groups[$option_name].Value.Trim(),
-        $_.Groups[$value_name].Value.Trim())
-    }
-  } else {
-    Throw "Package Parameters were found but were invalid (REGEX Failure)"
-  }
-  if ($arguments.ContainsKey("AmbariRoot")) {
-    Write-Debug "AmbariRoot Argument Found"
-    $ambariRoot = $arguments["AmbariRoot"]
-  }
-  if ($arguments.ContainsKey("Retries")) {
-    Write-Debug "Retries Argument Found"
-    $retries = $arguments["Retries"]
-  }
-  if ($arguments.ContainsKey("LockTimeout")) {
-    Write-Debug "LockTimeout Argument Found"
-    $lockTimeout = $arguments["LockTimeout"]
-  }
-} else {
-  Write-Debug "No Package Parameters Passed in"
-}
-
-$modulesFolder = "$(Join-Path $packageFolder modules)"
-$contentFolder = "$(Join-Path $packageFolder content)"
-$zipFile = "$(Join-Path $contentFolder $packageName-$packageVersion.zip)"
-$specificFolder = ""
-$link = "$ambariRoot\$packageName"
-$target = "$ambariRoot\$packageName-$packageVersion"
-$sinkHome = $link
-
-Import-Module "$modulesFolder\link.psm1"
-Import-Module "$modulesFolder\retry.psm1"
-
-$sinkMutex = New-Object System.Threading.Mutex $false, "Global\$packageName"
-if($sinkMutex.WaitOne($lockTimeout)) {
-  try {
-    Retry-Command -Command "Get-ChocolateyUnzip" -Arguments @{ FileFullPath = $zipFile; Destination = $target; SpecificFolder = $specificFolder; PackageName = $packageName} -Retries $retries
-    Retry-Command -Command "Remove-Symlink-IfExists" -Arguments @{Link = $link} -Retries $retries
-    Retry-Command -Command "New-Symlink" -Arguments @{ Link = $link; Target = $target } -Retries $retries
-
-    [Environment]::SetEnvironmentVariable("SINK_HOME", $sinkHome, "Machine")
-  } finally {
-    $sinkMutex.ReleaseMutex()
-  }
-} else {
-  Write-Host ("Failed to acquire lock [$packageName] within [$lockTimeout] ms. Installation failed!")
-  throw
-}
\ No newline at end of file
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/sink/chocolateyuninstall.ps1 b/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/sink/chocolateyuninstall.ps1
deleted file mode 100644
index fe6bdbd..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/choco/sink/chocolateyuninstall.ps1
+++ /dev/null
@@ -1,69 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-# Stop on all errors
-$ErrorActionPreference = 'Stop';
-
-# Package Name
-$packageName = $Env:chocolateyPackageName
-# Package Version
-$packageVersion = $Env:chocolateyPackageVersion
-# Package Folder
-$packageFolder = $Env:chocolateyPackageFolder
-# Package Parameters
-$packageParameters = $env:chocolateyPackageParameters
-
-$arguments = @{}
-$ambariRoot = "C:\ambari"
-$retries = 5
-# Parse the packageParameters
-#   /AmbariRoot:C:\ambari /Retries:5
-if ($packageParameters) {
-  $match_pattern = "\/(?<option>([a-zA-Z]+)):(?<value>([`"'])?([a-zA-Z0-9- _\\:\.]+)([`"'])?)|\/(?<option>([a-zA-Z]+))"
-  $option_name = 'option'
-  $value_name = 'value'
-
-  if ($packageParameters -match $match_pattern ){
-    $results = $packageParameters | Select-String $match_pattern -AllMatches
-    $results.matches | % {
-      $arguments.Add(
-        $_.Groups[$option_name].Value.Trim(),
-        $_.Groups[$value_name].Value.Trim())
-    }
-  } else {
-    Throw "Package Parameters were found but were invalid (REGEX Failure)"
-  }
-  if ($arguments.ContainsKey("AmbariRoot")) {
-    Write-Debug "AmbariRoot Argument Found"
-    $ambariRoot = $arguments["AmbariRoot"]
-  }
-  if ($arguments.ContainsKey("Retries")) {
-    Write-Debug "Retries Argument Found"
-    $retries = $arguments["Retries"]
-  }
-} else {
-  Write-Debug "No Package Parameters Passed in"
-}
-
-$modulesFolder = "$(Join-Path $packageFolder modules)"
-$contentFolder = "$(Join-Path $packageFolder content)"
-$link = "$ambariRoot\$packageName"
-$target = "$ambariRoot\$packageName-$packageVersion"
-
-Import-Module "$modulesFolder\link.psm1"
-Import-Module "$modulesFolder\retry.psm1"
-
-Retry-Command -Command "Remove-Symlink-IfExists" -Arguments @{Link = $link} -Retries $retries
-Retry-Command -Command "Remove-Item" -Arguments @{ Path = $target; Recurse = $true; Force = $true }
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/control b/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/control
deleted file mode 100644
index 40cd855..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-Package: [[artifactId]]
-Version: [[package-version]]-[[package-release]]
-Section: [[deb.section]]
-Priority: [[deb.priority]]
-Depends: [[deb.dependency.list]]
-Architecture: [[deb.architecture]]
-Description: [[description]]
-Maintainer: [[deb.publisher]]
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/postinst b/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/postinst
deleted file mode 100644
index e75d557..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/postinst
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-HADOOP_LINK_NAME="/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar"
-HADOOP_SINK_JAR="/usr/lib/ambari-metrics-hadoop-sink/${hadoop.sink.jar}"
-
-FLUME_LINK_NAME="/usr/lib/flume/lib/ambari-metrics-flume-sink.jar"
-FLUME_SINK_JAR="/usr/lib/flume/lib/${flume.sink.jar}"
-
-KAFKA_LINK_NAME="/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar"
-KAFKA_SINK_JAR="/usr/lib/ambari-metrics-kafka-sink/${kafka.sink.jar}"
-
-#link for storm jar not required with current loading
-#STORM_SINK_JAR="/usr/lib/storm/lib/${storm.sink.jar}"
-#STORM_LINK_NAME="/usr/lib/storm/lib/ambari-metrics-storm-sink.jar"
-
-JARS=(${HADOOP_SINK_JAR} ${FLUME_SINK_JAR} ${KAFKA_SINK_JAR})
-LINKS=(${HADOOP_LINK_NAME} ${FLUME_LINK_NAME} ${KAFKA_LINK_NAME})
-
-for index in ${!LINKS[*]}
-do
-  rm -f ${LINKS[$index]} ; ln -s ${JARS[$index]} ${LINKS[$index]}
-done
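
The postinst's whole job is to repoint stable link names at the newly installed, version-suffixed jars; ${hadoop.sink.jar} and friends are filled in by Maven filtering at build time. The same loop reappears in the rpm postinstall later in this diff. A Java NIO sketch of that rm -f / ln -s swap, with illustrative literal jar names standing in for the filtered properties:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    // Sketch of the postinst re-linking loop; the version-suffixed jar
    // names below are examples, the real ones come from Maven filtering.
    public final class RelinkSinks {
      public static void main(String[] args) throws IOException {
        Path[] jars = {
            Paths.get("/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-with-common-2.0.0.0.jar"),
            Paths.get("/usr/lib/flume/lib/ambari-metrics-flume-sink-with-common-2.0.0.0.jar"),
            Paths.get("/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink-with-common-2.0.0.0.jar")
        };
        Path[] links = {
            Paths.get("/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar"),
            Paths.get("/usr/lib/flume/lib/ambari-metrics-flume-sink.jar"),
            Paths.get("/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar")
        };
        for (int i = 0; i < links.length; i++) {
          Files.deleteIfExists(links[i]);               // rm -f LINK
          Files.createSymbolicLink(links[i], jars[i]);  // ln -s JAR LINK
        }
      }
    }
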
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/preinst b/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/preinst
deleted file mode 100644
index 7560fb9..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/preinst
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-JAR_FILES_LEGACY_FOLDER="/usr/lib/ambari-metrics-sink-legacy"
-
-HADOOP_SINK_LINK="/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar"
-
-HADOOP_LEGACY_LINK_NAME="/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar"
-
-if [ -f ${HADOOP_SINK_LINK} ]
-then
-    old_jar=$(readlink -f ${HADOOP_SINK_LINK})
-    version_part=$(basename ${old_jar} | awk -F"-" '{print $7}')
-    IFS=. version=(${version_part})
-    unset IFS
-
-    if [[ ${version[0]} -le 2 && ${version[1]} -lt 7 ]] # backup only required on upgrade from version < 2.7
-    then
-        if [ ! -d "$JAR_FILES_LEGACY_FOLDER" ]
-        then
-            mkdir -p "$JAR_FILES_LEGACY_FOLDER"
-        fi
-        echo "Backing up Ambari metrics hadoop sink jar ${old_jar} -> $JAR_FILES_LEGACY_FOLDER/"
-        cp "${old_jar}" "${JAR_FILES_LEGACY_FOLDER}/"
-
-        HADOOP_SINK_LEGACY_JAR="$JAR_FILES_LEGACY_FOLDER/$(basename ${old_jar})"
-        echo "Creating symlink for backup jar $HADOOP_LEGACY_LINK_NAME -> $HADOOP_SINK_LEGACY_JAR"
-        rm -f "${HADOOP_LEGACY_LINK_NAME}" ; ln -s "${HADOOP_SINK_LEGACY_JAR}" "${HADOOP_LEGACY_LINK_NAME}"
-    fi
-fi
-
-exit 0
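
The preinst resolves the existing symlink, pulls the version out of the jar's hyphen-delimited name (the awk field position is specific to that naming convention), and backs the old jar up only when upgrading from a release older than 2.7. A hedged Java restatement of that version gate, with the "older than 2.7" condition written out explicitly:

    // Sketch of the preinst's "upgrade from < 2.7 needs a backup" check.
    // The version string would be extracted from the resolved jar name,
    // e.g. "2.6.0.0" out of "ambari-metrics-hadoop-sink-with-common-2.6.0.0.jar".
    public final class NeedsLegacyBackup {
      public static boolean olderThan27(String version) {
        String[] parts = version.split("\\.");
        int major = Integer.parseInt(parts[0]);
        int minor = parts.length > 1 ? Integer.parseInt(parts[1]) : 0;
        // Anything below 2.7 (1.x, 2.0 .. 2.6) gets its jar backed up.
        return major < 2 || (major == 2 && minor < 7);
      }

      public static void main(String[] args) {
        System.out.println(olderThan27("2.6.0.0")); // true  -> back up old jar
        System.out.println(olderThan27("2.7.0.0")); // false -> nothing to do
      }
    }
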
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/prerm b/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/prerm
deleted file mode 100644
index f50d88a..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/deb/control/prerm
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-# WARNING: This script runs not only on uninstall, but also during
-# package upgrades. See http://www.ibm.com/developerworks/library/l-rpm2/
-# for details
-
-RESOURCE_MONITORING_DIR=/usr/lib/python2.6/site-packages/resource_monitoring
-PSUTIL_DIR="${RESOURCE_MONITORING_DIR}/psutil"
-
-
-if [ -d "${PSUTIL_DIR}" ]; then
-  rm -rf "${PSUTIL_DIR}"/*
-fi
-
-exit 0
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/rpm/sink/postinstall.sh b/ambari-metrics/ambari-metrics-assembly/src/main/package/rpm/sink/postinstall.sh
deleted file mode 100644
index e75d557..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/rpm/sink/postinstall.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-HADOOP_LINK_NAME="/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar"
-HADOOP_SINK_JAR="/usr/lib/ambari-metrics-hadoop-sink/${hadoop.sink.jar}"
-
-FLUME_LINK_NAME="/usr/lib/flume/lib/ambari-metrics-flume-sink.jar"
-FLUME_SINK_JAR="/usr/lib/flume/lib/${flume.sink.jar}"
-
-KAFKA_LINK_NAME="/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar"
-KAFKA_SINK_JAR="/usr/lib/ambari-metrics-kafka-sink/${kafka.sink.jar}"
-
-#link for storm jar not required with current loading
-#STORM_SINK_JAR="/usr/lib/storm/lib/${storm.sink.jar}"
-#STORM_LINK_NAME="/usr/lib/storm/lib/ambari-metrics-storm-sink.jar"
-
-JARS=(${HADOOP_SINK_JAR} ${FLUME_SINK_JAR} ${KAFKA_SINK_JAR})
-LINKS=(${HADOOP_LINK_NAME} ${FLUME_LINK_NAME} ${KAFKA_LINK_NAME})
-
-for index in ${!LINKS[*]}
-do
-  rm -f ${LINKS[$index]} ; ln -s ${JARS[$index]} ${LINKS[$index]}
-done
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/package/rpm/sink/preinstall.sh b/ambari-metrics/ambari-metrics-assembly/src/main/package/rpm/sink/preinstall.sh
deleted file mode 100644
index 7560fb9..0000000
--- a/ambari-metrics/ambari-metrics-assembly/src/main/package/rpm/sink/preinstall.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
-JAR_FILES_LEGACY_FOLDER="/usr/lib/ambari-metrics-sink-legacy"
-
-HADOOP_SINK_LINK="/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar"
-
-HADOOP_LEGACY_LINK_NAME="/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink-legacy.jar"
-
-if [ -f ${HADOOP_SINK_LINK} ]
-then
-    old_jar=$(readlink -f ${HADOOP_SINK_LINK})
-    version_part=$(basename ${old_jar} | awk -F"-" '{print $7}')
-    IFS=. version=(${version_part})
-    unset IFS
-
-    if [[ ${version[0]} -le 2 && ${version[1]} -lt 7 ]] # backup only required on upgrade from version < 2.7
-    then
-        if [ ! -d "$JAR_FILES_LEGACY_FOLDER" ]
-        then
-            mkdir -p "$JAR_FILES_LEGACY_FOLDER"
-        fi
-        echo "Backing up Ambari metrics hadoop sink jar ${old_jar} -> $JAR_FILES_LEGACY_FOLDER/"
-        cp "${old_jar}" "${JAR_FILES_LEGACY_FOLDER}/"
-
-        HADOOP_SINK_LEGACY_JAR="$JAR_FILES_LEGACY_FOLDER/$(basename ${old_jar})"
-        echo "Creating symlink for backup jar $HADOOP_LEGACY_LINK_NAME -> $HADOOP_SINK_LEGACY_JAR"
-        rm -f "${HADOOP_LEGACY_LINK_NAME}" ; ln -s "${HADOOP_SINK_LEGACY_JAR}" "${HADOOP_LEGACY_LINK_NAME}"
-    fi
-fi
-
-exit 0
diff --git a/ambari-metrics/ambari-metrics-common/pom.xml b/ambari-metrics/ambari-metrics-common/pom.xml
deleted file mode 100644
index 1c092dc..0000000
--- a/ambari-metrics/ambari-metrics-common/pom.xml
+++ /dev/null
@@ -1,231 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <parent>
-    <artifactId>ambari-metrics</artifactId>
-    <groupId>org.apache.ambari</groupId>
-    <version>2.0.0.0-SNAPSHOT</version>
-  </parent>
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>ambari-metrics-common</artifactId>
-  <name>Ambari Metrics Common</name>
-
-  <build>
-    <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>3.0</version>
-      </plugin>
-      <plugin>
-        <groupId>org.vafer</groupId>
-        <artifactId>jdeb</artifactId>
-        <version>1.0.1</version>
-        <executions>
-          <execution>
-            <!--Stub execution on direct plugin call - workaround for ambari deb build process-->
-            <id>stub-execution</id>
-            <phase>none</phase>
-            <goals>
-              <goal>jdeb</goal>
-            </goals>
-          </execution>
-        </executions>
-        <configuration>
-          <skip>true</skip>
-          <attach>false</attach>
-          <submodules>false</submodules>
-          <controlDir>${project.basedir}/../src/main/package/deb/control</controlDir>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-shade-plugin</artifactId>
-        <version>3.1.0</version>
-        <executions>
-          <!-- Run shade goal on package phase -->
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-            <configuration>
-              <minimizeJar>true</minimizeJar>
-              <createDependencyReducedPom>false</createDependencyReducedPom>
-              <relocations>
-                <relocation>
-                  <pattern>com.google</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.sink.relocated.google</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.commons.io</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.sink.relocated.commons.io</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.commons.lang</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.relocated.commons.lang</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.commons.math3</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.relocated.commons.math3</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.commons.codec</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.relocated.commons.codec</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.curator</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.sink.relocated.curator</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.jute</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.sink.relocated.jute</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.zookeeper</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.sink.relocated.zookeeper</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.slf4j</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.sink.relocated.slf4j</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.log4j</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.sink.relocated.log4j</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>jline</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.sink.relocated.jline</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.jboss</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.sink.relocated.jboss</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.http</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.sink.relocated.apache.http</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.codehaus.jackson</pattern>
-                  <shadedPattern>org.apache.ambari.metrics.sink.relocated.jackson</shadedPattern>
-                </relocation>
-              </relocations>
-              <filters>
-                <filter>
-                  <artifact>*:*</artifact>
-                  <excludes>
-                    <exclude>META-INF/*.SF</exclude>
-                    <exclude>META-INF/*.DSA</exclude>
-                    <exclude>META-INF/*.RSA</exclude>
-                  </excludes>
-                </filter>
-              </filters>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-
-  <dependencies>
-    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-      <version>1.1.1</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-io</groupId>
-      <artifactId>commons-io</artifactId>
-      <version>2.4</version>
-    </dependency>
-    <dependency>
-      <groupId>com.google.code.gson</groupId>
-      <artifactId>gson</artifactId>
-      <version>2.2.2</version>
-    </dependency>
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-      <version>14.0.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.curator</groupId>
-      <artifactId>curator-framework</artifactId>
-      <version>4.0.0</version>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-xc</artifactId>
-      <version>1.9.13</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-annotations</artifactId>
-      <version>2.6.0</version>
-      <exclusions>
-        <exclusion>
-          <groupId>jdk.tools</groupId>
-          <artifactId>jdk.tools</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-      <version>1.9.13</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
-      <version>2.5</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.commons</groupId>
-      <artifactId>commons-math3</artifactId>
-      <version>3.1.1</version>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-      <version>4.10</version>
-    </dependency>
-    <dependency>
-      <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
-      <version>3.2</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-api-easymock</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-module-junit4</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpclient</artifactId>
-      <version>4.5.2</version>
-    </dependency>
-  </dependencies>
-</project>
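
Worth noting before the sink sources below: the shade configuration in this pom rewrites every bundled dependency into relocated packages (org.apache.ambari.metrics.sink.relocated.* and similar), so a sink jar loaded inside Hadoop, Flume, Storm, or Kafka cannot clash with the host service's own copies of Guava, ZooKeeper, Jackson, and so on. A small sketch of the observable effect, assuming the shaded artifact built by this (now removed) pom is on the classpath:

    // Sketch: with the shaded ambari-metrics-common jar on the classpath,
    // bundled Guava lives under the relocated package name, while the
    // original com.google coordinates still resolve to the host's Guava,
    // if any. This only runs against the shaded artifact, hence a sketch.
    public final class RelocationDemo {
      public static void main(String[] args) throws ClassNotFoundException {
        Class<?> shaded = Class.forName(
            "org.apache.ambari.metrics.sink.relocated.google.common.cache.CacheBuilder");
        System.out.println("Loaded relocated class: " + shaded.getName());
      }
    }
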
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
deleted file mode 100644
index 739e9dc..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
+++ /dev/null
@@ -1,754 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import com.google.common.base.Supplier;
-import com.google.common.base.Suppliers;
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.reflect.TypeToken;
-import com.google.gson.Gson;
-import com.google.gson.JsonSyntaxException;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.availability.MetricCollectorHAHelper;
-import org.apache.hadoop.metrics2.sink.timeline.availability.MetricCollectorUnavailableException;
-import org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardHostnameHashingStrategy;
-import org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardStrategy;
-import org.apache.http.HttpStatus;
-import org.codehaus.jackson.map.AnnotationIntrospector;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
-
-import javax.net.ssl.HttpsURLConnection;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.SSLSocketFactory;
-import javax.net.ssl.TrustManagerFactory;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.StringWriter;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.security.KeyStore;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Random;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeMap;
-import java.util.TreeSet;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-
-public abstract class AbstractTimelineMetricsSink {
-  public static final String TAGS_FOR_PREFIX_PROPERTY_PREFIX = "tagsForPrefix.";
-  public static final String MAX_METRIC_ROW_CACHE_SIZE = "maxRowCacheSize";
-  public static final String METRICS_SEND_INTERVAL = "sendInterval";
-  public static final String METRICS_POST_TIMEOUT_SECONDS = "timeout";
-  public static final String COLLECTOR_HOSTS_PROPERTY = "collector.hosts";
-  public static final String COLLECTOR_PROTOCOL = "protocol";
-  public static final String COLLECTOR_PORT = "port";
-  public static final String ZOOKEEPER_QUORUM = "zookeeper.quorum";
-  public static final String COLLECTOR_ZOOKEEPER_QUORUM = "metrics.zookeeper.quorum";
-  public static final int DEFAULT_POST_TIMEOUT_SECONDS = 10;
-  public static final String SKIP_COUNTER_TRANSFROMATION = "skipCounterDerivative";
-  public static final String RPC_METRIC_PREFIX = "metric.rpc";
-  public static final String WS_V1_TIMELINE_METRICS = "/ws/v1/timeline/metrics";
-  public static final String SSL_KEYSTORE_PATH_PROPERTY = "truststore.path";
-  public static final String SSL_KEYSTORE_TYPE_PROPERTY = "truststore.type";
-  public static final String SSL_KEYSTORE_PASSWORD_PROPERTY = "truststore.password";
-  public static final String HOST_IN_MEMORY_AGGREGATION_ENABLED_PROPERTY = "host_in_memory_aggregation";
-  public static final String HOST_IN_MEMORY_AGGREGATION_PORT_PROPERTY = "host_in_memory_aggregation_port";
-  public static final String HOST_IN_MEMORY_AGGREGATION_PROTOCOL_PROPERTY = "host_in_memory_aggregation_protocol";
-  public static final String COLLECTOR_LIVE_NODES_PATH = "/ws/v1/timeline/metrics/livenodes";
-  public static final String INSTANCE_ID_PROPERTY = "instanceId";
-  public static final String SET_INSTANCE_ID_PROPERTY = "set.instanceId";
-  public static final String COOKIE = "Cookie";
-  private static final String WWW_AUTHENTICATE = "WWW-Authenticate";
-  private static final String NEGOTIATE = "Negotiate";
-
-  protected final AtomicInteger failedCollectorConnectionsCounter = new AtomicInteger(0);
-  public static int NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS = 100;
-  protected static final AtomicInteger nullCollectorCounter = new AtomicInteger(0);
-  public static int NUMBER_OF_NULL_COLLECTOR_EXCEPTIONS = 20;
-  public int ZK_CONNECT_TRY_COUNT = 10;
-  public int ZK_SLEEP_BETWEEN_RETRY_TIME = 2000;
-  public boolean shardExpired = true;
-  private int zookeeperMinBackoffTimeMins = 2;
-  private int zookeeperMaxBackoffTimeMins = 5;
-  private long zookeeperBackoffTimeMillis;
-  private long lastFailedZkRequestTime = 0L;
-
-  private SSLSocketFactory sslSocketFactory;
-  private AppCookieManager appCookieManager = null;
-
-  protected final Log LOG;
-
-  protected static ObjectMapper mapper;
-
-  protected MetricCollectorHAHelper collectorHAHelper;
-
-  protected MetricSinkWriteShardStrategy metricSinkWriteShardStrategy;
-
-  // Single element cache with fixed expiration - Helps adjacent Sinks as
-  // well as timed refresh
-  protected Supplier<String> targetCollectorHostSupplier;
-
-  protected final SortedSet<String> allKnownLiveCollectors = new TreeSet<>();
-
-  private volatile boolean isInitializedForHA = false;
-
-  @SuppressWarnings("all")
-  private final int RETRY_COUNT_BEFORE_COLLECTOR_FAILOVER = 3;
-
-  private final Gson gson = new Gson();
-
-  private final Random rand = new Random();
-
-  private static final int COLLECTOR_HOST_CACHE_MAX_EXPIRATION_MINUTES = 75;
-  private static final int COLLECTOR_HOST_CACHE_MIN_EXPIRATION_MINUTES = 60;
-
-  //10 seconds
-  protected int collectionPeriodMillis = 10000;
-
-  private int cacheExpireTimeMinutesDefault = 10;
-
-  private volatile Cache<String, TimelineMetric> metricsPostCache = CacheBuilder.newBuilder().expireAfterAccess(cacheExpireTimeMinutesDefault, TimeUnit.MINUTES).build();
-
-  static {
-    mapper = new ObjectMapper();
-    AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
-    mapper.setAnnotationIntrospector(introspector);
-    // withSerializationInclusion returns a copy, so set inclusion on the mapper itself
-    mapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
-  }
-
-  public AbstractTimelineMetricsSink() {
-    LOG = LogFactory.getLog(this.getClass());
-  }
-
-  /**
-   * Initialize Sink write strategy with respect to HA Collector
-   */
-  protected void init() {
-    metricSinkWriteShardStrategy = new MetricSinkWriteShardHostnameHashingStrategy(getHostname());
-    collectorHAHelper = new MetricCollectorHAHelper(getZookeeperQuorum(),
-      ZK_CONNECT_TRY_COUNT, ZK_SLEEP_BETWEEN_RETRY_TIME);
-    zookeeperBackoffTimeMillis = getZookeeperBackoffTimeMillis();
-    isInitializedForHA = true;
-  }
-
-  protected boolean emitMetricsJson(String connectUrl, String jsonData) {
-    int timeout = getTimeoutSeconds() * 1000;
-    HttpURLConnection connection = null;
-    try {
-      if (connectUrl == null) {
-        throw new IOException("Unknown URL. Unable to connect to metrics collector.");
-      }
-      connection = connectUrl.startsWith("https") ?
-          getSSLConnection(connectUrl) : getConnection(connectUrl);
-
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("emitMetricsJson to " + connectUrl + ", " + jsonData);
-      }
-      AppCookieManager appCookieManager = getAppCookieManager();
-      String appCookie = appCookieManager.getCachedAppCookie(connectUrl);
-      if (appCookie != null) {
-        if (LOG.isInfoEnabled()) {
-          LOG.info("Using cached app cookie for URL:" + connectUrl);
-        }
-        connection.setRequestProperty(COOKIE, appCookie);
-      }
-
-      int statusCode = emitMetricsJson(connection, timeout, jsonData);
-
-      if (statusCode == HttpStatus.SC_UNAUTHORIZED) {
-        String wwwAuthHeader = connection.getHeaderField(WWW_AUTHENTICATE);
-        if (LOG.isInfoEnabled()) {
-          LOG.info("Received WWW-Authentication header:" + wwwAuthHeader + ", for URL:" + connectUrl);
-        }
-        if (wwwAuthHeader != null && wwwAuthHeader.trim().startsWith(NEGOTIATE)) {
-          appCookie = appCookieManager.getAppCookie(connectUrl, true);
-          if (appCookie != null) {
-            cleanupInputStream(connection.getInputStream());
-            connection = connectUrl.startsWith("https") ?
-                getSSLConnection(connectUrl) : getConnection(connectUrl);
-            connection.setRequestProperty(COOKIE, appCookie);
-            statusCode = emitMetricsJson(connection, timeout, jsonData);
-          }
-        } else {
-          // no supported authentication type found
-          // we would let the original response propagate
-          LOG.error("Unsupported WWW-Authentication header:" + wwwAuthHeader+ ", for URL:" + connectUrl);
-        }
-      }
-
-      if (statusCode != 200) {
-        LOG.info("Unable to POST metrics to collector, " + connectUrl + ", " +
-            "statusCode = " + statusCode);
-      } else {
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("Metrics posted to Collector " + connectUrl);
-        }
-      }
-      cleanupInputStream(connection.getInputStream());
-      // reset failedCollectorConnectionsCounter to "0"
-      failedCollectorConnectionsCounter.set(0);
-      return true;
-    } catch (IOException ioe) {
-      StringBuilder errorMessage =
-          new StringBuilder("Unable to connect to collector, " + connectUrl + "\n"
-                  + "This exception will be ignored for the next " + NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS + " occurrences\n");
-      try {
-        if ((connection != null)) {
-          errorMessage.append(cleanupInputStream(connection.getErrorStream()));
-        }
-      } catch (IOException e) {
-        //NOP
-      }
-
-      if (failedCollectorConnectionsCounter.getAndIncrement() == 0) {
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(errorMessage, ioe);
-        } else {
-          LOG.info(errorMessage);
-        }
-        throw new UnableToConnectException(ioe).setConnectUrl(connectUrl);
-      } else {
-        failedCollectorConnectionsCounter.compareAndSet(NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS, 0);
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(String.format("Ignoring %s AMS connection exceptions", NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS));
-        }
-        return false;
-      }
-    }
-  }
-
-  private int emitMetricsJson(HttpURLConnection connection, int timeout, String jsonData) throws IOException {
-    connection.setRequestMethod("POST");
-    connection.setRequestProperty("Content-Type", "application/json");
-    connection.setRequestProperty("Connection", "Keep-Alive");
-    connection.setConnectTimeout(timeout);
-    connection.setReadTimeout(timeout);
-    connection.setDoOutput(true);
-
-    if (jsonData != null) {
-      try (OutputStream os = connection.getOutputStream()) {
-        os.write(jsonData.getBytes("UTF-8"));
-      }
-    }
-
-    int statusCode = connection.getResponseCode();
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("emitMetricsJson: statusCode = " + statusCode);
-    }
-    return statusCode;
-  }
-
-  protected String getCurrentCollectorHost() {
-    String collectorHost;
-    // Get cached target
-    if (targetCollectorHostSupplier != null) {
-      collectorHost = targetCollectorHostSupplier.get();
-      // Last X attempts have failed - force refresh
-      if (failedCollectorConnectionsCounter.get() > RETRY_COUNT_BEFORE_COLLECTOR_FAILOVER) {
-        LOG.debug("Removing collector " + collectorHost + " from allKnownLiveCollectors.");
-        allKnownLiveCollectors.remove(collectorHost);
-        targetCollectorHostSupplier = null;
-        collectorHost = findPreferredCollectHost();
-      }
-    } else {
-      collectorHost = findPreferredCollectHost();
-    }
-
-    if (collectorHost == null) {
-      if (nullCollectorCounter.getAndIncrement() == 0) {
-        LOG.info("No live collector to send metrics to. Metrics to be sent will be discarded. " +
-          "This message will be skipped for the next " + NUMBER_OF_NULL_COLLECTOR_EXCEPTIONS + " times.");
-      } else {
-        nullCollectorCounter.compareAndSet(NUMBER_OF_NULL_COLLECTOR_EXCEPTIONS, 0);
-      }
-    } else {
-      nullCollectorCounter.set(0);
-    }
-    return collectorHost;
-  }
-
-  /**
-   * @param metrics metrics to post; values are aligned to the minute mark and
-   *                the last, incomplete minute is cached for a future iteration
-   */
-  protected boolean emitMetrics(TimelineMetrics metrics) {
-    return emitMetrics(metrics, false);
-  }
-
-  /**
-   * @param metrics metrics to post; unless postAllCachedMetrics is set, values
-   *                are aligned to the minute mark and the last, incomplete
-   *                minute is cached for a future iteration
-   * @param postAllCachedMetrics if true, all cached metrics are posted as well,
-   *                             ignoring minute alignment
-   * @return true if the metrics were posted successfully
-   */
-  protected boolean emitMetrics(TimelineMetrics metrics, boolean postAllCachedMetrics) {
-    String connectUrl;
-    boolean validCollectorHost = true;
-
-    if (isHostInMemoryAggregationEnabled()) {
-      String hostname = "localhost";
-      if (getHostInMemoryAggregationProtocol().equalsIgnoreCase("https")) {
-        hostname = getHostname();
-      }
-      connectUrl = constructTimelineMetricUri(getHostInMemoryAggregationProtocol(), hostname, String.valueOf(getHostInMemoryAggregationPort()));
-    } else {
-      String collectorHost  = getCurrentCollectorHost();
-      if (collectorHost == null) {
-        validCollectorHost = false;
-      }
-      connectUrl = getCollectorUri(collectorHost);
-    }
-
-    TimelineMetrics metricsToEmit = alignMetricsByMinuteMark(metrics);
-
-    if (postAllCachedMetrics) {
-      for (TimelineMetric timelineMetric : metricsPostCache.asMap().values()) {
-        metricsToEmit.addOrMergeTimelineMetric(timelineMetric);
-      }
-      metricsPostCache.invalidateAll();
-    }
-
-    if (validCollectorHost) {
-      String jsonData = null;
-      LOG.debug("EmitMetrics connectUrl = "  + connectUrl);
-      try {
-        jsonData = mapper.writeValueAsString(metricsToEmit);
-      } catch (IOException e) {
-        LOG.error("Unable to parse metrics", e);
-      }
-      if (jsonData != null) {
-        return emitMetricsJson(connectUrl, jsonData);
-      }
-    }
-    return false;
-  }
-
-  /**
-   * Get the associated app cookie manager.
-   *
-   * @return the app cookie manager
-   */
-  public synchronized AppCookieManager getAppCookieManager() {
-    if (appCookieManager == null) {
-      appCookieManager = new AppCookieManager();
-    }
-    return appCookieManager;
-  }
-
-  /**
-   * Align metrics on minute boundaries so that only complete minutes are sent.
-   * Data points from the incomplete last minute are cached and posted once that
-   * minute completes. Cached metrics are merged with the metrics currently
-   * being posted, e.g.:
-   * first iteration:  metrics from 00m15s to 01m15s are processed;
-   *                   metrics from 00m15s to 00m59s are posted
-   *                   and metrics from 01m00s to 01m15s are cached
-   * second iteration: metrics from 01m25s to 02m55s are processed;
-   *                   cached metrics from the previous call are merged in, so
-   *                   metrics from 01m00s to 02m55s are posted and the cache is empty
-   * @param metrics metrics to align
-   * @return metrics covering complete minutes, plus any merged cached values
-   */
-  protected TimelineMetrics alignMetricsByMinuteMark(TimelineMetrics metrics) {
-    TimelineMetrics allMetricsToPost = new TimelineMetrics();
-
-    for (TimelineMetric metric : metrics.getMetrics()) {
-      TimelineMetric cachedMetric = metricsPostCache.getIfPresent(metric.getMetricName());
-      if (cachedMetric != null) {
-        metric.addMetricValues(cachedMetric.getMetricValues());
-        metricsPostCache.invalidate(metric.getMetricName());
-      }
-    }
-
-    for (TimelineMetric metric : metrics.getMetrics()) {
-      TreeMap<Long, Double> valuesToCache = new TreeMap<>();
-      TreeMap<Long, Double> valuesToPost = metric.getMetricValues();
-
-      // if no more data points can arrive within the last minute, just post the
-      // metrics; otherwise cut off the last, incomplete minute and cache it
-      if (!(valuesToPost.lastKey() % 60000 > 60000 - collectionPeriodMillis)) {
-        Long lastMinute = valuesToPost.lastKey() / 60000;
-        while (!valuesToPost.isEmpty() && valuesToPost.lastKey() / 60000 == lastMinute) {
-          valuesToCache.put(valuesToPost.lastKey(), valuesToPost.get(valuesToPost.lastKey()));
-          valuesToPost.remove(valuesToPost.lastKey());
-        }
-      }
-
-      if (!valuesToCache.isEmpty()) {
-        TimelineMetric metricToCache = new TimelineMetric(metric);
-        metricToCache.setMetricValues(valuesToCache);
-        metricsPostCache.put(metricToCache.getMetricName(), metricToCache);
-      }
-
-      if (!valuesToPost.isEmpty()) {
-        TimelineMetric metricToPost = new TimelineMetric(metric);
-        metricToPost.setMetricValues(valuesToPost);
-        allMetricsToPost.addOrMergeTimelineMetric(metricToPost);
-      }
-    }
-
-    return allMetricsToPost;
-  }
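
As a worked example of the alignment above (a sketch only; it assumes the
TimelineMetric/TimelineMetrics accessors used elsewhere in this class, and it
would run inside a concrete subclass since the method is protected):

    // With collectionPeriodMillis = 10000, points every 10s from 00m15s to
    // 01m15s are split at the minute mark, matching the javadoc example.
    TimelineMetric metric = new TimelineMetric();
    metric.setMetricName("cpu_user");
    TreeMap<Long, Double> values = new TreeMap<>();
    for (long ts = 15000L; ts <= 75000L; ts += 10000L) {
      values.put(ts, 1.0);
    }
    metric.setMetricValues(values);
    TimelineMetrics batch = new TimelineMetrics();
    batch.setMetrics(java.util.Collections.singletonList(metric));
    TimelineMetrics toPost = alignMetricsByMinuteMark(batch);
    // toPost holds 15000..55000; 65000 and 75000 stay in metricsPostCache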
-
-  /**
-   * Cleans up and closes an input stream;
-   * see http://docs.oracle.com/javase/6/docs/technotes/guides/net/http-keepalive.html
-   * @param is the InputStream to clean up
-   * @return string read from the InputStream
-   * @throws IOException if reading or closing the stream fails
-   */
-  protected String cleanupInputStream(InputStream is) throws IOException {
-    StringBuilder sb = new StringBuilder();
-    if (is != null) {
-      try (
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr)
-      ) {
-        // read the response body
-        String line;
-        while ((line = br.readLine()) != null) {
-          if (LOG.isDebugEnabled()) {
-            sb.append(line);
-          }
-        }
-      } finally {
-        is.close();
-      }
-    }
-    return sb.toString();
-  }
-
-  // Get a connection
-  protected HttpURLConnection getConnection(String spec) throws IOException {
-    return (HttpURLConnection) new URL(spec).openConnection();
-  }
-
-  // Get an ssl connection
-  protected HttpsURLConnection getSSLConnection(String spec)
-    throws IOException, IllegalStateException {
-
-    HttpsURLConnection connection = (HttpsURLConnection) (new URL(spec).openConnection());
-
-    connection.setSSLSocketFactory(sslSocketFactory);
-
-    return connection;
-  }
-
-  protected void loadTruststore(String trustStorePath, String trustStoreType,
-                                String trustStorePassword) {
-    if (sslSocketFactory == null) {
-      if (trustStorePath == null || trustStorePassword == null) {
-        String msg = "Can't load TrustStore. Truststore path or password is not set.";
-        LOG.error(msg);
-        throw new IllegalStateException(msg);
-      }
-      FileInputStream in = null;
-      try {
-        in = new FileInputStream(new File(trustStorePath));
-        KeyStore store = KeyStore.getInstance(trustStoreType == null ?
-          KeyStore.getDefaultType() : trustStoreType);
-        store.load(in, trustStorePassword.toCharArray());
-        TrustManagerFactory tmf = TrustManagerFactory
-          .getInstance(TrustManagerFactory.getDefaultAlgorithm());
-        tmf.init(store);
-        SSLContext context = SSLContext.getInstance("TLS");
-        context.init(null, tmf.getTrustManagers(), null);
-        sslSocketFactory = context.getSocketFactory();
-      } catch (Exception e) {
-        LOG.error("Unable to load TrustStore", e);
-      } finally {
-        if (in != null) {
-          try {
-            in.close();
-          } catch (IOException e) {
-            LOG.error("Unable to load TrustStore", e);
-          }
-        }
-      }
-    }
-  }
-
-  /**
-   * Find appropriate write shard for this sink using the {@link org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardStrategy}
-   *
-   * 1. Use configured collector(s) to discover available collectors
-   * 2. If configured collector(s) are unresponsive, check Zookeeper to find live hosts
-   * 3. Refresh known collector list using ZK
-   * 4. Default: Return configured collector with no side effect due to discovery.
-   *
-   * Throws {@link MetricsSinkInitializationException} if called before
-   * initialization; no other side effects.
-   *
-   * @return String Collector hostname
-   */
-  protected synchronized String findPreferredCollectHost() {
-    if (!isInitializedForHA) {
-      init();
-    }
-
-    shardExpired = false;
-    // Auto expire and re-calculate after 1 hour
-    if (targetCollectorHostSupplier != null) {
-      String targetCollector = targetCollectorHostSupplier.get();
-      if (targetCollector != null) {
-        return targetCollector;
-      }
-    }
-
-    // Reach out to all configured collectors before Zookeeper
-    Collection<String> collectorHosts = getConfiguredCollectorHosts();
-    refreshCollectorsFromConfigured(collectorHosts);
-
-    // Lookup Zookeeper for live hosts - max 10 seconds wait time
-    long currentTime = System.currentTimeMillis();
-    if (allKnownLiveCollectors.size() == 0 && getZookeeperQuorum() != null
-      && (currentTime - lastFailedZkRequestTime) > zookeeperBackoffTimeMillis) {
-
-      LOG.debug("No live collectors from configuration. Requesting zookeeper...");
-      allKnownLiveCollectors.addAll(collectorHAHelper.findLiveCollectorHostsFromZNode());
-      boolean noNewCollectorFromZk = true;
-      for (String collectorHostFromZk : allKnownLiveCollectors) {
-        if (!collectorHosts.contains(collectorHostFromZk)) {
-          noNewCollectorFromZk = false;
-          break;
-        }
-      }
-      if (noNewCollectorFromZk) {
-        LOG.debug("No new collector was found from Zookeeper. Will not request zookeeper for " + zookeeperBackoffTimeMillis + " millis");
-        lastFailedZkRequestTime = System.currentTimeMillis();
-      }
-    }
-
-    if (allKnownLiveCollectors.size() != 0) {
-      targetCollectorHostSupplier = Suppliers.memoizeWithExpiration(
-        new Supplier<String>() {
-          @Override
-          public String get() {
-            // shardExpired determines whether Supplier.get() was invoked through
-            // the findPreferredCollectHost method (no need to refresh collector
-            // hosts) or through expiry (refresh needed to pick up collectors
-            // whose liveness may have changed since the last lookup).
-            if (shardExpired) {
-              refreshCollectorsFromConfigured(getConfiguredCollectorHosts());
-            }
-            return metricSinkWriteShardStrategy.findCollectorShard(new ArrayList<>(allKnownLiveCollectors));
-          }
-        },  // random.nextInt(max - min + 1) + min, i.e. 60 to 75 minutes
-        rand.nextInt(COLLECTOR_HOST_CACHE_MAX_EXPIRATION_MINUTES
-          - COLLECTOR_HOST_CACHE_MIN_EXPIRATION_MINUTES + 1)
-          + COLLECTOR_HOST_CACHE_MIN_EXPIRATION_MINUTES,
-        TimeUnit.MINUTES
-      );
-
-      String collectorHost = targetCollectorHostSupplier.get();
-      shardExpired = true;
-      return collectorHost;
-    }
-    LOG.debug("Couldn't find any live collectors. Returning null");
-    shardExpired = true;
-    return null;
-  }
-
-  private void refreshCollectorsFromConfigured(Collection<String> collectorHosts) {
-
-    LOG.debug("Trying to find live collector host from : " + collectorHosts);
-    if (collectorHosts != null && !collectorHosts.isEmpty()) {
-      for (String hostStr : collectorHosts) {
-        hostStr = hostStr.trim();
-        if (!hostStr.isEmpty()) {
-          try {
-            Collection<String> liveHosts = findLiveCollectorHostsFromKnownCollector(hostStr, getCollectorPort());
-            // Update live Hosts - current host will already be a part of this
-            for (String host : liveHosts) {
-              allKnownLiveCollectors.add(host);
-            }
-            break; // Found at least 1 live collector
-          } catch (MetricCollectorUnavailableException e) {
-            LOG.debug("Collector " + hostStr + " is not longer live. Removing " +
-              "it from list of know live collector hosts : " + allKnownLiveCollectors);
-            allKnownLiveCollectors.remove(hostStr);
-          }
-        }
-      }
-    }
-  }
-
-  Collection<String> findLiveCollectorHostsFromKnownCollector(String host, String port) throws MetricCollectorUnavailableException {
-    List<String> collectors = new ArrayList<>();
-    HttpURLConnection connection = null;
-    StringBuilder sb = new StringBuilder(getCollectorProtocol());
-    sb.append("://");
-    sb.append(host);
-    sb.append(":");
-    sb.append(port);
-    sb.append(COLLECTOR_LIVE_NODES_PATH);
-    String connectUrl = sb.toString();
-    LOG.debug("Requesting live collector nodes : " + connectUrl);
-    try {
-      connection = getCollectorProtocol().startsWith("https") ?
-        getSSLConnection(connectUrl) : getConnection(connectUrl);
-
-      connection.setRequestMethod("GET");
-      // 5 seconds for this op is plenty of wait time
-      connection.setConnectTimeout(3000);
-      connection.setReadTimeout(2000);
-
-      int responseCode = connection.getResponseCode();
-      if (responseCode == 200) {
-        try (InputStream in = connection.getInputStream()) {
-          StringWriter writer = new StringWriter();
-          IOUtils.copy(in, writer);
-          try {
-            collectors = gson.fromJson(writer.toString(), new TypeToken<List<String>>(){}.getType());
-          } catch (JsonSyntaxException jse) {
-            // Swallow this at the behest of still trying to POST
-            LOG.debug("Exception deserializing the json data on live " +
-              "collector nodes.", jse);
-          }
-        }
-      }
-
-    } catch (IOException ioe) {
-      StringBuilder errorMessage =
-        new StringBuilder("Unable to connect to collector, " + connectUrl);
-      try {
-        if ((connection != null)) {
-          errorMessage.append(cleanupInputStream(connection.getErrorStream()));
-        }
-      } catch (IOException e) {
-        //NOP
-      }
-      LOG.debug(errorMessage);
-      LOG.debug(ioe);
-      String warnMsg = "Unable to connect to collector to find live nodes.";
-      throw new MetricCollectorUnavailableException(warnMsg);
-    }
-    return collectors;
-  }
-
-  // Constructing without UriBuilder to avoid unfavorable httpclient
-  // dependencies
-  protected String constructTimelineMetricUri(String protocol, String host, String port) {
-    StringBuilder sb = new StringBuilder(protocol);
-    sb.append("://");
-    sb.append(host);
-    sb.append(":");
-    sb.append(port);
-    sb.append(WS_V1_TIMELINE_METRICS);
-    return sb.toString();
-  }
-
-  /**
-   * Parses an input String of the form "host1,host2" into a Collection of hostnames.
-   */
-  public Collection<String> parseHostsStringIntoCollection(String hostsString) {
-    Set<String> hosts = new HashSet<>();
-
-    if (StringUtils.isEmpty(hostsString)) {
-      LOG.error("No Metric collector configured.");
-      return hosts;
-    }
-
-    for (String host : hostsString.split(",")) {
-      if (StringUtils.isEmpty(host)) {
-        continue;
-      }
-      hosts.add(host.trim());
-    }
-
-    return hosts;
-  }
-
-  private long getZookeeperBackoffTimeMillis() {
-    return (zookeeperMinBackoffTimeMins +
-      rand.nextInt(zookeeperMaxBackoffTimeMins - zookeeperMinBackoffTimeMins + 1)) * 60 * 1000L;
-  }
-
-  // For now, this is used only for testing.
-  protected Cache<String, TimelineMetric> getMetricsPostCache() {
-    return metricsPostCache;
-  }
-
-  /**
-   * Get a pre-formatted URI for the collector
-   */
-  abstract protected String getCollectorUri(String host);
-
-  abstract protected String getCollectorProtocol();
-
-  abstract protected String getCollectorPort();
-
-  /**
-   * Timeout for the emit calls, in seconds.
-   */
-  abstract protected int getTimeoutSeconds();
-
-  /**
-   * Get the zookeeper quorum for the cluster used to find collector
-   * @return String "host1:port1,host2:port2"
-   */
-  abstract protected String getZookeeperQuorum();
-
-  /**
-   * Get the pre-configured list of available collector hosts.
-   * @return Collection of hostnames, e.g. host1, host2
-   */
-  abstract protected Collection<String> getConfiguredCollectorHosts();
-
-  /**
-   * Get hostname used for calculating write shard.
-   * @return String "host1"
-   */
-  abstract protected String getHostname();
-
-  /**
-   * Check if host in-memory aggregation is enabled.
-   * @return true if host in-memory aggregation is enabled
-   */
-  abstract protected boolean isHostInMemoryAggregationEnabled();
-
-  /**
-   * In-memory aggregation port.
-   * @return the port the host in-memory aggregator listens on
-   */
-  abstract protected int getHostInMemoryAggregationPort();
-
-  /**
-   * In-memory aggregation protocol.
-   * @return the protocol, e.g. "http" or "https"
-   */
-  abstract protected String getHostInMemoryAggregationProtocol();
-}
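
For reference, a minimal concrete sink over the class removed above only has to
supply the abstract getters. This is a sketch, not shipped code; every returned
value below (hosts, ports, protocol) is an illustrative assumption:

    import java.util.Collection;

    public class SimpleTimelineMetricsSink extends AbstractTimelineMetricsSink {
      @Override protected String getCollectorUri(String host) {
        return constructTimelineMetricUri(getCollectorProtocol(), host, getCollectorPort());
      }
      @Override protected String getCollectorProtocol() { return "http"; }
      @Override protected String getCollectorPort() { return "6188"; }
      @Override protected int getTimeoutSeconds() { return DEFAULT_POST_TIMEOUT_SECONDS; }
      @Override protected String getZookeeperQuorum() { return "zk1:2181"; }
      @Override protected Collection<String> getConfiguredCollectorHosts() {
        return parseHostsStringIntoCollection("collector1,collector2");
      }
      @Override protected String getHostname() { return "host1.example.com"; }
      @Override protected boolean isHostInMemoryAggregationEnabled() { return false; }
      @Override protected int getHostInMemoryAggregationPort() { return 61888; }
      @Override protected String getHostInMemoryAggregationProtocol() { return "http"; }
    }

A caller then builds a TimelineMetrics batch and invokes emitMetrics(batch);
the base class handles collector discovery, failover and the actual POST.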
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AggregationResult.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AggregationResult.java
deleted file mode 100644
index c903e3d..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AggregationResult.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-import java.util.Set;
-
-@XmlRootElement(name="AggregationResult")
-public class AggregationResult {
-    protected Set<TimelineMetricWithAggregatedValues> result;
-    protected Long timeInMilis;
-
-    @Override
-    public String toString() {
-        return "AggregationResult{" +
-                "result=" + result +
-                ", timeInMilis=" + timeInMilis +
-                '}';
-    }
-
-    public AggregationResult() {
-    }
-
-    public AggregationResult(Set<TimelineMetricWithAggregatedValues> result, Long timeInMilis) {
-        this.result = result;
-        this.timeInMilis = timeInMilis;
-    }
-    @XmlElement
-    public Set<TimelineMetricWithAggregatedValues> getResult() {
-        return result;
-    }
-
-    public void setResult(Set<TimelineMetricWithAggregatedValues> result) {
-        this.result = result;
-    }
-    @XmlElement
-    public Long getTimeInMilis() {
-        return timeInMilis;
-    }
-
-    public void setTimeInMilis(Long timeInMilis) {
-        this.timeInMilis = timeInMilis;
-    }
-}
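
Serializing the removed AggregationResult with a JAXB-aware Jackson mapper, as
a sketch ('values' and the timestamp are illustrative placeholders; note the
JSON key follows the field's "timeInMilis" spelling):

    Set<TimelineMetricWithAggregatedValues> values = new HashSet<>();
    AggregationResult aggregation = new AggregationResult(values, System.currentTimeMillis());
    String json = mapper.writeValueAsString(aggregation);
    // => roughly {"result":[],"timeInMilis":1538038117000}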
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManager.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManager.java
deleted file mode 100644
index bcba238..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManager.java
+++ /dev/null
@@ -1,219 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import java.io.IOException;
-import java.net.URI;
-import java.security.Principal;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.http.Header;
-import org.apache.http.HeaderElement;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpHost;
-import org.apache.http.HttpRequest;
-import org.apache.http.HttpResponse;
-import org.apache.http.auth.AuthScope;
-import org.apache.http.auth.Credentials;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpOptions;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.client.params.AuthPolicy;
-import org.apache.http.impl.auth.SPNegoSchemeFactory;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.util.EntityUtils;
-
-/**
- * Handles SPNego authentication as a client of a hadoop service and caches
- * the hadoop.auth cookie returned by the service on successful SPNego
- * authentication. Refreshes the hadoop.auth cookie on demand if the cookie
- * has expired.
- */
-public class AppCookieManager {
-
-  static final String HADOOP_AUTH = "hadoop.auth";
-  private static final String HADOOP_AUTH_EQ = "hadoop.auth=";
-  private static final String SET_COOKIE = "Set-Cookie";
-
-  private static final EmptyJaasCredentials EMPTY_JAAS_CREDENTIALS = new EmptyJaasCredentials();
-
-  private Map<String, String> endpointCookieMap = new ConcurrentHashMap<String, String>();
-  private static Log LOG = LogFactory.getLog(AppCookieManager.class);
-
-  /**
-   * Utility method to exercise AppCookieManager directly
-   * @param args element 0 of args should be a URL to a hadoop service protected by SPNego
-   * @throws IOException in case of errors
-   */
-  public static void main(String[] args) throws IOException {
-    new AppCookieManager().getAppCookie(args[0], false);
-  }
-
-  public AppCookieManager() {
-  }
-
-  /**
-   * Returns hadoop.auth cookie, doing needed SPNego authentication
-   *
-   * @param endpoint
-   *          the URL of the Hadoop service
-   * @param refresh
-   *          flag indicating whether to refresh the cookie; if
-   *          <code>true</code>, we do a new SPNego authentication and refresh
-   *          the cookie even if the cookie already exists in local cache
-   * @return hadoop.auth cookie value
-   * @throws IOException
-   *           in case of problem getting hadoop.auth cookie
-   */
-  public String getAppCookie(String endpoint, boolean refresh)
-      throws IOException {
-
-    HttpUriRequest outboundRequest = new HttpGet(endpoint);
-    URI uri = outboundRequest.getURI();
-    String scheme = uri.getScheme();
-    String host = uri.getHost();
-    int port = uri.getPort();
-    String path = uri.getPath();
-    if (!refresh) {
-      String appCookie = endpointCookieMap.get(endpoint);
-      if (appCookie != null) {
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("got cached cookie");
-        }
-        return appCookie;
-      }
-    }
-
-    clearAppCookie(endpoint);
-
-    DefaultHttpClient client = new DefaultHttpClient();
-    SPNegoSchemeFactory spNegoSF = new SPNegoSchemeFactory(/* stripPort */true);
-    client.getAuthSchemes().register(AuthPolicy.SPNEGO, spNegoSF);
-    client.getCredentialsProvider().setCredentials(
-        new AuthScope(/* host */null, /* port */-1, /* realm */null),
-        EMPTY_JAAS_CREDENTIALS);
-
-    String hadoopAuthCookie = null;
-    HttpResponse httpResponse = null;
-    try {
-      HttpHost httpHost = new HttpHost(host, port, scheme);
-      HttpRequest httpRequest = new HttpOptions(path);
-      httpResponse = client.execute(httpHost, httpRequest);
-      Header[] headers = httpResponse.getHeaders(SET_COOKIE);
-      if (LOG.isDebugEnabled()) {
-        for (Header header : headers) {
-          LOG.debug(header.getName() + " : " + header.getValue());
-        }
-      }
-      hadoopAuthCookie = getHadoopAuthCookieValue(headers);
-      if (hadoopAuthCookie == null) {
-        int statusCode = httpResponse.getStatusLine().getStatusCode();
-        HttpEntity entity = httpResponse.getEntity();
-        String responseBody = entity != null ? EntityUtils.toString(entity) : null;
-        LOG.error("SPNego authentication failed with statusCode = " + statusCode + ", responseBody = " + responseBody + ", can not get hadoop.auth cookie for URL: " + endpoint);
-        return null;
-      }
-    } finally {
-      if (httpResponse != null) {
-        HttpEntity entity = httpResponse.getEntity();
-        if (entity != null) {
-          entity.getContent().close();
-        }
-      }
-
-    }
-
-    hadoopAuthCookie = HADOOP_AUTH_EQ + quote(hadoopAuthCookie);
-    setAppCookie(endpoint, hadoopAuthCookie);
-    if (LOG.isInfoEnabled()) {
-      LOG.info("Successful SPNego authentication to URL:" + uri.toString());
-    }
-    return hadoopAuthCookie;
-  }
-
-
-  /**
-   * Returns the cached app cookie
-   *  @param endpoint the hadoop end point we authenticate to
-   * @return the cached app cookie, can be null
-   */
-  public String getCachedAppCookie(String endpoint) {
-    return endpointCookieMap.get(endpoint);
-  }
-
-  /**
-   *  Sets the cached app cookie cache
-   *  @param endpoint the hadoop end point we authenticate to
-   *  @param appCookie the app cookie
-   */
-  private void setAppCookie(String endpoint, String appCookie) {
-    endpointCookieMap.put(endpoint, appCookie);
-  }
-
-  /**
-   *  Clears the cached app cookie
-   *  @param endpoint the hadoop end point we authenticate to
-   */
-  private void clearAppCookie(String endpoint) {
-    endpointCookieMap.remove(endpoint);
-  }
-
-  static String quote(String s) {
-    return s == null ? s : "\"" + s + "\"";
-  }
-
-  static String getHadoopAuthCookieValue(Header[] headers) {
-    if (headers == null) {
-      return null;
-    }
-    for (Header header : headers) {
-      HeaderElement[] elements = header.getElements();
-      for (HeaderElement element : elements) {
-        String cookieName = element.getName();
-        if (cookieName.equals(HADOOP_AUTH)) {
-          if (element.getValue() != null) {
-            String trimmedVal = element.getValue().trim();
-            if (!trimmedVal.isEmpty()) {
-              return trimmedVal;
-            }
-          }
-        }
-      }
-    }
-    return null;
-  }
-
-
-  private static class EmptyJaasCredentials implements Credentials {
-
-    public String getPassword() {
-      return null;
-    }
-
-    public Principal getUserPrincipal() {
-      return null;
-    }
-
-  }
-
-}
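
Usage of the removed AppCookieManager mirrors what emitMetricsJson does in
AbstractTimelineMetricsSink: try the cached cookie first and force a refresh
only after a 401 Negotiate challenge. A condensed sketch (the endpoint URL is
illustrative; both getAppCookie calls can throw IOException):

    AppCookieManager cookieManager = new AppCookieManager();
    String endpoint = "http://collector.example.com:6188/ws/v1/timeline/metrics";
    String cookie = cookieManager.getCachedAppCookie(endpoint);  // may be null
    if (cookie == null) {
      cookie = cookieManager.getAppCookie(endpoint, false);      // SPNego round trip
    }
    // ...on an HTTP 401 response carrying a "Negotiate" challenge:
    cookie = cookieManager.getAppCookie(endpoint, true);         // force refresh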
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/ContainerMetric.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/ContainerMetric.java
deleted file mode 100644
index 0e2051b..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/ContainerMetric.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-/**
- * This differs from TimelineMetric in that this class contains all the fields
- * for a single metric.
- */
-@XmlRootElement(name = "containermetric")
-@XmlAccessorType(XmlAccessType.FIELD)
-@InterfaceAudience.Public
-@InterfaceStability.Unstable
-public class ContainerMetric {
-  private String hostName;
-  private String containerId;
-  private int pmemLimit;
-  private int vmemLimit;
-  private int pmemUsedAvg;
-  private int pmemUsedMin;
-  private int pmemUsedMax;
-  private int pmem50Pct;
-  private int pmem75Pct;
-  private int pmem90Pct;
-  private int pmem95Pct;
-  private int pmem99Pct;
-  private long launchDuration;
-  private long localizationDuration;
-  private long startTime;
-  private long finishTime;
-  private int exitCode;
-
-
-  public ContainerMetric() {
-
-  }
-
-  public String getHostName() {
-    return hostName;
-  }
-
-  public void setHostName(String hostName) {
-    this.hostName = hostName;
-  }
-
-  public String getContainerId() {
-    return containerId;
-  }
-
-  public void setContainerId(String containerId) {
-    this.containerId = containerId;
-  }
-
-  public int getPmemLimit() {
-    return pmemLimit;
-  }
-
-  public void setPmemLimit(int pmemLimit) {
-    this.pmemLimit = pmemLimit;
-  }
-
-  public int getVmemLimit() {
-    return vmemLimit;
-  }
-
-  public void setVmemLimit(int vmemLimit) {
-    this.vmemLimit = vmemLimit;
-  }
-
-  public int getPmemUsedAvg() {
-    return pmemUsedAvg;
-  }
-
-  public void setPmemUsedAvg(int pmemUsedAvg) {
-    this.pmemUsedAvg = pmemUsedAvg;
-  }
-
-  public int getPmemUsedMin() {
-    return pmemUsedMin;
-  }
-
-  public void setPmemUsedMin(int pmemUsedMin) {
-    this.pmemUsedMin = pmemUsedMin;
-  }
-
-  public int getPmemUsedMax() {
-    return pmemUsedMax;
-  }
-
-  public void setPmemUsedMax(int pmemUsedMax) {
-    this.pmemUsedMax = pmemUsedMax;
-  }
-
-  public int getPmem50Pct() {
-    return pmem50Pct;
-  }
-
-  public void setPmem50Pct(int pmem50Pct) {
-    this.pmem50Pct = pmem50Pct;
-  }
-
-  public int getPmem75Pct() {
-    return pmem75Pct;
-  }
-
-  public void setPmem75Pct(int pmem75Pct) {
-    this.pmem75Pct = pmem75Pct;
-  }
-
-  public int getPmem90Pct() {
-    return pmem90Pct;
-  }
-
-  public void setPmem90Pct(int pmem90Pct) {
-    this.pmem90Pct = pmem90Pct;
-  }
-
-  public int getPmem95Pct() {
-    return pmem95Pct;
-  }
-
-  public void setPmem95Pct(int pmem95Pct) {
-    this.pmem95Pct = pmem95Pct;
-  }
-
-  public int getPmem99Pct() {
-    return pmem99Pct;
-  }
-
-  public void setPmem99Pct(int pmem99Pct) {
-    this.pmem99Pct = pmem99Pct;
-  }
-
-  public long getLaunchDuration() {
-    return launchDuration;
-  }
-
-  public void setLaunchDuration(long launchDuration) {
-    this.launchDuration = launchDuration;
-  }
-
-  public long getLocalizationDuration() {
-    return localizationDuration;
-  }
-
-  public void setLocalizationDuration(long localizationDuration) {
-    this.localizationDuration = localizationDuration;
-  }
-
-  public long getStartTime() {
-    return startTime;
-  }
-
-  public void setStartTime(long startTime) {
-    this.startTime = startTime;
-  }
-
-  public long getFinishTime() {
-    return finishTime;
-  }
-
-  public void setFinishTime(long finishTime) {
-    this.finishTime = finishTime;
-  }
-
-  public int getExitCode() {
-    return exitCode;
-  }
-
-  public void setExitCode(int exitCode) {
-    this.exitCode = exitCode;
-  }
-
-  @Override
-  public String toString() {
-    return "ContainerMetric{" +
-        "hostName='" + hostName + '\'' +
-        ", containerId='" + containerId + '\'' +
-        ", pmemLimit=" + pmemLimit +
-        ", vmemLimit=" + vmemLimit +
-        ", pmemUsedAvg=" + pmemUsedAvg +
-        ", pmemUsedMin=" + pmemUsedMin +
-        ", pmemUsedMax=" + pmemUsedMax +
-        ", pmem50Pct=" + pmem50Pct +
-        ", pmem75Pct=" + pmem75Pct +
-        ", pmem90Pct=" + pmem90Pct +
-        ", pmem95Pct=" + pmem95Pct +
-        ", pmem99Pct=" + pmem99Pct +
-        ", launchDuration=" + launchDuration +
-        ", localizationDuration=" + localizationDuration +
-        ", startTime=" + startTime +
-        ", finishTime=" + finishTime +
-        ", exitCode=" + exitCode +
-        '}';
-  }
-}
\ No newline at end of file
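
Populating the removed ContainerMetric bean is plain setter calls; a sketch
with illustrative values:

    ContainerMetric metric = new ContainerMetric();
    metric.setHostName("nm-host-1.example.com");
    metric.setContainerId("container_1538038117000_0001_01_000002");
    metric.setPmemLimit(2048);
    metric.setVmemLimit(4096);
    metric.setPmemUsedAvg(1024);
    metric.setStartTime(System.currentTimeMillis());
    metric.setExitCode(0);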
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetadataException.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetadataException.java
deleted file mode 100644
index 01230af..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetadataException.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-/**
- * Marker for checked Exceptions thrown from Metadata management layer.
- */
-public class MetadataException extends Exception {
-  // Default constructor
-  public MetadataException(String message) {
-    super(message);
-  }
-}
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricAggregate.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricAggregate.java
deleted file mode 100644
index 84cba0e..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricAggregate.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.annotate.JsonSubTypes;
-import org.codehaus.jackson.map.ObjectMapper;
-
-import java.io.IOException;
-
-/**
- * Base class for aggregated metric values: running sum, deviation, max and min.
- */
-@JsonSubTypes({@JsonSubTypes.Type(value = MetricClusterAggregate.class),
-  @JsonSubTypes.Type(value = MetricHostAggregate.class)})
-@InterfaceAudience.Public
-@InterfaceStability.Unstable
-public class MetricAggregate {
-  private static final ObjectMapper mapper = new ObjectMapper();
-
-  protected Double sum = 0.0;
-  protected Double deviation;
-  protected Double max = Double.MIN_VALUE;
-  protected Double min = Double.MAX_VALUE;
-
-  public MetricAggregate() {
-  }
-
-  MetricAggregate(Double sum, Double deviation, Double max,
-                  Double min) {
-    this.sum = sum;
-    this.deviation = deviation;
-    this.max = max;
-    this.min = min;
-  }
-
-  public void updateSum(Double sum) {
-    this.sum += sum;
-  }
-
-  public void updateMax(Double max) {
-    if (max > this.max) {
-      this.max = max;
-    }
-  }
-
-  public void updateMin(Double min) {
-    if (min < this.min) {
-      this.min = min;
-    }
-  }
-
-  @JsonProperty("sum")
-  public Double getSum() {
-    return sum;
-  }
-
-  @JsonProperty("deviation")
-  public Double getDeviation() {
-    return deviation;
-  }
-
-  @JsonProperty("max")
-  public Double getMax() {
-    return max;
-  }
-
-  @JsonProperty("min")
-  public Double getMin() {
-    return min;
-  }
-
-  public void setSum(Double sum) {
-    this.sum = sum;
-  }
-
-  public void setDeviation(Double deviation) {
-    this.deviation = deviation;
-  }
-
-  public void setMax(Double max) {
-    this.max = max;
-  }
-
-  public void setMin(Double min) {
-    this.min = min;
-  }
-
-  public String toJSON() throws IOException {
-    return mapper.writeValueAsString(this);
-  }
-}
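
The removed MetricAggregate maintains a running sum/max/min; a short sketch of
the update methods and JSON output (toJSON can throw IOException; the exact
field order may vary):

    MetricAggregate aggregate = new MetricAggregate();
    aggregate.updateSum(4.0);  // sum: 0.0 -> 4.0
    aggregate.updateMax(3.0);  // max starts at Double.MIN_VALUE, becomes 3.0
    aggregate.updateMin(1.0);  // min starts at Double.MAX_VALUE, becomes 1.0
    String json = aggregate.toJSON();
    // => roughly {"sum":4.0,"deviation":null,"max":3.0,"min":1.0}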
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricClusterAggregate.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricClusterAggregate.java
deleted file mode 100644
index 7ef2c1d..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricClusterAggregate.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
-
-/**
- * Cluster-level aggregate that adds the number of reporting hosts to the base
- * aggregate values.
- */
-public class MetricClusterAggregate extends MetricAggregate {
-  private int numberOfHosts;
-
-  @JsonCreator
-  public MetricClusterAggregate() {
-  }
-
-  public MetricClusterAggregate(Double sum, int numberOfHosts, Double deviation,
-                         Double max, Double min) {
-    super(sum, deviation, max, min);
-    this.numberOfHosts = numberOfHosts;
-  }
-
-  @JsonProperty("numberOfHosts")
-  public int getNumberOfHosts() {
-    return numberOfHosts;
-  }
-
-  public void updateNumberOfHosts(int count) {
-    this.numberOfHosts += count;
-  }
-
-  public void setNumberOfHosts(int numberOfHosts) {
-    this.numberOfHosts = numberOfHosts;
-  }
-
-  /**
-   * Merge another aggregate for the minute: update min, max, sum and host count.
-   */
-  public void updateAggregates(MetricClusterAggregate hostAggregate) {
-    updateMax(hostAggregate.getMax());
-    updateMin(hostAggregate.getMin());
-    updateSum(hostAggregate.getSum());
-    updateNumberOfHosts(hostAggregate.getNumberOfHosts());
-  }
-
-  @Override
-  public String toString() {
-    return "MetricAggregate{" +
-      "sum=" + sum +
-      ", numberOfHosts=" + numberOfHosts +
-      ", deviation=" + deviation +
-      ", max=" + max +
-      ", min=" + min +
-      '}';
-  }
-}
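
Merging two of the removed cluster aggregates via updateAggregates works out
as follows (values chosen for illustration):

    MetricClusterAggregate a = new MetricClusterAggregate(10.0, 2, null, 7.0, 3.0);
    MetricClusterAggregate b = new MetricClusterAggregate(6.0, 1, null, 6.0, 1.0);
    a.updateAggregates(b);
    // a now holds sum=16.0, numberOfHosts=3, max=7.0, min=1.0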
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricHostAggregate.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricHostAggregate.java
deleted file mode 100644
index e190913..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricHostAggregate.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
-
-/**
- * Represents a minute-based aggregation of values, used for resolutions
- * greater than a minute.
- */
-public class MetricHostAggregate extends MetricAggregate {
-
-  private long numberOfSamples = 0;
-
-  @JsonCreator
-  public MetricHostAggregate() {
-    super(0.0, 0.0, Double.MIN_VALUE, Double.MAX_VALUE);
-  }
-
-  public MetricHostAggregate(Double sum, int numberOfSamples,
-                             Double deviation,
-                             Double max, Double min) {
-    super(sum, deviation, max, min);
-    this.numberOfSamples = numberOfSamples;
-  }
-
-  @JsonProperty("numberOfSamples")
-  public long getNumberOfSamples() {
-    return numberOfSamples == 0 ? 1 : numberOfSamples;
-  }
-
-  public void updateNumberOfSamples(long count) {
-    this.numberOfSamples += count;
-  }
-
-  public void setNumberOfSamples(long numberOfSamples) {
-    this.numberOfSamples = numberOfSamples;
-  }
-
-  public double calculateAverage() {
-    return sum / numberOfSamples;
-  }
-
-  /**
-   * Merge another aggregate for the minute: update min, max, sum and sample count.
-   */
-  public void updateAggregates(MetricHostAggregate hostAggregate) {
-    updateMax(hostAggregate.getMax());
-    updateMin(hostAggregate.getMin());
-    updateSum(hostAggregate.getSum());
-    updateNumberOfSamples(hostAggregate.getNumberOfSamples());
-  }
-
-  @Override
-  public String toString() {
-    return "MetricHostAggregate{" +
-      "sum=" + sum +
-      ", numberOfSamples=" + numberOfSamples +
-      ", deviation=" + deviation +
-      ", max=" + max +
-      ", min=" + min +
-      '}';
-  }
-}
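
Similarly for the removed host aggregate, where the sample count drives the
average (values chosen for illustration):

    MetricHostAggregate host = new MetricHostAggregate(30.0, 3, 0.0, 15.0, 5.0);
    MetricHostAggregate more = new MetricHostAggregate(10.0, 1, 0.0, 10.0, 10.0);
    host.updateAggregates(more);
    double avg = host.calculateAverage();  // (30.0 + 10.0) / (3 + 1) = 10.0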
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricsSinkInitializationException.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricsSinkInitializationException.java
deleted file mode 100644
index 5760b34..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetricsSinkInitializationException.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-public class MetricsSinkInitializationException extends RuntimeException {
-  // Default constructor
-  public MetricsSinkInitializationException(String message) {
-    super(message);
-  }
-}
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/PostProcessingUtil.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/PostProcessingUtil.java
deleted file mode 100644
index fb09913..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/PostProcessingUtil.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
-import org.apache.commons.math3.analysis.polynomials.PolynomialFunction;
-import org.apache.commons.math3.analysis.polynomials.PolynomialSplineFunction;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-public class PostProcessingUtil {
-
-  /*
-    Helper function to interpolate missing data on a series.
-  */
-  public static Map<Long, Double> interpolateMissingData(Map<Long, Double> metricValues, long expectedInterval) {
-
-    if (metricValues == null)
-      return null;
-
-    Long prevTime = null;
-    Double prevVal = null;
-    Map<Long, Double> interpolatedMetricValues = new TreeMap<Long, Double>();
-
-    for (Map.Entry<Long, Double> timeValueEntry : metricValues.entrySet()) {
-      Long currTime = timeValueEntry.getKey();
-      Double currVal = timeValueEntry.getValue();
-
-      if (prevTime != null) {
-        Long stepTime = prevTime;
-        while ((currTime - stepTime) > expectedInterval) {
-          stepTime+=expectedInterval;
-          double interpolatedValue = interpolate(stepTime,
-            prevTime, prevVal,
-            currTime, currVal);
-          interpolatedMetricValues.put(stepTime, interpolatedValue);
-        }
-      }
-
-      interpolatedMetricValues.put(currTime, currVal);
-      prevTime = currTime;
-      prevVal = currVal;
-    }
-    return interpolatedMetricValues;
-  }
-
-  public static Double interpolate(Long t, Long t1, Double m1,
-                                   Long t2, Double m2) {
-    //Linear Interpolation : y = y0 + (y1 - y0) * ((x - x0) / (x1 - x0))
-    if (m1 == null && m2 == null) {
-      return null;
-    }
-
-    if (m1 == null)
-      return m2;
-
-    if (m2 == null)
-      return m1;
-
-    if (t1 == null || t2 == null)
-      return null;
-
-    double slope = (m2 - m1) / (t2 - t1);
-    return m1 +  slope * (t - t1);
-  }
-
-  public static Map<Long, Double> interpolate(Map<Long, Double> valuesMap, List<Long> requiredTimestamps) {
-
-    LinearInterpolator linearInterpolator = new LinearInterpolator();
-
-    if (valuesMap == null || valuesMap.isEmpty()) {
-      return null;
-    }
-    if (requiredTimestamps == null || requiredTimestamps.isEmpty()) {
-      return null;
-    }
-
-    Map<Long, Double> interpolatedValuesMap = new HashMap<>();
-
-    if (valuesMap.size() == 1) {
-      //Just one value present in the window. Use that value to interpolate all required timestamps.
-      Double value = valuesMap.values().iterator().next();
-      for (Long requiredTs : requiredTimestamps) {
-        interpolatedValuesMap.put(requiredTs, value);
-      }
-      return interpolatedValuesMap;
-    }
-
-    double[] timestamps = new double[valuesMap.size()];
-    double[] metrics = new double[valuesMap.size()];
-
-    int i = 0;
-    for (Map.Entry<Long, Double> entry : valuesMap.entrySet()) {
-      timestamps[i] = (double) entry.getKey();
-      metrics[i++] = entry.getValue();
-    }
-
-    PolynomialSplineFunction function = linearInterpolator.interpolate(timestamps, metrics);
-    PolynomialFunction[] splines = function.getPolynomials();
-    PolynomialFunction first = splines[0];
-
-    for (Long requiredTs : requiredTimestamps) {
-
-      Double interpolatedValue = null;
-      if (timestampInRange(requiredTs, timestamps[0], timestamps[timestamps.length - 1])) {
-        /*
-          Interpolation Case
-          Required TS is within range of the set of values used for interpolation.
-          Hence, we can use library to get the interpolated value.
-         */
-        interpolatedValue = function.value((double) requiredTs);
-      } else {
-        /*
-        Extrapolation Case
-        Required TS outside range of the set of values used for interpolation.
-        We will use the coefficients to make best effort extrapolation
-        y(x)= y1 + m * (x−x1)
-        where, m = (y2−y1)/(x2−x1)
-         */
-        if (first.getCoefficients() != null && first.getCoefficients().length > 0) {
-          /*
-          y = c0 + c1x
-          where c0, c1 are coefficients
-          c1 will not be present if slope is zero.
-           */
-          Double y1 = first.getCoefficients()[0];
-          Double m = (first.getCoefficients().length > 1) ? first.getCoefficients()[1] : 0.0;
-          interpolatedValue = y1 + m * (requiredTs - timestamps[0]);
-        }
-      }
-
-      if (interpolatedValue != null && interpolatedValue >= 0.0) {
-        interpolatedValuesMap.put(requiredTs, interpolatedValue);
-      }
-    }
-    return interpolatedValuesMap;
-  }
-
-  private static boolean timestampInRange(Long timestamp, double left, double right) {
-    return (timestamp >= left && timestamp <= right);
-  }
-
-}
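
For reference, the removed PostProcessingUtil can be exercised as below. This is a minimal sketch, assuming ambari-metrics-common and commons-math3 are still on the classpath; the series values and the 10-second interval are illustrative only, not taken from the commit.

    import java.util.Arrays;
    import java.util.Map;
    import java.util.TreeMap;

    import org.apache.hadoop.metrics2.sink.timeline.PostProcessingUtil;

    public class InterpolationDemo {
      public static void main(String[] args) {
        // A series sampled every 10s, with the 20s and 30s points missing.
        Map<Long, Double> series = new TreeMap<>();
        series.put(10_000L, 1.0);
        series.put(40_000L, 4.0);

        // Gaps are filled by linear interpolation between the neighbors.
        Map<Long, Double> filled =
            PostProcessingUtil.interpolateMissingData(series, 10_000L);
        System.out.println(filled); // {10000=1.0, 20000=2.0, 30000=3.0, 40000=4.0}

        // 50s lies outside [10s, 40s], so the extrapolation branch applies:
        // y(x) = y1 + m * (x - x1) using the first spline's coefficients.
        Map<Long, Double> extrapolated =
            PostProcessingUtil.interpolate(series, Arrays.asList(50_000L));
        System.out.println(extrapolated); // {50000=5.0}
      }
    }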
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/Precision.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/Precision.java
deleted file mode 100644
index 39bcce0..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/Precision.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-/**
- * Used to determine the metrics aggregate table.
- */
-public enum Precision {
-  SECONDS,
-  MINUTES,
-  HOURS,
-  DAYS;
-
-  public static class PrecisionFormatException extends IllegalArgumentException {
-    public PrecisionFormatException(String message, Throwable cause) {
-      super(message, cause);
-    }
-  }
-
-  public static Precision getPrecision(String precision) throws PrecisionFormatException {
-    if (precision == null || precision.isEmpty()) {
-      return null;
-    }
-    try {
-      return Precision.valueOf(precision.toUpperCase());
-    } catch (IllegalArgumentException e) {
-      throw new PrecisionFormatException("precision should be seconds, " +
-        "minutes, hours or days", e);
-    }
-  }
-
-  public static Precision getPrecision(long startTime, long endTime) {
-    long HOUR = 3600000; // 1 hour
-    long DAY = 86400000; // 1 day
-    long timeRange = endTime - startTime;
-    if (timeRange > 30 * DAY) {
-      return Precision.DAYS;
-    } else if (timeRange > 1 * DAY) {
-      return Precision.HOURS;
-    } else if (timeRange > 2 * HOUR) {
-      return Precision.MINUTES;
-    } else {
-      return Precision.SECONDS;
-    }
-  }
-
-  public static Precision getHigherPrecision(Precision precision) {
-
-    if (precision == null)
-      return null;
-
-    if (precision.equals(Precision.SECONDS)) {
-      return Precision.MINUTES;
-    } else if (precision.equals(Precision.MINUTES)) {
-      return Precision.HOURS;
-    } else if (precision.equals(Precision.HOURS)) {
-      return Precision.DAYS;
-    } else {
-      return null;
-    }
-  }
-}
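
The time-range thresholds in getPrecision(long, long) above translate as: over 30 days maps to DAYS, over 1 day to HOURS, over 2 hours to MINUTES, and anything shorter to SECONDS. A small illustrative sketch, assuming the class is on the classpath:

    import java.util.concurrent.TimeUnit;

    import org.apache.hadoop.metrics2.sink.timeline.Precision;

    public class PrecisionDemo {
      public static void main(String[] args) {
        long now = System.currentTimeMillis();

        // 1h window -> SECONDS (not above the 2-hour threshold)
        System.out.println(Precision.getPrecision(now - TimeUnit.HOURS.toMillis(1), now));
        // 12h window -> MINUTES; 7d window -> HOURS; 90d window -> DAYS
        System.out.println(Precision.getPrecision(now - TimeUnit.HOURS.toMillis(12), now));
        System.out.println(Precision.getPrecision(now - TimeUnit.DAYS.toMillis(7), now));
        System.out.println(Precision.getPrecision(now - TimeUnit.DAYS.toMillis(90), now));

        // Parsing is case-insensitive; bad input raises PrecisionFormatException.
        System.out.println(Precision.getPrecision("minutes")); // MINUTES
      }
    }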
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/PrecisionLimitExceededException.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/PrecisionLimitExceededException.java
deleted file mode 100644
index 962a071..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/PrecisionLimitExceededException.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-public class PrecisionLimitExceededException extends IllegalArgumentException {
-
-  private static final long serialVersionUID = 1L;
-
-  public PrecisionLimitExceededException(String message, Throwable cause) {
-    super(message, cause);
-  }
-
-  public PrecisionLimitExceededException(String message) {
-    super(message);
-  }
-
-  public PrecisionLimitExceededException(Throwable cause) {
-    super(cause);
-  }
-
-}
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/SingleValuedTimelineMetric.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/SingleValuedTimelineMetric.java
deleted file mode 100644
index 83d8e2c..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/SingleValuedTimelineMetric.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-/**
- * This class avoids creating a TreeMap for every metric instantiated when
- * reading from the store. The methods provide interoperability
- * with {@link TimelineMetric}.
- */
-public class SingleValuedTimelineMetric {
-  private Double value;
-  private String metricName;
-  private String appId;
-  private String instanceId;
-  private String hostName;
-  private Long startTime;
-
-  public void setSingleTimeseriesValue(Long startTime, Double value) {
-    this.startTime = startTime;
-    this.value = value;
-  }
-
-  public SingleValuedTimelineMetric(String metricName, String appId,
-                                    String instanceId, String hostName,
-                                    long startTime) {
-    this.metricName = metricName;
-    this.appId = appId;
-    this.instanceId = instanceId;
-    this.hostName = hostName;
-    this.startTime = startTime;
-  }
-
-  public long getStartTime() {
-    return startTime;
-  }
-
-  public Double getValue() {
-    return value;
-  }
-
-  public String getMetricName() {
-    return metricName;
-  }
-
-  public String getAppId() {
-    return appId;
-  }
-
-  public String getInstanceId() {
-    return instanceId;
-  }
-
-  public String getHostName() {
-    return hostName;
-  }
-
-  public boolean equalsExceptTime(TimelineMetric metric) {
-    if (!metricName.equals(metric.getMetricName())) return false;
-    if (hostName != null ? !hostName.equals(metric.getHostName()) : metric.getHostName() != null)
-      return false;
-    if (appId != null ? !appId.equals(metric.getAppId()) : metric.getAppId() != null)
-      return false;
-    if (instanceId != null ? !instanceId.equals(metric.getInstanceId()) : metric.getInstanceId() != null) return false;
-
-    return true;
-  }
-
-  public TimelineMetric getTimelineMetric() {
-    TimelineMetric metric = new TimelineMetric();
-    metric.setMetricName(this.metricName);
-    metric.setAppId(this.appId);
-    metric.setHostName(this.hostName);
-    metric.setInstanceId(this.instanceId);
-    metric.setStartTime(this.startTime);
-    metric.getMetricValues().put(startTime, value);
-    return metric;
-  }
-}
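
The point of the class above is that the TreeMap is only allocated when a caller actually needs the full map form. A brief sketch; the metric name, appId, and hostname here are made up:

    import org.apache.hadoop.metrics2.sink.timeline.SingleValuedTimelineMetric;
    import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;

    public class SingleValueDemo {
      public static void main(String[] args) {
        long ts = System.currentTimeMillis();
        SingleValuedTimelineMetric point = new SingleValuedTimelineMetric(
            "read_count", "hbase", null, "host1.example.com", ts);
        point.setSingleTimeseriesValue(ts, 42.0);

        // Only this conversion materializes a TreeMap-backed TimelineMetric.
        TimelineMetric metric = point.getTimelineMetric();
        System.out.println(metric.getMetricValues()); // {<ts>=42.0}
      }
    }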
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
deleted file mode 100644
index a5768c3..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
+++ /dev/null
@@ -1,224 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.TreeMap;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.map.annotate.JsonDeserialize;
-
-@XmlRootElement(name = "metric")
-@XmlAccessorType(XmlAccessType.NONE)
-@InterfaceAudience.Public
-@InterfaceStability.Unstable
-public class TimelineMetric implements Comparable<TimelineMetric>, Serializable {
-
-  private String metricName;
-  private String appId;
-  private String instanceId = null;
-  private String hostName;
-  private long timestamp;
-  private long startTime;
-  private String type;
-  private String units;
-  private TreeMap<Long, Double> metricValues = new TreeMap<Long, Double>();
-  private HashMap<String, String> metadata = new HashMap<>();
-
-  // default
-  public TimelineMetric() {
-
-  }
-
-  // To reconstruct TimelineMetric from UUID.
-  public TimelineMetric(String metricName, String hostname, String appId, String instanceId) {
-    this.metricName = metricName;
-    this.hostName = hostname;
-    this.appId = appId;
-    this.instanceId = instanceId;
-  }
-
-  // copy constructor
-  public TimelineMetric(TimelineMetric metric) {
-    setMetricName(metric.getMetricName());
-    setType(metric.getType());
-    setUnits(metric.getUnits());
-    setAppId(metric.getAppId());
-    setInstanceId(metric.getInstanceId());
-    setHostName(metric.getHostName());
-    setStartTime(metric.getStartTime());
-    setMetricValues(new TreeMap<Long, Double>(metric.getMetricValues()));
-  }
-
-  @XmlElement(name = "metricname")
-  public String getMetricName() {
-    return metricName;
-  }
-
-  public void setMetricName(String metricName) {
-    this.metricName = metricName;
-  }
-
-  @XmlElement(name = "appid")
-  public String getAppId() {
-    return appId;
-  }
-
-  public void setAppId(String appId) {
-    this.appId = appId;
-  }
-
-  @XmlElement(name = "instanceid")
-  public String getInstanceId() {
-    return instanceId;
-  }
-
-  public void setInstanceId(String instanceId) {
-    this.instanceId = instanceId;
-  }
-
-  @XmlElement(name = "hostname")
-  public String getHostName() {
-    return hostName;
-  }
-
-  public void setHostName(String hostName) {
-    this.hostName = hostName;
-  }
-
-  @XmlElement(name = "timestamp")
-  public long getTimestamp() {
-    return timestamp;
-  }
-
-  public void setTimestamp(long timestamp) {
-    this.timestamp = timestamp;
-  }
-
-  @XmlElement(name = "starttime")
-  public long getStartTime() {
-    return startTime;
-  }
-
-  public void setStartTime(long startTime) {
-    this.startTime = startTime;
-  }
-
-  @XmlElement(name = "type", defaultValue = "UNDEFINED")
-  public String getType() {
-    return type;
-  }
-
-  public void setType(String type) {
-    this.type = type;
-  }
-
-  @XmlElement(name = "units")
-  public String getUnits() {
-    return units;
-  }
-
-  public void setUnits(String units) {
-    this.units = units;
-  }
-
-  @XmlElement(name = "metrics")
-  public TreeMap<Long, Double> getMetricValues() {
-    return metricValues;
-  }
-
-  public void setMetricValues(TreeMap<Long, Double> metricValues) {
-    this.metricValues = metricValues;
-  }
-
-  public void addMetricValues(Map<Long, Double> metricValues) {
-    this.metricValues.putAll(metricValues);
-    if (!this.metricValues.isEmpty()) {
-      this.setStartTime(this.metricValues.firstKey());
-    }
-  }
-
-  @XmlElement(name = "metadata")
-  public HashMap<String,String> getMetadata () {
-    return metadata;
-  }
-
-  public void setMetadata (HashMap<String,String> metadata) {
-    this.metadata = metadata;
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
-
-    TimelineMetric metric = (TimelineMetric) o;
-
-    if (!metricName.equals(metric.metricName)) return false;
-    if (hostName != null ? !hostName.equals(metric.hostName) : metric.hostName != null)
-      return false;
-    if (appId != null ? !appId.equals(metric.appId) : metric.appId != null)
-      return false;
-    if (instanceId != null ? !instanceId.equals(metric.instanceId) : metric.instanceId != null)
-      return false;
-    if (startTime != metric.startTime) return false;
-
-    return true;
-  }
-
-  public boolean equalsExceptTime(TimelineMetric metric) {
-    if (!metricName.equals(metric.metricName)) return false;
-    if (hostName != null ? !hostName.equals(metric.hostName) : metric.hostName != null)
-      return false;
-    if (appId != null ? !appId.equals(metric.appId) : metric.appId != null)
-      return false;
-    if (instanceId != null ? !instanceId.equals(metric.instanceId) : metric.instanceId != null)
-      return false;
-
-    return true;
-  }
-
-  @Override
-  public int hashCode() {
-    int result = metricName.hashCode();
-    result = 31 * result + (appId != null ? appId.hashCode() : 0);
-    result = 31 * result + (instanceId != null ? instanceId.hashCode() : 0);
-    result = 31 * result + (hostName != null ? hostName.hashCode() : 0);
-    result = 31 * result + (int) (startTime ^ (startTime >>> 32));
-    return result;
-  }
-
-  @Override
-  public int compareTo(TimelineMetric other) {
-    if (startTime > other.startTime) {
-      return -1;
-    } else if (startTime < other.startTime) {
-      return 1;
-    } else {
-      return metricName.compareTo(other.metricName);
-    }
-  }
-}
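
Note the ordering contract above: compareTo sorts by start time descending (newest first), falling back to metric name, while equals and equalsExceptTime ignore values and units. A quick sketch with invented metric names:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;

    public class OrderingDemo {
      private static TimelineMetric metric(String name, long startTime) {
        TimelineMetric m = new TimelineMetric(name, "host1", "HOST", null);
        m.setStartTime(startTime);
        return m;
      }

      public static void main(String[] args) {
        List<TimelineMetric> metrics = new ArrayList<>();
        metrics.add(metric("cpu_user", 1000L));
        metrics.add(metric("cpu_system", 2000L));
        metrics.add(metric("cpu_idle", 2000L));

        Collections.sort(metrics);
        for (TimelineMetric m : metrics) {
          System.out.println(m.getStartTime() + " " + m.getMetricName());
        }
        // 2000 cpu_idle, 2000 cpu_system, 1000 cpu_user
      }
    }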
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java
deleted file mode 100644
index b1266b1..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.annotate.JsonIgnore;
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-@XmlRootElement(name = "metric_metadata")
-@XmlAccessorType(XmlAccessType.NONE)
-@InterfaceAudience.Public
-@InterfaceStability.Unstable
-public class TimelineMetricMetadata {
-  private String metricName;
-  private String appId;
-  private String instanceId;
-  private byte[] uuid;
-  private String units;
-  private String type = "UNDEFINED";
-  private Long seriesStartTime;
-  boolean supportsAggregates = true;
-  boolean isWhitelisted = false;
-  // Serialization ignored helper flag
-  boolean isPersisted = false;
-
-  // Placeholder to add more type later
-  public enum MetricType {
-    GAUGE,
-    COUNTER,
-    UNDEFINED
-  }
-
-  // Default constructor
-  public TimelineMetricMetadata() {
-  }
-
-  public TimelineMetricMetadata(String metricName, String appId, String instanceId, String units,
-                                String type, Long seriesStartTime,
-                                boolean supportsAggregates, boolean isWhitelisted) {
-    this.metricName = metricName;
-    this.appId = appId;
-    this.instanceId = instanceId;
-    this.units = units;
-    this.type = type;
-    this.seriesStartTime = seriesStartTime;
-    this.supportsAggregates = supportsAggregates;
-    this.isWhitelisted = isWhitelisted;
-  }
-
-  @XmlElement(name = "metricname")
-  public String getMetricName() {
-    return metricName;
-  }
-
-  public void setMetricName(String metricName) {
-    this.metricName = metricName;
-  }
-
-  // This is the key for the webservice hence ignored.
-  //@XmlElement(name = "appid")
-  public String getAppId() {
-    return appId;
-  }
-
-  public void setAppId(String appId) {
-    this.appId = appId;
-  }
-
-  @XmlElement(name = "instanceId")
-  public String getInstanceId() {
-    return instanceId;
-  }
-
-  public void setInstanceId(String instanceId) {
-    this.instanceId = instanceId;
-  }
-
-  @XmlElement(name = "uuid")
-  public byte[] getUuid() {
-    return uuid;
-  }
-
-  public void setUuid(byte[] uuid) {
-    this.uuid = uuid;
-  }
-
-  @XmlElement(name = "units")
-  public String getUnits() {
-    return units;
-  }
-
-  public void setUnits(String units) {
-    this.units = units;
-  }
-
-  @XmlElement(name = "type")
-  public String getType() {
-    return type;
-  }
-
-  public void setType(String type) {
-    this.type = type;
-  }
-
-  @XmlElement(name = "seriesStartTime")
-  public Long getSeriesStartTime() {
-    return (seriesStartTime != null) ? seriesStartTime : 0L;
-  }
-
-  public void setSeriesStartTime(Long seriesStartTime) {
-    this.seriesStartTime = seriesStartTime;
-  }
-
-  @XmlElement(name = "supportsAggregation")
-  public boolean isSupportsAggregates() {
-    return supportsAggregates;
-  }
-
-  @XmlElement(name = "isWhitelisted")
-  public boolean isWhitelisted() {
-    return isWhitelisted;
-  }
-
-  public void setIsWhitelisted(boolean isWhitelisted) {
-    this.isWhitelisted = isWhitelisted;
-  }
-
-  public void setSupportsAggregates(boolean supportsAggregates) {
-    this.supportsAggregates = supportsAggregates;
-  }
-
-  @JsonIgnore
-  public boolean isPersisted() {
-    return isPersisted;
-  }
-
-  public void setIsPersisted(boolean isPersisted) {
-    this.isPersisted = isPersisted;
-  }
-
-  /**
-   * Assumes the key of the object being compared is the same as this
-   * {@link TimelineMetricMetadata}.
-   * @param metadata the {@link TimelineMetricMetadata} to be compared
-   */
-  public boolean needsToBeSynced(TimelineMetricMetadata metadata) throws MetadataException {
-    if (!this.metricName.equals(metadata.getMetricName()) ||
-        !this.appId.equals(metadata.getAppId()) ||
-      !(StringUtils.isNotEmpty(instanceId) ? instanceId.equals(metadata.instanceId) : StringUtils.isEmpty(metadata.instanceId))) {
-      throw new MetadataException("Unexpected argument: metricName = " +
-        metadata.getMetricName() + ", appId = " + metadata.getAppId() + ", instanceId = " + metadata.getInstanceId());
-    }
-
-    // Series start time should never change
-    return (this.units != null && !this.units.equals(metadata.getUnits())) ||
-      (this.type != null && !this.type.equals(metadata.getType())) ||
-      //!this.lastRecordedTime.equals(metadata.getLastRecordedTime()) || // TODO: support
-      !this.supportsAggregates == metadata.isSupportsAggregates() ||
-      this.isWhitelisted != metadata.isWhitelisted;
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
-
-    TimelineMetricMetadata that = (TimelineMetricMetadata) o;
-
-    if (!metricName.equals(that.metricName)) return false;
-    if (!appId.equals(that.appId)) return false;
-    return (StringUtils.isNotEmpty(instanceId) ? instanceId.equals(that.instanceId) : StringUtils.isEmpty(that.instanceId));
-  }
-
-  @Override
-  public int hashCode() {
-    int result = metricName.hashCode();
-    result = 31 * result + (appId != null ? appId.hashCode() : 0);
-    result = 31 * result + (instanceId != null ? instanceId.hashCode() : 0);
-    return result;
-  }
-}
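
needsToBeSynced above separates key fields (name, appId, instanceId), which must match or a MetadataException is thrown, from mutable attributes (units, type, aggregation and whitelist flags) whose change triggers a sync. A sketch with invented values:

    import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;

    public class MetadataSyncDemo {
      public static void main(String[] args) throws Exception {
        TimelineMetricMetadata stored = new TimelineMetricMetadata(
            "cpu_user", "HOST", null, "Percent", "GAUGE", 1000L, true, true);
        TimelineMetricMetadata incoming = new TimelineMetricMetadata(
            "cpu_user", "HOST", null, "Percent", "GAUGE", 2000L, true, true);

        // Same key, same attributes: a differing series start time alone
        // never triggers a sync.
        System.out.println(stored.needsToBeSynced(incoming)); // false

        // A changed unit on the same key does.
        incoming.setUnits("Ratio");
        System.out.println(stored.needsToBeSynced(incoming)); // true
      }
    }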
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricUtils.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricUtils.java
deleted file mode 100644
index f140a8e..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricUtils.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import org.apache.commons.lang.StringUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class TimelineMetricUtils {
-
-  /**
-   * Given a SQL LIKE-style pattern, convert it to a Java regex.
-   * @param sqlRegex the SQL-style pattern to convert
-   * @return the equivalent Java regex string
-   */
-  public static String getJavaRegexFromSqlRegex(String sqlRegex) {
-    String javaRegEx;
-    if (sqlRegex.contains("*") || sqlRegex.contains("__%")) {
-      //Special case handling for metric name with * and __%.
-      //For example, dfs.NNTopUserOpCounts.windowMs=300000.op=*.user=%.count
-      // or dfs.NNTopUserOpCounts.windowMs=300000.op=__%.user=%.count
-      String metricNameWithEscSeq = sqlRegex.replace("*", "\\*").replace("__%", "..%");
-      javaRegEx = metricNameWithEscSeq.replace("%", ".*");
-    } else {
-      javaRegEx = sqlRegex.replace("%", ".*");
-    }
-    return javaRegEx;
-  }
-
-  /**
-   * Wrapper method that splits a comma-separated pattern string and converts
-   * each entry via getJavaRegexFromSqlRegex.
-   * @param commaSeparatedMetricPatternsString comma-separated SQL-style patterns
-   * @return the list of equivalent Java regexes
-   */
-  public static List<String> getJavaMetricPatterns(String commaSeparatedMetricPatternsString) {
-
-    List<String> javaPatterns = new ArrayList<>();
-    if (StringUtils.isEmpty(commaSeparatedMetricPatternsString)) {
-      return javaPatterns;
-    }
-
-    for (String patternString : commaSeparatedMetricPatternsString.split(",")) {
-      String javaPatternString = getJavaRegexFromSqlRegex(patternString);
-      javaPatterns.add(javaPatternString);
-    }
-    return javaPatterns;
-  }
-}
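
The conversion above maps the SQL wildcard '%' to '.*', escaping a literal '*' and rewriting '__%' first so patterns like the NNTopUserOpCounts example in the comments survive. An illustrative sketch:

    import java.util.List;

    import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricUtils;

    public class RegexDemo {
      public static void main(String[] args) {
        System.out.println(TimelineMetricUtils.getJavaRegexFromSqlRegex(
            "regionserver.Server.%"));   // regionserver.Server..*

        System.out.println(TimelineMetricUtils.getJavaRegexFromSqlRegex(
            "dfs.NNTopUserOpCounts.windowMs=300000.op=*.user=%.count"));
        // dfs.NNTopUserOpCounts.windowMs=300000.op=\*.user=.*.count

        List<String> patterns =
            TimelineMetricUtils.getJavaMetricPatterns("load_%,cpu_%");
        System.out.println(patterns);    // [load_.*, cpu_.*]
      }
    }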
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricWithAggregatedValues.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricWithAggregatedValues.java
deleted file mode 100644
index 626ac5f..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricWithAggregatedValues.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-
-@XmlRootElement(name = "TimelineMetricWithAggregatedValues")
-@XmlAccessorType(XmlAccessType.NONE)
-public class TimelineMetricWithAggregatedValues {
-    private TimelineMetric timelineMetric;
-    private MetricHostAggregate metricAggregate;
-
-    public TimelineMetricWithAggregatedValues() {
-    }
-
-    public TimelineMetricWithAggregatedValues(TimelineMetric metric, MetricHostAggregate metricAggregate) {
-        timelineMetric = metric;
-        this.metricAggregate = metricAggregate;
-    }
-
-    @XmlElement
-    public MetricHostAggregate getMetricAggregate() {
-        return metricAggregate;
-    }
-    @XmlElement
-    public TimelineMetric getTimelineMetric() {
-        return timelineMetric;
-    }
-
-    public void setTimelineMetric(TimelineMetric timelineMetric) {
-        this.timelineMetric = timelineMetric;
-    }
-
-    public void setMetricAggregate(MetricHostAggregate metricAggregate) {
-        this.metricAggregate = metricAggregate;
-    }
-
-    @Override
-    public String toString() {
-        return "TimelineMetricWithAggregatedValues{" +
-                "timelineMetric=" + timelineMetric +
-                ", metricAggregate=" + metricAggregate +
-                '}';
-    }
-}
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetrics.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetrics.java
deleted file mode 100644
index a8d3da8..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetrics.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * The class that hosts a list of timeline metrics.
- */
-@XmlRootElement(name = "metrics")
-@XmlAccessorType(XmlAccessType.NONE)
-@InterfaceAudience.Public
-@InterfaceStability.Unstable
-public class TimelineMetrics implements Serializable {
-
-  private List<TimelineMetric> allMetrics = new ArrayList<TimelineMetric>();
-
-  public TimelineMetrics() {}
-
-  @XmlElement(name = "metrics")
-  public List<TimelineMetric> getMetrics() {
-    return allMetrics;
-  }
-
-  public void setMetrics(List<TimelineMetric> allMetrics) {
-    this.allMetrics = allMetrics;
-  }
-
-  private boolean isEqualTimelineMetrics(TimelineMetric metric1,
-                                         TimelineMetric metric2) {
-
-    if (!metric1.getMetricName().equals(metric2.getMetricName())) {
-      return false;
-    }
-
-    // Check each attribute independently so a hostname mismatch is not
-    // masked by a subsequent appId match.
-    if (metric1.getHostName() != null &&
-        !metric1.getHostName().equals(metric2.getHostName())) {
-      return false;
-    }
-
-    if (metric1.getAppId() != null &&
-        !metric1.getAppId().equals(metric2.getAppId())) {
-      return false;
-    }
-
-    return true;
-  }
-
-  /**
-   * Merge with existing TimelineMetric if everything except startTime is
-   * the same.
-   * @param metric {@link TimelineMetric}
-   */
-  public void addOrMergeTimelineMetric(TimelineMetric metric) {
-    TimelineMetric metricToMerge = null;
-
-    if (!allMetrics.isEmpty()) {
-      for (TimelineMetric timelineMetric : allMetrics) {
-        if (timelineMetric.equalsExceptTime(metric)) {
-          metricToMerge = timelineMetric;
-          break;
-        }
-      }
-    }
-
-    if (metricToMerge != null) {
-      metricToMerge.addMetricValues(metric.getMetricValues());
-      if (metricToMerge.getStartTime() > metric.getStartTime()) {
-        metricToMerge.setStartTime(metric.getStartTime());
-      }
-    } else {
-      allMetrics.add(metric);
-    }
-  }
-
-  // Optimization that prevents too many TreeMaps from being created.
-  public void addOrMergeTimelineMetric(SingleValuedTimelineMetric metric) {
-    TimelineMetric metricToMerge = null;
-
-    if (!allMetrics.isEmpty()) {
-      for (TimelineMetric timelineMetric : allMetrics) {
-        if (metric.equalsExceptTime(timelineMetric)) {
-          metricToMerge = timelineMetric;
-          break;
-        }
-      }
-    }
-
-    if (metricToMerge != null) {
-      metricToMerge.getMetricValues().put(metric.getStartTime(), metric.getValue());
-      if (metricToMerge.getStartTime() > metric.getStartTime()) {
-        metricToMerge.setStartTime(metric.getStartTime());
-      }
-    } else {
-      allMetrics.add(metric.getTimelineMetric());
-    }
-  }
-}
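
addOrMergeTimelineMetric above folds a new sample into an existing series when name, app, host, and instance all match, pulling the start time back to the earliest sample. A minimal sketch with invented names:

    import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
    import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;

    public class MergeDemo {
      private static TimelineMetric point(long ts, double value) {
        TimelineMetric m = new TimelineMetric("cpu_user", "host1", "HOST", null);
        m.setStartTime(ts);
        m.getMetricValues().put(ts, value);
        return m;
      }

      public static void main(String[] args) {
        TimelineMetrics metrics = new TimelineMetrics();
        metrics.addOrMergeTimelineMetric(point(2000L, 0.5));
        metrics.addOrMergeTimelineMetric(point(1000L, 0.4)); // merges, no new entry

        System.out.println(metrics.getMetrics().size());                   // 1
        System.out.println(metrics.getMetrics().get(0).getStartTime());    // 1000
        System.out.println(metrics.getMetrics().get(0).getMetricValues()); // {1000=0.4, 2000=0.5}
      }
    }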
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TopNConfig.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TopNConfig.java
deleted file mode 100644
index 61127da..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TopNConfig.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-@XmlRootElement(name = "topnconfig")
-@XmlAccessorType(XmlAccessType.NONE)
-@InterfaceAudience.Public
-@InterfaceStability.Unstable
-public class TopNConfig {
-  Integer topN;
-  String topNFunction;
-  Boolean isBottomN;
-
-  public TopNConfig(Integer topN, String topNFunction, Boolean isBottomN) {
-    this.setTopN(topN);
-    this.setTopNFunction(topNFunction);
-    this.setIsBottomN(isBottomN);
-  }
-
-  @XmlElement(name = "topn")
-  public Integer getTopN() {
-    return topN;
-  }
-
-  public void setTopN(Integer topN) {
-    this.topN = topN;
-  }
-
-  @XmlElement(name = "topnfunction")
-  public String getTopNFunction() {
-    return topNFunction;
-  }
-
-  public void setTopNFunction(String topNFunction) {
-    this.topNFunction = topNFunction;
-  }
-
-  @XmlElement(name = "isbottomn")
-  public Boolean getIsBottomN() {
-    return isBottomN;
-  }
-
-  public void setIsBottomN(Boolean isBottomN) {
-    this.isBottomN = isBottomN;
-  }
-}
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/UnableToConnectException.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/UnableToConnectException.java
deleted file mode 100644
index 797924f..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/UnableToConnectException.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-public class UnableToConnectException extends RuntimeException {
-
-  private static final long serialVersionUID = 1L;
-
-  private String connectUrl;
-
-  public UnableToConnectException(String message, Throwable cause) {
-    super(message, cause);
-  }
-
-  public UnableToConnectException(String message) {
-    super(message);
-  }
-
-  public UnableToConnectException(Throwable cause) {
-    super(cause);
-  }
-
-  public UnableToConnectException setConnectUrl(String connectUrl) {
-    this.connectUrl = connectUrl;
-    return this;
-  }
-
-  public String getConnectUrl() {
-    return connectUrl;
-  }
-}
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java
deleted file mode 100644
index 3071cbc..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline.availability;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.curator.CuratorZookeeperClient;
-import org.apache.curator.RetryLoop;
-import org.apache.curator.RetryPolicy;
-import org.apache.curator.retry.BoundedExponentialBackoffRetry;
-import org.apache.zookeeper.ZooKeeper;
-import org.apache.zookeeper.data.Stat;
-
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.Callable;
-
-/**
- * Finds live collector instances from ZooKeeper.
- * This class connects to ZK on demand and
- * does not add a watcher on the znode.
- */
-public class MetricCollectorHAHelper {
-  private final String zookeeperConnectionURL;
-  private final int tryCount;
-  private final int sleepMsBetweenRetries;
-
-  private static final int CONNECTION_TIMEOUT = 2000;
-  private static final int SESSION_TIMEOUT = 10000;
-  private static final String ZNODE = "/ambari-metrics-cluster";
-  private static final String ZK_PATH = ZNODE + "/LIVEINSTANCES";
-  private static final String INSTANCE_NAME_DELIMITER = "_";
-
-  private static final Log LOG = LogFactory.getLog(MetricCollectorHAHelper.class);
-
-  public MetricCollectorHAHelper(String zookeeperConnectionURL, int tryCount, int sleepMsBetweenRetries) {
-    this.zookeeperConnectionURL = zookeeperConnectionURL;
-    this.tryCount = tryCount;
-    this.sleepMsBetweenRetries = sleepMsBetweenRetries;
-  }
-
-  /**
-   * Connect to ZooKeeper to find live instances of the metrics collector.
-   * @return {@link Collection} of collector hostnames
-   */
-  public Collection<String> findLiveCollectorHostsFromZNode() {
-    Set<String> collectors = new HashSet<>();
-
-    RetryPolicy retryPolicy = new BoundedExponentialBackoffRetry(sleepMsBetweenRetries, 10*sleepMsBetweenRetries, tryCount);
-    final CuratorZookeeperClient client = new CuratorZookeeperClient(zookeeperConnectionURL,
-      SESSION_TIMEOUT, CONNECTION_TIMEOUT, null, retryPolicy);
-
-    List<String> liveInstances = null;
-
-    try {
-      client.start();
-      //Check if Znode exists
-      Stat stat = client.getZooKeeper().exists(ZNODE, false);
-      if (stat == null) {
-        LOG.info("/ambari-metrics-cluster znode does not exist. Skipping requesting live instances from zookeeper");
-        return collectors;
-      }
-      liveInstances = RetryLoop.callWithRetry(client, new Callable<List<String>>() {
-        @Override
-        public List<String> call() throws Exception {
-          ZooKeeper zookeeper = client.getZooKeeper();
-          return zookeeper.getChildren(ZK_PATH, false);
-        }
-      });
-    } catch (Exception e) {
-      LOG.warn("Unable to connect to zookeeper.", e);
-      LOG.debug(e);
-    } finally {
-      try {
-        client.close();
-      } catch (Exception e) {
-        LOG.error("Caught exception while trying to close Zk connection.",e);
-      }
-    }
-
-    // [ambari-sid-3.c.pramod-thangali.internal_12001]
-    if (liveInstances != null && !liveInstances.isEmpty()) {
-      for (String instanceStr : liveInstances) {
-        collectors.add(instanceStr.substring(0, instanceStr.indexOf(INSTANCE_NAME_DELIMITER)));
-      }
-    }
-
-    return collectors;
-  }
-}
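
The znode children read above are named "<hostname>_<port>" (as in the inline example), and only the hostname part is kept. Below is that parsing step in isolation; the second instance name is invented for illustration:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class LiveInstanceParsingDemo {
      public static void main(String[] args) {
        List<String> liveInstances = Arrays.asList(
            "ambari-sid-3.c.pramod-thangali.internal_12001",
            "collector-2.example.com_12001");

        Set<String> collectors = new HashSet<>();
        for (String instance : liveInstances) {
          collectors.add(instance.substring(0, instance.indexOf('_')));
        }
        System.out.println(collectors);
        // [ambari-sid-3.c.pramod-thangali.internal, collector-2.example.com]
      }
    }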
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorUnavailableException.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorUnavailableException.java
deleted file mode 100644
index c381bbb..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorUnavailableException.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline.availability;
-
-public class MetricCollectorUnavailableException extends Exception {
-  public MetricCollectorUnavailableException(String message) {
-    super(message);
-  }
-}
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricSinkWriteShardHostnameHashingStrategy.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricSinkWriteShardHostnameHashingStrategy.java
deleted file mode 100644
index 25bff54..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricSinkWriteShardHostnameHashingStrategy.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline.availability;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import java.util.List;
-
-/**
- * Provides sharding based on hostname
- */
-public class MetricSinkWriteShardHostnameHashingStrategy implements MetricSinkWriteShardStrategy {
-  private final String hostname;
-  private final long hostnameHash;
-  private static final Log LOG = LogFactory.getLog(MetricSinkWriteShardHostnameHashingStrategy.class);
-
-  public MetricSinkWriteShardHostnameHashingStrategy(String hostname) {
-    this.hostname = hostname;
-    this.hostnameHash = hostname != null ? computeHash(hostname) : 1000; // some constant
-  }
-
-  @Override
-  public String findCollectorShard(List<String> collectorHosts) {
-    long index = hostnameHash % collectorHosts.size();
-    index = index < 0 ? index + collectorHosts.size() : index;
-    String collectorHost = collectorHosts.get((int) index);
-    LOG.info(String.format("Calculated collector shard %s based on hostname: %s", collectorHost, hostname));
-    return collectorHost;
-  }
-
-  /**
-   * Compute a consistent hash based on the hostname, which should give a decently
-   * uniform distribution assuming hostnames generally have a sequential
-   * numeric suffix.
-   */
-  long computeHash(String hostname) {
-    long h = 11987L; // prime
-    int len = hostname.length();
-
-    for (int i = 0; i < len; i++) {
-      h = 31 * h + hostname.charAt(i);
-    }
-    return h;
-  }
-}
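
Since the hash above depends only on the sink's own hostname, each host picks a stable collector for as long as the collector list is unchanged. A sketch (hostnames are invented; the concrete assignments depend on the hash values):

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardHostnameHashingStrategy;

    public class ShardingDemo {
      public static void main(String[] args) {
        List<String> collectors = Arrays.asList("collector-1", "collector-2", "collector-3");

        for (String host : Arrays.asList("node-1", "node-2", "node-3", "node-4")) {
          MetricSinkWriteShardHostnameHashingStrategy strategy =
              new MetricSinkWriteShardHostnameHashingStrategy(host);
          System.out.println(host + " -> " + strategy.findCollectorShard(collectors));
        }
      }
    }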
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricSinkWriteShardStrategy.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricSinkWriteShardStrategy.java
deleted file mode 100644
index 7619555..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricSinkWriteShardStrategy.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline.availability;
-
-import java.util.List;
-
-public interface MetricSinkWriteShardStrategy {
-  String findCollectorShard(List<String> collectorHosts);
-}
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCache.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCache.java
deleted file mode 100644
index 0bed7d0..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCache.java
+++ /dev/null
@@ -1,219 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline.cache;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-import java.util.concurrent.ConcurrentSkipListMap;
-
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-public class TimelineMetricsCache {
-
-  private final TimelineMetricHolder timelineMetricCache = new TimelineMetricHolder();
-  private static final Log LOG = LogFactory.getLog(TimelineMetricsCache.class);
-  public static final int MAX_RECS_PER_NAME_DEFAULT = 10000;
-  public static final int MAX_EVICTION_TIME_MILLIS = 59000; // ~ 1 min
-  private final int maxRecsPerName;
-  private final int maxEvictionTimeInMillis;
-  private boolean skipCounterTransform = true;
-  private final Map<String, Double> counterMetricLastValue = new HashMap<String, Double>();
-
-  public TimelineMetricsCache(int maxRecsPerName, int maxEvictionTimeInMillis) {
-    this(maxRecsPerName, maxEvictionTimeInMillis, false);
-  }
-
-  public TimelineMetricsCache(int maxRecsPerName, int maxEvictionTimeInMillis,
-                              boolean skipCounterTransform) {
-    this.maxRecsPerName = maxRecsPerName;
-    this.maxEvictionTimeInMillis = maxEvictionTimeInMillis;
-    this.skipCounterTransform = skipCounterTransform;
-  }
-
-  class TimelineMetricWrapper {
-    private long timeDiff = -1;
-    private long oldestTimestamp = -1;
-    private TimelineMetric timelineMetric;
-
-    TimelineMetricWrapper(TimelineMetric timelineMetric) {
-      this.timelineMetric = timelineMetric;
-      this.oldestTimestamp = timelineMetric.getStartTime();
-    }
-
-    private void updateTimeDiff(long timestamp) {
-      if (oldestTimestamp != -1 && timestamp > oldestTimestamp) {
-        timeDiff = timestamp - oldestTimestamp;
-      } else {
-        oldestTimestamp = timestamp;
-      }
-    }
-
-    public synchronized void putMetric(TimelineMetric metric) {
-      TreeMap<Long, Double> metricValues = this.timelineMetric.getMetricValues();
-      if (metricValues.size() > maxRecsPerName) {
-        // drop values older than oldestTimestamp + maxEvictionTimeInMillis
-        long newEldestTimestamp = oldestTimestamp + maxEvictionTimeInMillis;
-        TreeMap<Long, Double> metricsSubSet =
-          new TreeMap<>(metricValues.tailMap(newEldestTimestamp));
-        if (metricsSubSet.isEmpty()) {
-          oldestTimestamp = metric.getStartTime();
-          this.timelineMetric.setStartTime(metric.getStartTime());
-        } else {
-          Long newStartTime = metricsSubSet.firstKey();
-          oldestTimestamp = newStartTime;
-          this.timelineMetric.setStartTime(newStartTime);
-        }
-        this.timelineMetric.setMetricValues(metricsSubSet);
-        LOG.warn("Metrics cache overflow. Values for metric " +
-          metric.getMetricName() + " older than " + newEldestTimestamp +
-          " were removed to clean up the cache.");
-      }
-      this.timelineMetric.addMetricValues(metric.getMetricValues());
-      updateTimeDiff(metric.getStartTime());
-    }
-
-    public synchronized long getTimeDiff() {
-      return timeDiff;
-    }
-
-    public synchronized TimelineMetric getTimelineMetric() {
-      return timelineMetric;
-    }
-  }
-
-  // TODO: Add weighted eviction
-  class TimelineMetricHolder extends ConcurrentSkipListMap<String, TimelineMetricWrapper> {
-    private static final long serialVersionUID = 2L;
-    // To avoid duplication at the end of the buffer and beginning of the next
-    // segment of values
-    private Map<String, Long> endOfBufferTimestamps = new HashMap<String, Long>();
-
-    public TimelineMetric evict(String metricName) {
-      TimelineMetricWrapper metricWrapper = this.get(metricName);
-
-      if (metricWrapper == null
-        || metricWrapper.getTimeDiff() < getMaxEvictionTimeInMillis()) {
-        return null;
-      }
-
-      TimelineMetric timelineMetric = metricWrapper.getTimelineMetric();
-      this.remove(metricName);
-
-      return timelineMetric;
-    }
-
-    public TimelineMetrics evictAll() {
-      List<TimelineMetric> metricList = new ArrayList<TimelineMetric>();
-
-      for (Iterator<Map.Entry<String, TimelineMetricWrapper>> it = this.entrySet().iterator(); it.hasNext();) {
-        Map.Entry<String, TimelineMetricWrapper> cacheEntry = it.next();
-        TimelineMetricWrapper metricWrapper = cacheEntry.getValue();
-        if (metricWrapper != null) {
-          TimelineMetric timelineMetric = cacheEntry.getValue().getTimelineMetric();
-          metricList.add(timelineMetric);
-        }
-        it.remove();
-      }
-      TimelineMetrics timelineMetrics = new TimelineMetrics();
-      timelineMetrics.setMetrics(metricList);
-      return timelineMetrics;
-    }
-
-    public void put(String metricName, TimelineMetric timelineMetric) {
-      if (isDuplicate(timelineMetric)) {
-        return;
-      }
-      TimelineMetricWrapper metric = this.get(metricName);
-      if (metric == null) {
-        this.put(metricName, new TimelineMetricWrapper(timelineMetric));
-      } else {
-        metric.putMetric(timelineMetric);
-      }
-      // Buffer last ts value
-      endOfBufferTimestamps.put(metricName, timelineMetric.getStartTime());
-    }
-
-    /**
-     * Test whether the last buffered timestamp for this metric equals the
-     * start time of the newly received one.
-     * @param timelineMetric the incoming {@link TimelineMetric}
-     * @return true if that start time was already buffered, false otherwise
-     */
-    private boolean isDuplicate(TimelineMetric timelineMetric) {
-      return endOfBufferTimestamps.containsKey(timelineMetric.getMetricName())
-        && endOfBufferTimestamps.get(timelineMetric.getMetricName()).equals(timelineMetric.getStartTime());
-    }
-  }
-
-  public TimelineMetric getTimelineMetric(String metricName) {
-    if (timelineMetricCache.containsKey(metricName)) {
-      return timelineMetricCache.evict(metricName);
-    }
-
-    return null;
-  }
-
-  public TimelineMetrics getAllMetrics() {
-    return timelineMetricCache.evictAll();
-  }
-
-  /**
-   * Getter method to help test eviction.
-   * @return the max eviction time in milliseconds
-   */
-  public int getMaxEvictionTimeInMillis() {
-    return maxEvictionTimeInMillis;
-  }
-
-  public void putTimelineMetric(TimelineMetric timelineMetric) {
-    timelineMetricCache.put(timelineMetric.getMetricName(), timelineMetric);
-  }
-
-  private void transformMetricValuesToDerivative(TimelineMetric timelineMetric) {
-    String metricName = timelineMetric.getMetricName();
-    double firstValue = timelineMetric.getMetricValues().size() > 0
-        ? timelineMetric.getMetricValues().entrySet().iterator().next().getValue() : 0;
-    Double value = counterMetricLastValue.get(metricName);
-    double previousValue = value != null ? value : firstValue;
-    Map<Long, Double> metricValues = timelineMetric.getMetricValues();
-    TreeMap<Long, Double> newMetricValues = new TreeMap<Long, Double>();
-    for (Map.Entry<Long, Double> entry : metricValues.entrySet()) {
-      newMetricValues.put(entry.getKey(), entry.getValue() - previousValue);
-      previousValue = entry.getValue();
-    }
-    timelineMetric.setMetricValues(newMetricValues);
-    counterMetricLastValue.put(metricName, previousValue);
-  }
-
-  public void putTimelineMetric(TimelineMetric timelineMetric, boolean isCounter) {
-    if (isCounter && !skipCounterTransform) {
-      transformMetricValuesToDerivative(timelineMetric);
-    }
-    putTimelineMetric(timelineMetric);
-  }
-}
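For reference, a minimal usage sketch of the TimelineMetricsCache removed above; the metric name and values are invented for illustration, but the constructor, constants and put/get calls match the deleted API:

    import java.util.TreeMap;
    import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
    import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;

    public class CacheUsageSketch {
      public static void main(String[] args) {
        TimelineMetricsCache cache = new TimelineMetricsCache(
            TimelineMetricsCache.MAX_RECS_PER_NAME_DEFAULT,
            TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS);

        TimelineMetric metric = new TimelineMetric();
        metric.setMetricName("regionserver.requests");  // hypothetical name
        metric.setHostName("host1");
        metric.setAppId("hbase");
        metric.setStartTime(System.currentTimeMillis());
        TreeMap<Long, Double> values = new TreeMap<Long, Double>();
        values.put(metric.getStartTime(), 42.0);
        metric.setMetricValues(values);

        // Counters are converted to per-interval deltas before caching.
        cache.putTimelineMetric(metric, true);

        // Returns null until the buffered values span more than the max
        // eviction time; then the merged metric is returned and removed.
        TimelineMetric ready = cache.getTimelineMetric("regionserver.requests");
      }
    }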
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/configuration/Configuration.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/configuration/Configuration.java
deleted file mode 100644
index a0380e1..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/configuration/Configuration.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics2.sink.timeline.configuration;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-public class Configuration {
-  public final Log LOG = LogFactory.getLog(this.getClass());
-  private final Properties properties;
-
-  public Configuration(String configFile) {
-    properties = new Properties();
-
-    //Get property file stream from classpath
-    InputStream inputStream = Configuration.class.getResourceAsStream(configFile);
-
-    if (inputStream == null) {
-      throw new IllegalArgumentException(configFile + " not found in classpath");
-    }
-
-    // load the properties
-    try {
-      properties.load(inputStream);
-      inputStream.close();
-    } catch (FileNotFoundException fnf) {
-      LOG.info("No configuration file " + configFile + " found in classpath.", fnf);
-    } catch (IOException ie) {
-      throw new IllegalArgumentException("Can't read configuration file " +
-          configFile, ie);
-    }
-  }
-
-  public String getProperty(String key) {
-    return properties.getProperty(key);
-  }
-
-  public String getProperty(String key, String defaultValue) {
-    return properties.getProperty(key, defaultValue);
-  }
-}
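Similarly, a short sketch of how the removed Configuration helper was consumed; the properties file name and key are placeholders (the class resolves the name on the classpath via getResourceAsStream, hence the leading slash):

    import org.apache.hadoop.metrics2.sink.timeline.configuration.Configuration;

    public class ConfigUsageSketch {
      public static void main(String[] args) {
        // Throws IllegalArgumentException if the resource is missing.
        Configuration conf = new Configuration("/flume-metrics2.properties");
        String hosts = conf.getProperty("collector.hosts", "localhost");
        System.out.println(hosts);
      }
    }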
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/util/Servers.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/util/Servers.java
deleted file mode 100644
index 76da0a2..0000000
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/util/Servers.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.util;
-
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * Helpers to handle server addresses
- */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-public class Servers {
-  /**
-   * This class is not intended to be instantiated
-   */
-  private Servers() {}
-
-  /**
-   * Parses a space and/or comma separated sequence of server specifications
-   * of the form <i>hostname</i> or <i>hostname:port</i>.  If
-   * the specs string is null, defaults to localhost:defaultPort.
-   *
-   * @param specs   server specs (see description)
-   * @param defaultPort the default port if not specified
-   * @return a list of InetSocketAddress objects.
-   */
-  public static List<InetSocketAddress> parse(String specs, int defaultPort) {
-    List<InetSocketAddress> result = new ArrayList<InetSocketAddress>();
-    if (specs == null) {
-      result.add(new InetSocketAddress("localhost", defaultPort));
-    } else {
-      String[] specStrings = specs.split("[ ,]+");
-      for (String specString : specStrings) {
-        result.add(createSocketAddr(specString, defaultPort));
-      }
-    }
-    return result;
-  }
-
-  /**
-   * @param target a server spec of the form hostname or hostname:port
-   * @param defaultPort the port to use when the spec omits one
-   * @return an InetSocketAddress created from the given spec
-   */
-  private static InetSocketAddress createSocketAddr(String target, int defaultPort) {
-    String helpText = "";
-    if (target == null) {
-      throw new IllegalArgumentException("Target address cannot be null." + helpText);
-    }
-    boolean hasScheme = target.contains("://");
-    URI uri = null;
-    try {
-      uri = hasScheme ? URI.create(target) : URI.create("dummyscheme://" + target);
-    } catch (IllegalArgumentException e) {
-      throw new IllegalArgumentException("Does not contain a valid host:port authority: " + target + helpText);
-    }
-
-    String host = uri.getHost();
-    int port = uri.getPort();
-    if (port == -1) {
-      port = defaultPort;
-    }
-    String path = uri.getPath();
-
-    if ((host == null) || (port < 0) || (!hasScheme && path != null && !path.isEmpty())) {
-      throw new IllegalArgumentException("Does not contain a valid host:port authority: " + target + helpText);
-    }
-    return createSocketAddrForHost(host, port);
-  }
-
-  /**
-   * @param host the host name to resolve
-   * @param port the port number
-   * @return an InetSocketAddress created with the specified host and port
-   */
-  private static InetSocketAddress createSocketAddrForHost(String host, int port) {
-    InetSocketAddress addr;
-    try {
-      InetAddress iaddr = InetAddress.getByName(host);
-      iaddr = InetAddress.getByAddress(host, iaddr.getAddress());
-      addr = new InetSocketAddress(iaddr, port);
-    } catch (UnknownHostException e) {
-      addr = InetSocketAddress.createUnresolved(host, port);
-    }
-    return addr;
-  }
-
-}
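The parse contract documented above, exercised in a standalone sketch (host names and the default port 8660 are arbitrary):

    import java.net.InetSocketAddress;
    import java.util.List;
    import org.apache.hadoop.metrics2.sink.util.Servers;

    public class ServersUsageSketch {
      public static void main(String[] args) {
        // Space- and/or comma-separated specs; hosts without an explicit
        // port fall back to the default.
        List<InetSocketAddress> addrs =
            Servers.parse("host1:8660, host2 host3:8661", 8660);
        System.out.println(addrs);  // host1:8660, host2:8660, host3:8661
      }
    }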
diff --git a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricSinkTest.java b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricSinkTest.java
deleted file mode 100644
index 634d18c..0000000
--- a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricSinkTest.java
+++ /dev/null
@@ -1,240 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import junit.framework.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.api.easymock.PowerMock;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-import java.io.OutputStream;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.TreeMap;
-
-import static org.easymock.EasyMock.anyString;
-import static org.easymock.EasyMock.expect;
-import static org.powermock.api.easymock.PowerMock.expectNew;
-import static org.powermock.api.easymock.PowerMock.replayAll;
-
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({AbstractTimelineMetricsSink.class, HttpURLConnection.class})
-public class AbstractTimelineMetricSinkTest {
-
-  @Test
-  public void testParseHostsStringIntoCollection() {
-    AbstractTimelineMetricsSink sink = new TestTimelineMetricsSink();
-    Collection<String> hosts;
-
-    hosts = sink.parseHostsStringIntoCollection("");
-    Assert.assertTrue(hosts.isEmpty());
-
-    hosts = sink.parseHostsStringIntoCollection("test1.123.abc.def.local");
-    Assert.assertTrue(hosts.size() == 1);
-    Assert.assertTrue(hosts.contains("test1.123.abc.def.local"));
-
-    hosts = sink.parseHostsStringIntoCollection("test1.123.abc.def.local ");
-    Assert.assertTrue(hosts.size() == 1);
-    Assert.assertTrue(hosts.contains("test1.123.abc.def.local"));
-
-    hosts = sink.parseHostsStringIntoCollection("test1.123.abc.def.local,test1.456.abc.def.local");
-    Assert.assertTrue(hosts.size() == 2);
-
-    hosts = sink.parseHostsStringIntoCollection("test1.123.abc.def.local, test1.456.abc.def.local");
-    Assert.assertTrue(hosts.size() == 2);
-    Assert.assertTrue(hosts.contains("test1.123.abc.def.local"));
-    Assert.assertTrue(hosts.contains("test1.456.abc.def.local"));
-  }
-
-  @Test
-  @PrepareForTest({URL.class, OutputStream.class, AbstractTimelineMetricsSink.class, HttpURLConnection.class, TimelineMetric.class})
-  public void testEmitMetrics() throws Exception {
-    HttpURLConnection connection = PowerMock.createNiceMock(HttpURLConnection.class);
-    URL url = PowerMock.createNiceMock(URL.class);
-    expectNew(URL.class, anyString()).andReturn(url).anyTimes();
-    expect(url.openConnection()).andReturn(connection).anyTimes();
-    expect(connection.getResponseCode()).andReturn(200).anyTimes();
-    OutputStream os = PowerMock.createNiceMock(OutputStream.class);
-    expect(connection.getOutputStream()).andReturn(os).anyTimes();
-
-    TestTimelineMetricsSink sink = new TestTimelineMetricsSink();
-    TimelineMetrics timelineMetrics = new TimelineMetrics();
-    long startTime = System.currentTimeMillis() / 60000 * 60000;
-
-    long seconds = 1000;
-    TreeMap<Long, Double> metricValues = new TreeMap<>();
-    /*
-
-    0        +30s      +60s
-    |         |         |
-      (1)(2)(3) (4)(5)   (6)  m1
-
-    */
-    // (6) should be cached, the rest - posted
-
-    metricValues.put(startTime + 4*seconds, 1.0);
-    metricValues.put(startTime + 14*seconds, 2.0);
-    metricValues.put(startTime + 24*seconds, 3.0);
-    metricValues.put(startTime + 34*seconds, 4.0);
-    metricValues.put(startTime + 44*seconds, 5.0);
-    metricValues.put(startTime + 64*seconds, 6.0);
-
-    TimelineMetric timelineMetric = new TimelineMetric("metric1", "host1", "app1", "instance1");
-    timelineMetric.setStartTime(metricValues.firstKey());
-    timelineMetric.addMetricValues(metricValues);
-
-    timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
-
-    replayAll();
-    sink.emitMetrics(timelineMetrics);
-    Assert.assertEquals(1, sink.getMetricsPostCache().size());
-    metricValues = new TreeMap<>();
-    metricValues.put(startTime + 64*seconds, 6.0);
-    Assert.assertEquals(metricValues, sink.getMetricsPostCache().getIfPresent("metric1").getMetricValues());
-
-    timelineMetrics = new TimelineMetrics();
-    metricValues = new TreeMap<>();
-    /*
-
-    +60s     +90s     +120s     +150s     +180s
-    |         |         |         |         |
-       (7)      (8)       (9)           (10)   (11)   m1
-
-    */
-    // (6) from previous post should be merged with current data
-    // (6),(7),(8),(9),(10) - should be posted, (11) - cached
-    metricValues.put(startTime + 74*seconds, 7.0);
-    metricValues.put(startTime + 94*seconds, 8.0);
-    metricValues.put(startTime + 124*seconds, 9.0);
-    metricValues.put(startTime + 154*seconds, 10.0);
-    metricValues.put(startTime + 184*seconds, 11.0);
-
-    timelineMetric = new TimelineMetric("metric1", "host1", "app1", "instance1");
-    timelineMetric.setStartTime(metricValues.firstKey());
-    timelineMetric.addMetricValues(metricValues);
-
-    timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
-    sink.emitMetrics(timelineMetrics);
-
-    Assert.assertEquals(1, sink.getMetricsPostCache().size());
-    metricValues = new TreeMap<>();
-    metricValues.put(startTime + 184*seconds, 11.0);
-    Assert.assertEquals(metricValues, sink.getMetricsPostCache().getIfPresent("metric1").getMetricValues());
-
-    timelineMetrics = new TimelineMetrics();
-
-    metricValues = new TreeMap<>();
-    /*
-
-    +180s   +210s   +240s
-    |         |       |
-       (12)        (13)
-
-    */
-    // (11) from previous post should be merged with current data
-    // (11),(12),(13) - should be posted, cache should be empty
-    metricValues.put(startTime + 194*seconds, 12.0);
-    metricValues.put(startTime + 239*seconds, 13.0);
-
-    timelineMetric = new TimelineMetric("metric1", "host1", "app1", "instance1");
-    timelineMetric.setStartTime(metricValues.firstKey());
-    timelineMetric.addMetricValues(metricValues);
-
-    timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
-    sink.emitMetrics(timelineMetrics);
-
-    Assert.assertEquals(0, sink.getMetricsPostCache().size());
-
-    metricValues = new TreeMap<>();
-    /*
-
-    +240s   +270s   +300s   +330s
-    |         |       |       |
-       (14)        (15)   (16)
-
-    */
-    // since postAllCachedMetrics in emitMetrics call is true (14),(15),(16) - should be posted, cache should be empty
-    metricValues.put(startTime + 245*seconds, 14.0);
-    metricValues.put(startTime + 294*seconds, 15.0);
-    metricValues.put(startTime + 315*seconds, 16.0);
-
-    timelineMetric = new TimelineMetric("metric1", "host1", "app1", "instance1");
-    timelineMetric.setStartTime(metricValues.firstKey());
-    timelineMetric.addMetricValues(metricValues);
-
-    timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
-    sink.emitMetrics(timelineMetrics, true);
-
-    Assert.assertEquals(0, sink.getMetricsPostCache().size());
-  }
-
-  private class TestTimelineMetricsSink extends AbstractTimelineMetricsSink {
-    @Override
-    protected String getCollectorUri(String host) {
-      return "";
-    }
-
-    @Override
-    protected String getCollectorProtocol() {
-      return "http";
-    }
-
-    @Override
-    protected String getCollectorPort() {
-      return "2181";
-    }
-
-    @Override
-    protected int getTimeoutSeconds() {
-      return 10;
-    }
-
-    @Override
-    protected String getZookeeperQuorum() {
-      return "localhost:2181";
-    }
-
-    @Override
-    protected Collection<String> getConfiguredCollectorHosts() {
-      return Arrays.asList("localhost");
-    }
-
-    @Override
-    protected String getHostname() {
-      return "h1";
-    }
-
-    @Override
-    protected boolean isHostInMemoryAggregationEnabled() {
-      return true;
-    }
-
-    @Override
-    protected int getHostInMemoryAggregationPort() {
-      return 61888;
-    }
-
-    @Override
-    protected String getHostInMemoryAggregationProtocol() {
-      return "http";
-    }
-  }
-}
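A compact restatement of what the timeline diagrams above encode, inferred from the assertions rather than from sink internals: emitMetrics posts values belonging to completed minute intervals and holds back points from a minute that may still be filling (the +64s point in the first scenario) in metricsPostCache, merging them into the next post; calling emitMetrics(metrics, true) flushes the cache unconditionally.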
diff --git a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManagerTest.java b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManagerTest.java
deleted file mode 100644
index 8355288..0000000
--- a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManagerTest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics2.sink.timeline;
-
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-
-import org.apache.http.Header;
-import org.apache.http.message.BasicHeader;
-import org.junit.Test;
-
-public class AppCookieManagerTest {
-
-  @Test
-  public void getCachedAppCookie() {
-    assertNull(new AppCookieManager().getCachedAppCookie("http://dummy"));
-  }
-
-  @Test
-  public void getHadoopAuthCookieValueWithNullHeaders() {
-    assertNull(AppCookieManager.getHadoopAuthCookieValue(null));
-  }
-
-  @Test
-  public void getHadoopAuthCookieValueWithEmptyHeaders() {
-    assertNull(AppCookieManager.getHadoopAuthCookieValue(new Header[0]));
-  }
-
-  @Test
-  public void getHadoopAuthCookieValueWithValidHeaders() {
-    Header[] headers = new Header[1];
-    headers[0] = new BasicHeader("Set-Cookie", AppCookieManager.HADOOP_AUTH + "=dummyvalue");
-    assertNotNull(AppCookieManager.getHadoopAuthCookieValue(headers));
-  }
-
-}
diff --git a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHATest.java b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHATest.java
deleted file mode 100644
index 0abc5fc..0000000
--- a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHATest.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline.availability;
-
-import com.google.gson.Gson;
-import junit.framework.Assert;
-import org.apache.commons.io.IOUtils;
-import org.apache.curator.CuratorZookeeperClient;
-import org.apache.curator.retry.BoundedExponentialBackoffRetry;
-import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
-import org.apache.zookeeper.ZooKeeper;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.api.easymock.PowerMock;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.powermock.api.easymock.PowerMock.createNiceMock;
-import static org.powermock.api.easymock.PowerMock.expectNew;
-import static org.powermock.api.easymock.PowerMock.replayAll;
-import static org.powermock.api.easymock.PowerMock.verifyAll;
-
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({AbstractTimelineMetricsSink.class, URL.class, HttpURLConnection.class, MetricCollectorHAHelper.class})
-public class MetricCollectorHATest {
-
-  @Test
-  public void findCollectorUsingZKTest() throws Exception {
-    InputStream is = createNiceMock(InputStream.class);
-    HttpURLConnection connection = createNiceMock(HttpURLConnection.class);
-    URL url = createNiceMock(URL.class);
-    MetricCollectorHAHelper haHelper = createNiceMock(MetricCollectorHAHelper.class);
-
-    expectNew(URL.class, "http://localhost1:2181/ws/v1/timeline/metrics/livenodes").andReturn(url).anyTimes();
-    expectNew(URL.class, "http://localhost2:2181/ws/v1/timeline/metrics/livenodes").andReturn(url).anyTimes();
-    expect(url.openConnection()).andReturn(connection).anyTimes();
-    expect(connection.getInputStream()).andReturn(is).anyTimes();
-    expect(connection.getResponseCode()).andThrow(new IOException()).anyTimes();
-    expect(haHelper.findLiveCollectorHostsFromZNode()).andReturn(
-      new ArrayList<String>() {{
-        add("h2");
-        add("h3");
-      }});
-
-    replayAll();
-    TestTimelineMetricsSink sink = new TestTimelineMetricsSink(haHelper);
-    sink.init();
-
-    String host = sink.findPreferredCollectHost();
-
-    verifyAll();
-
-    Assert.assertNotNull(host);
-    Assert.assertEquals("h2", host);
-  }
-
-  @Test
-  public void testEmbeddedModeCollectorZK() throws Exception {
-    BoundedExponentialBackoffRetry retryPolicyMock = PowerMock.createMock(BoundedExponentialBackoffRetry.class);
-    expectNew(BoundedExponentialBackoffRetry.class, 1000, 10000, 1).andReturn(retryPolicyMock);
-
-    CuratorZookeeperClient clientMock = PowerMock.createMock(CuratorZookeeperClient.class);
-    expectNew(CuratorZookeeperClient.class, "zkQ", 10000, 2000, null, retryPolicyMock)
-      .andReturn(clientMock);
-
-    clientMock.start();
-    expectLastCall().once();
-
-    clientMock.close();
-    expectLastCall().once();
-
-    ZooKeeper zkMock = PowerMock.createMock(ZooKeeper.class);
-    expect(clientMock.getZooKeeper()).andReturn(zkMock).once();
-
-    expect(zkMock.exists("/ambari-metrics-cluster", false)).andReturn(null).once();
-
-    replayAll();
-    MetricCollectorHAHelper metricCollectorHAHelper = new MetricCollectorHAHelper("zkQ", 1, 1000);
-    Collection<String> liveInstances = metricCollectorHAHelper.findLiveCollectorHostsFromZNode();
-    verifyAll();
-    Assert.assertTrue(liveInstances.isEmpty());
-  }
-
-  @Test
-  public void findCollectorUsingKnownCollectorTest() throws Exception {
-    HttpURLConnection connection = createNiceMock(HttpURLConnection.class);
-    URL url = createNiceMock(URL.class);
-    MetricCollectorHAHelper haHelper = createNiceMock(MetricCollectorHAHelper.class);
-
-    Gson gson = new Gson();
-    ArrayList<String> output = new ArrayList<>();
-    output.add("h1");
-    output.add("h2");
-    output.add("h3");
-    InputStream is = IOUtils.toInputStream(gson.toJson(output));
-
-    expectNew(URL.class, "http://localhost1:2181/ws/v1/timeline/metrics/livenodes").andReturn(url).anyTimes();
-    expectNew(URL.class, "http://localhost2:2181/ws/v1/timeline/metrics/livenodes").andReturn(url).anyTimes();
-    expect(url.openConnection()).andReturn(connection).anyTimes();
-    expect(connection.getInputStream()).andReturn(is).anyTimes();
-    expect(connection.getResponseCode()).andReturn(200).anyTimes();
-
-    replayAll();
-    TestTimelineMetricsSink sink = new TestTimelineMetricsSink(haHelper);
-    sink.init();
-
-    String host = sink.findPreferredCollectHost();
-    Assert.assertNotNull(host);
-    Assert.assertEquals("h3", host);
-
-    verifyAll();
-  }
-
-  private class TestTimelineMetricsSink extends AbstractTimelineMetricsSink {
-    MetricCollectorHAHelper testHelper;
-
-    TestTimelineMetricsSink(MetricCollectorHAHelper haHelper) {
-      testHelper = haHelper;
-    }
-
-    @Override
-    protected void init() {
-      super.init();
-      this.collectorHAHelper = testHelper;
-    }
-
-    @Override
-    protected synchronized String findPreferredCollectHost() {
-      return super.findPreferredCollectHost();
-    }
-
-    @Override
-    protected String getCollectorUri(String host) {
-      return null;
-    }
-
-    @Override
-    protected String getCollectorProtocol() {
-      return "http";
-    }
-
-    @Override
-    protected String getCollectorPort() {
-      return "2181";
-    }
-
-    @Override
-    protected int getTimeoutSeconds() {
-      return 10;
-    }
-
-    @Override
-    protected String getZookeeperQuorum() {
-      return "localhost1:2181";
-    }
-
-    @Override
-    protected Collection<String> getConfiguredCollectorHosts() {
-      return Arrays.asList("localhost1",  "localhost2");
-    }
-
-    @Override
-    protected String getHostname() {
-      return "h1";
-    }
-
-    @Override
-    protected boolean isHostInMemoryAggregationEnabled() {
-      return true;
-    }
-
-    @Override
-    protected int getHostInMemoryAggregationPort() {
-      return 61888;
-    }
-
-    @Override
-    protected String getHostInMemoryAggregationProtocol() {
-      return "http";
-    }
-  }
-}
diff --git a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/availability/ShardingStrategyTest.java b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/availability/ShardingStrategyTest.java
deleted file mode 100644
index 60ed824..0000000
--- a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/availability/ShardingStrategyTest.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline.availability;
-
-import junit.framework.Assert;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class ShardingStrategyTest {
-  @Test
-  public void testHostnameShardingStrategy() throws Exception {
-    List<String> collectorHosts = new ArrayList<String>() {{
-      add("mycollector-1.hostname.domain");
-      add("mycollector-2.hostname.domain");
-    }};
-
-    String hostname1 = "some-very-long-hostname-with-a-trailing-number-identifier-10.mylocalhost.domain";
-
-    // Consistency check
-    String collectorShard1 = null;
-    for (int i = 0; i < 100; i++) {
-      MetricSinkWriteShardStrategy strategy = new MetricSinkWriteShardHostnameHashingStrategy(hostname1);
-      collectorShard1 = strategy.findCollectorShard(collectorHosts);
-      Assert.assertEquals(collectorShard1, strategy.findCollectorShard(collectorHosts));
-    }
-
-    // Shard 2 hosts
-    String hostname2 = "some-very-long-hostname-with-a-trailing-number-identifier-20.mylocalhost.domain";
-    MetricSinkWriteShardStrategy strategy = new MetricSinkWriteShardHostnameHashingStrategy(hostname2);
-    String collectorShard2 = strategy.findCollectorShard(collectorHosts);
-
-    Assert.assertEquals("mycollector-1.hostname.domain", collectorShard1);
-    Assert.assertEquals("mycollector-2.hostname.domain", collectorShard2);
-  }
-
-  @Test
-  public void testShardStrategyOnOverflow() {
-    List<String> collectorHosts = new ArrayList<String>() {{
-      add("ambari-sid-4.c.pramod-thangali.internal");
-      add("ambari-sid-5.c.pramod-thangali.internal");
-    }};
-
-    MetricSinkWriteShardStrategy strategy = new MetricSinkWriteShardHostnameHashingStrategy("ambari-sid-4.c.pramod-thangali.internal");
-    String collector = strategy.findCollectorShard(collectorHosts);
-    Assert.assertTrue(collector != null && !collector.isEmpty());
-  }
-}
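The sharding contract these tests pin down, in a standalone sketch (hostnames are made up; only the constructor and findCollectorShard call are taken from the deleted code):

    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardHostnameHashingStrategy;
    import org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardStrategy;

    public class ShardingUsageSketch {
      public static void main(String[] args) {
        List<String> collectors = Arrays.asList("collector-1.domain", "collector-2.domain");
        // The strategy hashes the local hostname, so a given host always
        // lands on the same collector shard across calls.
        MetricSinkWriteShardStrategy strategy =
            new MetricSinkWriteShardHostnameHashingStrategy("worker-42.domain");
        System.out.println(strategy.findCollectorShard(collectors));
      }
    }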
diff --git a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/HandleConnectExceptionTest.java b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/HandleConnectExceptionTest.java
deleted file mode 100644
index 4bcc2fb..0000000
--- a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/HandleConnectExceptionTest.java
+++ /dev/null
@@ -1,243 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline.cache;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.Arrays;
-import java.util.Collection;
-
-import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.apache.hadoop.metrics2.sink.timeline.UnableToConnectException;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-import static org.easymock.EasyMock.anyString;
-import static org.easymock.EasyMock.expect;
-import static org.powermock.api.easymock.PowerMock.createNiceMock;
-import static org.powermock.api.easymock.PowerMock.expectNew;
-import static org.powermock.api.easymock.PowerMock.replayAll;
-
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({AbstractTimelineMetricsSink.class, URL.class, HttpURLConnection.class})
-public class HandleConnectExceptionTest {
-  private static final String COLLECTOR_URL = "collector";
-  private TestTimelineMetricsSink sink;
-
-  @Before
-  public void init(){
-    sink = new TestTimelineMetricsSink();
-    OutputStream os = createNiceMock(OutputStream.class);
-    HttpURLConnection connection = createNiceMock(HttpURLConnection.class);
-    URL url = createNiceMock(URL.class);
-    AbstractTimelineMetricsSink.NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS = 2;
-    try {
-      expectNew(URL.class, anyString()).andReturn(url).anyTimes();
-      expect(url.openConnection()).andReturn(connection).anyTimes();
-      expect(connection.getOutputStream()).andReturn(os).anyTimes();
-      expect(connection.getResponseCode()).andThrow(new IOException()).anyTimes();
-
-      replayAll();
-    } catch (Exception e) {
-      //no-op
-    }
-  }
-
-  @Test
-  public void handleTest(){
-    emitMetricsWithExpectedException(new TimelineMetrics());
-    try {
-      sink.emitMetrics(new TimelineMetrics());
-    } catch (Exception e) {
-      Assert.fail("There should be no exception");
-    }
-    emitMetricsWithExpectedException(new TimelineMetrics());
-  }
-
-  private void emitMetricsWithExpectedException(TimelineMetrics timelineMetrics) {
-    try {
-      sink.emitMetrics(timelineMetrics);
-      Assert.fail();
-    } catch (UnableToConnectException e) {
-      Assert.assertEquals(COLLECTOR_URL, e.getConnectUrl());
-    } catch (Exception e) {
-      e.printStackTrace();
-      Assert.fail(e.getMessage());
-    }
-  }
-
-  @Test
-  public void testEmitMetricsWithNullHost() {
-    TestTimelineMetricsSinkWithNullHost sinkWithNullHost = new TestTimelineMetricsSinkWithNullHost();
-
-    boolean success = sinkWithNullHost.emitMetrics(new TimelineMetrics());
-    Assert.assertFalse(success);
-
-    success = sinkWithNullHost.emitMetrics(new TimelineMetrics());
-    Assert.assertTrue(success);
-  }
-
-  private class TestTimelineMetricsSink extends AbstractTimelineMetricsSink{
-    @Override
-    protected String getCollectorUri(String host) {
-      return COLLECTOR_URL;
-    }
-
-    @Override
-    protected String getCollectorProtocol() {
-      return "http";
-    }
-
-    @Override
-    protected String getCollectorPort() {
-      return "2181";
-    }
-
-    @Override
-    protected int getTimeoutSeconds() {
-      return 10;
-    }
-
-    @Override
-    protected String getZookeeperQuorum() {
-      return "localhost:2181";
-    }
-
-    @Override
-    protected Collection<String> getConfiguredCollectorHosts() {
-      return Arrays.asList("localhost");
-    }
-
-    @Override
-    protected String getHostname() {
-      return "h1";
-    }
-
-    @Override
-    protected boolean isHostInMemoryAggregationEnabled() {
-      return false;
-    }
-
-    @Override
-    protected int getHostInMemoryAggregationPort() {
-      return 61888;
-    }
-
-    @Override
-    protected String getHostInMemoryAggregationProtocol() {
-      return "http";
-    }
-
-    @Override
-    public boolean emitMetrics(TimelineMetrics metrics) {
-      super.init();
-      return super.emitMetrics(metrics);
-    }
-
-    @Override
-    protected synchronized String findPreferredCollectHost() {
-      return "localhost";
-    }
-
-  }
-
-  private class TestTimelineMetricsSinkWithNullHost extends AbstractTimelineMetricsSink {
-
-    int ctr = 0;
-
-    @Override
-    protected String getCollectorUri(String host) {
-      return COLLECTOR_URL;
-    }
-
-    @Override
-    protected String getCollectorProtocol() {
-      return "http";
-    }
-
-    @Override
-    protected String getCollectorPort() {
-      return "2181";
-    }
-
-    @Override
-    protected int getTimeoutSeconds() {
-      return 10;
-    }
-
-    @Override
-    protected String getZookeeperQuorum() {
-      return "localhost:2181";
-    }
-
-    @Override
-    protected Collection<String> getConfiguredCollectorHosts() {
-      return Arrays.asList("localhost");
-    }
-
-    @Override
-    protected String getHostname() {
-      return "h1";
-    }
-
-    @Override
-    protected boolean isHostInMemoryAggregationEnabled() {
-      return false;
-    }
-
-    @Override
-    protected int getHostInMemoryAggregationPort() {
-      return 0;
-    }
-
-    @Override
-    protected String getHostInMemoryAggregationProtocol() {
-      return "http";
-    }
-
-    @Override
-    public boolean emitMetrics(TimelineMetrics metrics) {
-      super.init();
-      return super.emitMetrics(metrics);
-    }
-
-    @Override
-    protected synchronized String findPreferredCollectHost() {
-      if (ctr == 0) {
-        ctr++;
-        return null;
-      } else {
-        return "localhost";
-      }
-    }
-
-    @Override
-    protected boolean emitMetricsJson(String connectUrl, String jsonData) {
-      return true;
-    }
-
-  }
-
-}
diff --git a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/PostProcessingUtilTest.java b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/PostProcessingUtilTest.java
deleted file mode 100644
index d8387d0..0000000
--- a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/PostProcessingUtilTest.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline.cache;
-
-import junit.framework.Assert;
-import org.apache.hadoop.metrics2.sink.timeline.PostProcessingUtil;
-import org.apache.hadoop.metrics2.sink.timeline.SingleValuedTimelineMetric;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-public class PostProcessingUtilTest {
-
-  @Test
-  public void testInterpolateMissingData() throws Exception {
-
-    Map<Long, Double> metricValues = new TreeMap<Long, Double>();
-    long interval = 60*1000;
-
-    long currentTime = System.currentTimeMillis();
-
-    for(int i = 10; i>=1;i--) {
-      if (i%4 != 0 && i != 5) { //Skip time points 4,5,8
-        metricValues.put(currentTime - i*interval, (double)i);
-      }
-    }
-    metricValues = PostProcessingUtil.interpolateMissingData(metricValues, interval);
-    Assert.assertTrue(metricValues.size() == 10);
-
-    double sum = 0;
-    for (Map.Entry<Long, Double> entry : metricValues.entrySet()) {
-      sum += entry.getValue();
-    }
-    Assert.assertEquals(sum, 55.0);
-  }
-
-  @Test
-  public void testInterpolate() throws Exception {
-
-    long t2 = System.currentTimeMillis();
-    long t1 = t2 - 60000;
-    double interpolatedValue;
-
-    //Test Equal Values
-    interpolatedValue = PostProcessingUtil.interpolate((t1 + 30000), t1, 10.0, t2, 10.0);
-    Assert.assertEquals(interpolatedValue, 10.0);
-
-    //Test Linear increase Values
-    interpolatedValue = PostProcessingUtil.interpolate((t1 + 30000), t1, 10.0, t2, 20.0);
-    Assert.assertEquals(interpolatedValue, 15.0);
-
-    //Test Linear decrease Values
-    interpolatedValue = PostProcessingUtil.interpolate((t1 + 30000), t1, 20.0, t2, 10.0);
-    Assert.assertEquals(interpolatedValue, 15.0);
-
-    //Test interpolation with non mid point time
-    interpolatedValue = PostProcessingUtil.interpolate((t1 + 20000), t1, 15.0, t2, 30.0); // 1:2 ratio
-    Assert.assertEquals(interpolatedValue, 20.0);
-
-    //Test interpolation with past time
-    interpolatedValue = PostProcessingUtil.interpolate((t1 - 60000), t1, 20.0, t2, 30.0);
-    Assert.assertEquals(interpolatedValue, 10.0);
-
-  }
-
-  @Test
-  public void testLinearInterpolate() throws Exception {
-
-    long t2 = System.currentTimeMillis();
-
-    Map<Long, Double> valuesMap = new TreeMap<>();
-
-    valuesMap.put(t2 - 4 * 3000, 4.0);
-    valuesMap.put(t2 - 2 * 3000, 2.0);
-    valuesMap.put(t2 - 1 * 3000, 1.0);
-
-    List<Long> requiredTs = new ArrayList<Long>();
-    requiredTs.add(t2 - 5*3000);
-    requiredTs.add(t2 - 3*3000);
-    requiredTs.add(t2);
-
-    Map result = PostProcessingUtil.interpolate(valuesMap, requiredTs);
-
-    Assert.assertNotNull(result);
-    Assert.assertEquals(result.get(t2 - 5*3000), 5.0);
-    Assert.assertEquals(result.get(t2 - 3*3000), 3.0);
-    Assert.assertEquals(result.get(t2), 0.0);
-    System.out.println(result.toString());
-
-  }
-
-}
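The assertions in testInterpolate are all consistent with plain two-point linear interpolation; a sketch of that formula (not the deleted implementation itself):

    public class InterpolationSketch {
      // v(t) on the line through (t1, v1) and (t2, v2); extrapolation for
      // t outside [t1, t2] falls out of the same expression.
      static double interpolate(long t, long t1, double v1, long t2, double v2) {
        return v1 + (v2 - v1) * ((double) (t - t1) / (t2 - t1));
      }

      public static void main(String[] args) {
        System.out.println(interpolate(30, 0, 10.0, 60, 20.0));  // 15.0
      }
    }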
diff --git a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCacheTest.java b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCacheTest.java
deleted file mode 100644
index 18d973c..0000000
--- a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCacheTest.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.sink.timeline.cache;
-
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.junit.Test;
-
-import java.util.Map;
-import java.util.TreeMap;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-
-public class TimelineMetricsCacheTest {
-
-  private static final long DEFAULT_START_TIME = 1411023766;
-  private static final String METRIC_NAME = "Test name";
-  private static final double delta = 0.00001;
-
-  private final TimelineMetricsCache timelineMetricsCache =
-    new TimelineMetricsCache(TimelineMetricsCache.MAX_RECS_PER_NAME_DEFAULT,
-                             TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS);
-
-  @Test
-  public void testPutGetCounterTimelineMetric() throws Exception {
-    TimelineMetric metric = createTimelineMetric(new TreeMap<Long, Double>() {{
-      put(1L, 10.0);
-    }}, DEFAULT_START_TIME);
-    timelineMetricsCache.putTimelineMetric(metric, true);
-    metric = createTimelineMetric(new TreeMap<Long, Double>() {{
-      put(2L, 10.0);
-      put(3L, 20.0);
-      put(4L, 30.0);
-    }}, DEFAULT_START_TIME + 2 * TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS);
-    timelineMetricsCache.putTimelineMetric(metric, true);
-    TimelineMetric cachedMetric
-        = timelineMetricsCache.getTimelineMetric(METRIC_NAME);
-    assertEquals(0, cachedMetric.getMetricValues().get(1L), delta);
-    assertEquals(0, cachedMetric.getMetricValues().get(2L), delta);
-    assertEquals(10, cachedMetric.getMetricValues().get(3L), delta);
-    assertEquals(10, cachedMetric.getMetricValues().get(4L), delta);
-
-    metric = createTimelineMetric(new TreeMap<Long, Double>() {{
-      put(5L, 100.0);
-      put(6L, 120.0);
-      put(7L, 230.0);
-    }}, DEFAULT_START_TIME + 3 * TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS);
-    timelineMetricsCache.putTimelineMetric(metric, true);
-    metric = createTimelineMetric(new TreeMap<Long, Double>() {{
-      put(8L, 300.0);
-    }}, DEFAULT_START_TIME + 5 * TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS);
-
-    timelineMetricsCache.putTimelineMetric(metric, true);
-    cachedMetric = timelineMetricsCache.getTimelineMetric(METRIC_NAME);
-    assertEquals(70, cachedMetric.getMetricValues().get(5L), delta);
-    assertEquals(20, cachedMetric.getMetricValues().get(6L), delta);
-    assertEquals(110, cachedMetric.getMetricValues().get(7L), delta);
-    assertEquals(70, cachedMetric.getMetricValues().get(8L), delta);
-  }
-
-  @Test
-  public void testMaxRecsPerName() throws Exception {
-    int maxRecsPerName = 2;
-    int maxEvictionTime = TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS;
-    TimelineMetricsCache timelineMetricsCache =
-      new TimelineMetricsCache(maxRecsPerName, maxEvictionTime);
-
-    // put 2 metrics, no cache overflow
-    timelineMetricsCache.putTimelineMetric(
-      createTimelineMetricSingleValue(DEFAULT_START_TIME ));
-    timelineMetricsCache.putTimelineMetric(
-      createTimelineMetricSingleValue(DEFAULT_START_TIME + maxEvictionTime * 2));
-    TimelineMetric cachedMetric = timelineMetricsCache.getTimelineMetric(METRIC_NAME);
-    assertNotNull(cachedMetric);
-    assertFalse(cachedMetric.getMetricValues().isEmpty());
-    assertEquals("2 values added.", 2, cachedMetric.getMetricValues().size());
-    assertEquals(DEFAULT_START_TIME, cachedMetric.getStartTime());
-
-    // put 3 metrics, still no overflow: the size check runs before each put
-    timelineMetricsCache.putTimelineMetric(
-      createTimelineMetricSingleValue(DEFAULT_START_TIME ));
-    timelineMetricsCache.putTimelineMetric(
-      createTimelineMetricSingleValue(DEFAULT_START_TIME + maxEvictionTime * 2));
-    timelineMetricsCache.putTimelineMetric(
-      createTimelineMetricSingleValue(DEFAULT_START_TIME + maxEvictionTime * 3));
-    cachedMetric = timelineMetricsCache.getTimelineMetric(METRIC_NAME);
-    assertNotNull(cachedMetric);
-    assertFalse(cachedMetric.getMetricValues().isEmpty());
-    assertEquals("3 values added.", 3, cachedMetric.getMetricValues().size());
-    assertEquals(DEFAULT_START_TIME, cachedMetric.getStartTime());
-
-    // put 4 metric values, cache cleaned.
-    timelineMetricsCache.putTimelineMetric(
-      createTimelineMetricSingleValue(DEFAULT_START_TIME ));
-    timelineMetricsCache.putTimelineMetric(
-      createTimelineMetricSingleValue(DEFAULT_START_TIME + maxEvictionTime * 2));
-    timelineMetricsCache.putTimelineMetric(
-      createTimelineMetricSingleValue(DEFAULT_START_TIME + maxEvictionTime * 3));
-    timelineMetricsCache.putTimelineMetric(
-      createTimelineMetricSingleValue(DEFAULT_START_TIME + maxEvictionTime * 4));
-    cachedMetric = timelineMetricsCache.getTimelineMetric(METRIC_NAME);
-    assertNotNull(cachedMetric);
-    assertFalse(cachedMetric.getMetricValues().isEmpty());
-    // the size check happens before the put, so adding the 4th value
-    // evicts the first one
-    assertEquals("1 metric value should have been removed", 3, cachedMetric.getMetricValues().size());
-    // the first value was removed, so the start time now equals the second value's start time
-    assertEquals(DEFAULT_START_TIME + maxEvictionTime * 2, cachedMetric.getStartTime());
-  }
-
-  private TimelineMetric createTimelineMetricSingleValue(final long startTime) {
-    TreeMap<Long, Double> values = new TreeMap<Long, Double>();
-    values.put(startTime, 0.0);
-    return createTimelineMetric(values, startTime);
-  }
-
-  private TimelineMetric createTimelineMetric(Map<Long, Double> metricValues,
-                                              long startTime) {
-    TimelineMetric timelineMetric = new TimelineMetric();
-    timelineMetric.setMetricName(METRIC_NAME);
-    timelineMetric.setHostName("Test hostName");
-    timelineMetric.setAppId("test serviceName");
-    timelineMetric.setStartTime(startTime);
-    timelineMetric.setType("Number");
-    timelineMetric.setMetricValues(new TreeMap<Long, Double>(metricValues));
-    return timelineMetric;
-  }
-
-}
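The counter expectations in testPutGetCounterTimelineMetric above reduce to simple differencing against the last raw value seen; the same numbers worked through:

    first put:  raw {1 -> 10}                    cached {1 -> 0}   (first value seeds "previous")
    second put: raw {2 -> 10, 3 -> 20, 4 -> 30}  cached {2 -> 0, 3 -> 10, 4 -> 10}
    (each cached value = raw value - previous raw value)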
diff --git a/ambari-metrics/ambari-metrics-flume-sink/pom.xml b/ambari-metrics/ambari-metrics-flume-sink/pom.xml
deleted file mode 100644
index 72ebb27..0000000
--- a/ambari-metrics/ambari-metrics-flume-sink/pom.xml
+++ /dev/null
@@ -1,175 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
-                             http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <parent>
-    <artifactId>ambari-metrics</artifactId>
-    <groupId>org.apache.ambari</groupId>
-    <version>2.0.0.0-SNAPSHOT</version>
-  </parent>
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>ambari-metrics-flume-sink</artifactId>
-  <version>2.0.0.0-SNAPSHOT</version>
-  <name>Ambari Metrics Flume Sink</name>
-  <packaging>jar</packaging>
-  <build>
-    <plugins>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assemblies/jar-with-common.xml</descriptor>
-              </descriptors>
-              <attach>false</attach>
-              <tarLongFileMode>gnu</tarLongFileMode>
-              <appendAssemblyId>false</appendAssemblyId>
-              <finalName>${project.artifactId}-with-common-${project.version}</finalName>
-            </configuration>
-            <id>build-jar</id>
-            <phase>package</phase>
-            <goals>
-              <goal>single</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>3.0</version>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>build-helper-maven-plugin</artifactId>
-        <version>1.8</version>
-        <executions>
-          <execution>
-            <id>parse-version</id>
-            <phase>validate</phase>
-            <goals>
-              <goal>parse-version</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>regex-property</id>
-            <goals>
-              <goal>regex-property</goal>
-            </goals>
-            <configuration>
-              <name>ambariVersion</name>
-              <value>${project.version}</value>
-              <regex>^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-).*</regex>
-              <replacement>$1.$2.$3.$4</replacement>
-              <failIfNoMatch>false</failIfNoMatch>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>com.github.goldin</groupId>
-        <artifactId>copy-maven-plugin</artifactId>
-        <version>0.2.5</version>
-        <executions>
-          <execution>
-            <id>create-archive</id>
-            <phase>none</phase>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.vafer</groupId>
-        <artifactId>jdeb</artifactId>
-        <version>1.0.1</version>
-        <executions>
-          <execution>
-            <!--Stub execution on direct plugin call - workaround for ambari deb build process-->
-            <id>stub-execution</id>
-            <phase>none</phase>
-            <goals>
-              <goal>jdeb</goal>
-            </goals>
-          </execution>
-        </executions>
-        <configuration>
-          <skip>true</skip>
-          <attach>false</attach>
-          <submodules>false</submodules>
-          <controlDir>${project.basedir}/../src/main/package/deb/control</controlDir>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.flume</groupId>
-      <artifactId>flume-ng-core</artifactId>
-      <version>1.5.1</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <artifactId>libthrift</artifactId>
-          <groupId>org.apache.thrift</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jetty-util</artifactId>
-          <groupId>org.mortbay.jetty</groupId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-metrics-common</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-      <version>1.9.13</version>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-      <version>4.10</version>
-    </dependency>
-    <dependency>
-      <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
-      <version>3.2</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-api-easymock</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-module-junit4</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-      <version>14.0.1</version>
-    </dependency>
-  </dependencies>
-</project>
diff --git a/ambari-metrics/ambari-metrics-flume-sink/src/main/assemblies/empty.xml b/ambari-metrics/ambari-metrics-flume-sink/src/main/assemblies/empty.xml
deleted file mode 100644
index 35738b1..0000000
--- a/ambari-metrics/ambari-metrics-flume-sink/src/main/assemblies/empty.xml
+++ /dev/null
@@ -1,21 +0,0 @@
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-  
-       http://www.apache.org/licenses/LICENSE-2.0
-  
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<assembly>
-    <id>empty</id>
-    <formats/>
-</assembly>
diff --git a/ambari-metrics/ambari-metrics-flume-sink/src/main/assemblies/jar-with-common.xml b/ambari-metrics/ambari-metrics-flume-sink/src/main/assemblies/jar-with-common.xml
deleted file mode 100644
index bfd8b29..0000000
--- a/ambari-metrics/ambari-metrics-flume-sink/src/main/assemblies/jar-with-common.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<assembly>
-  <id>jar-with-common</id>
-  <formats>
-    <format>jar</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <dependencySets>
-    <dependencySet>
-      <fileMode>644</fileMode>
-      <outputDirectory>/</outputDirectory>
-      <unpack>true</unpack>
-      <includes>
-        <include>org.apache.ambari:ambari-metrics-common</include>
-        <include>org.apache.ambari:ambari-metrics-flume-sink</include>
-      </includes>
-    </dependencySet>
-  </dependencySets>
-</assembly>
diff --git a/ambari-metrics/ambari-metrics-flume-sink/src/main/conf/flume-metrics2.properties.j2 b/ambari-metrics/ambari-metrics-flume-sink/src/main/conf/flume-metrics2.properties.j2
deleted file mode 100644
index 58c5f09..0000000
--- a/ambari-metrics/ambari-metrics-flume-sink/src/main/conf/flume-metrics2.properties.j2
+++ /dev/null
@@ -1,31 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-collector=http://localhost:6188
-collectionFrequency=60000
-maxRowCacheSize=10000
-sendInterval={{metrics_report_interval}}000
-clusterReporterAppId=nimbus
-host_in_memory_aggregation={{host_in_memory_aggregation}}
-host_in_memory_aggregation_port={{host_in_memory_aggregation_port}}
-{% if is_aggregation_https_enabled %}
-host_in_memory_aggregation_protocol={{host_in_memory_aggregation_protocol}}
-{% endif %}
-
-# Metric names having type COUNTER
-counters=EventTakeSuccessCount,EventPutSuccessCount,EventTakeAttemptCount,EventPutAttemptCount
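-
-# Illustrative rendered values (assuming Ambari supplies metrics_report_interval=60
-# and leaves in-memory aggregation at its defaults): sendInterval=60000,
-# host_in_memory_aggregation=false, host_in_memory_aggregation_port=61888.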
diff --git a/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
deleted file mode 100644
index 720c371..0000000
--- a/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
+++ /dev/null
@@ -1,272 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics2.sink.flume;
-
-import org.apache.commons.lang.math.NumberUtils;
-import org.apache.flume.Context;
-import org.apache.flume.FlumeException;
-import org.apache.flume.instrumentation.MonitorService;
-import org.apache.flume.instrumentation.util.JMXPollUtil;
-import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.apache.hadoop.metrics2.sink.timeline.UnableToConnectException;
-import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;
-import org.apache.hadoop.metrics2.sink.timeline.configuration.Configuration;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-public class FlumeTimelineMetricsSink extends AbstractTimelineMetricsSink implements MonitorService {
-  private String collectorUri;
-  private String protocol;
-  // Key - component(instance_id)
-  private Map<String, TimelineMetricsCache> metricsCaches;
-  private int maxRowCacheSize;
-  private int metricsSendInterval;
-  private ScheduledExecutorService scheduledExecutorService;
-  private long pollFrequency;
-  private String hostname;
-  private String port;
-  private Collection<String> collectorHosts;
-  private String zookeeperQuorum;
-  private final static String COUNTER_METRICS_PROPERTY = "counters";
-  private final Set<String> counterMetrics = new HashSet<String>();
-  private int timeoutSeconds = 10;
-  private boolean setInstanceId;
-  private String instanceId;
-  private boolean hostInMemoryAggregationEnabled;
-  private int hostInMemoryAggregationPort;
-  private String hostInMemoryAggregationProtocol;
-
-
-  @Override
-  public void start() {
-    LOG.info("Starting Flume Metrics Sink");
-    TimelineMetricsCollector timelineMetricsCollector = new TimelineMetricsCollector();
-    if (scheduledExecutorService == null || scheduledExecutorService.isShutdown() || scheduledExecutorService.isTerminated()) {
-      scheduledExecutorService = Executors.newSingleThreadScheduledExecutor();
-    }
-    scheduledExecutorService.scheduleWithFixedDelay(timelineMetricsCollector, 0,
-        pollFrequency, TimeUnit.MILLISECONDS);
-  }
-
-  @Override
-  public void stop() {
-    LOG.info("Stopping Flume Metrics Sink");
-    scheduledExecutorService.shutdown();
-  }
-
-  @Override
-  public void configure(Context context) {
-    LOG.info("Context parameters " + context);
-    try {
-      hostname = InetAddress.getLocalHost().getHostName();
-      // If not an FQDN, fall back to DNS for the canonical name
-      if ((hostname == null) || (!hostname.contains("."))) {
-        hostname = InetAddress.getLocalHost().getCanonicalHostName();
-      }
-      hostname = hostname.toLowerCase();
-
-    } catch (UnknownHostException e) {
-      LOG.error("Could not identify hostname.");
-      throw new FlumeException("Could not identify hostname.", e);
-    }
-    Configuration configuration = new Configuration("/flume-metrics2.properties");
-    timeoutSeconds = Integer.parseInt(configuration.getProperty(METRICS_POST_TIMEOUT_SECONDS,
-        String.valueOf(DEFAULT_POST_TIMEOUT_SECONDS)));
-    maxRowCacheSize = Integer.parseInt(configuration.getProperty(MAX_METRIC_ROW_CACHE_SIZE,
-        String.valueOf(TimelineMetricsCache.MAX_RECS_PER_NAME_DEFAULT)));
-    metricsSendInterval = Integer.parseInt(configuration.getProperty(METRICS_SEND_INTERVAL,
-        String.valueOf(TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS)));
-    metricsCaches = new HashMap<String, TimelineMetricsCache>();
-    collectorHosts = parseHostsStringIntoCollection(configuration.getProperty(COLLECTOR_HOSTS_PROPERTY));
-    zookeeperQuorum = configuration.getProperty("zookeeper.quorum");
-    protocol = configuration.getProperty(COLLECTOR_PROTOCOL, "http");
-    port = configuration.getProperty(COLLECTOR_PORT, "6188");
-    setInstanceId = Boolean.valueOf(configuration.getProperty(SET_INSTANCE_ID_PROPERTY, "false"));
-    instanceId = configuration.getProperty(INSTANCE_ID_PROPERTY, "");
-
-    hostInMemoryAggregationEnabled = Boolean.parseBoolean(configuration.getProperty(HOST_IN_MEMORY_AGGREGATION_ENABLED_PROPERTY, "false"));
-    hostInMemoryAggregationPort = Integer.valueOf(configuration.getProperty(HOST_IN_MEMORY_AGGREGATION_PORT_PROPERTY, "61888"));
-    hostInMemoryAggregationProtocol = configuration.getProperty(HOST_IN_MEMORY_AGGREGATION_PROTOCOL_PROPERTY, "http");
-    // Initialize the collector write strategy
-    super.init();
-
-    if (protocol.contains("https") || hostInMemoryAggregationProtocol.contains("https")) {
-      String trustStorePath = configuration.getProperty(SSL_KEYSTORE_PATH_PROPERTY).trim();
-      String trustStoreType = configuration.getProperty(SSL_KEYSTORE_TYPE_PROPERTY).trim();
-      String trustStorePwd = configuration.getProperty(SSL_KEYSTORE_PASSWORD_PROPERTY).trim();
-      loadTruststore(trustStorePath, trustStoreType, trustStorePwd);
-    }
-    collectorUri = constructTimelineMetricUri(protocol, findPreferredCollectHost(), port);
-
-    pollFrequency = Long.parseLong(configuration.getProperty("collectionFrequency"));
-
-    String[] metrics = configuration.getProperty(COUNTER_METRICS_PROPERTY).trim().split(",");
-    Collections.addAll(counterMetrics, metrics);
-  }
-
-  @Override
-  public String getCollectorUri(String host) {
-    return constructTimelineMetricUri(protocol, host, port);
-  }
-
-  @Override
-  protected String getCollectorProtocol() {
-    return protocol;
-  }
-
-  @Override
-  protected String getCollectorPort() {
-    return port;
-  }
-
-  @Override
-  protected int getTimeoutSeconds() {
-    return timeoutSeconds;
-  }
-
-  @Override
-  protected String getZookeeperQuorum() {
-    return zookeeperQuorum;
-  }
-
-  @Override
-  protected Collection<String> getConfiguredCollectorHosts() {
-    return collectorHosts;
-  }
-
-  @Override
-  protected String getHostname() {
-    return hostname;
-  }
-
-  @Override
-  protected boolean isHostInMemoryAggregationEnabled() {
-    return hostInMemoryAggregationEnabled;
-  }
-
-  @Override
-  protected int getHostInMemoryAggregationPort() {
-    return hostInMemoryAggregationPort;
-  }
-
-  @Override
-  protected String getHostInMemoryAggregationProtocol() {
-    return hostInMemoryAggregationProtocol;
-  }
-
-  public void setPollFrequency(long pollFrequency) {
-    this.pollFrequency = pollFrequency;
-  }
-
-  // Test helper method
-  protected void setMetricsCaches(Map<String, TimelineMetricsCache> metricsCaches) {
-    this.metricsCaches = metricsCaches;
-  }
-
-  /**
-   * Worker which polls JMX for all mbeans with
-   * {@link javax.management.ObjectName} within the flume namespace:
-   * org.apache.flume. All attributes of such beans are sent
-   * to the metrics collector service.
-   */
-  class TimelineMetricsCollector implements Runnable {
-    @Override
-    public void run() {
-      LOG.debug("Collecting Metrics for Flume");
-      try {
-        Map<String, Map<String, String>> metricsMap = JMXPollUtil.getAllMBeans();
-        long currentTimeMillis = System.currentTimeMillis();
-        for (String component : metricsMap.keySet()) {
-          Map<String, String> attributeMap = metricsMap.get(component);
-          LOG.debug("Attributes for component " + component);
-          processComponentAttributes(currentTimeMillis, component, attributeMap);
-        }
-      } catch (UnableToConnectException uce) {
-        LOG.warn("Unable to send metrics to collector by address:" + uce.getConnectUrl());
-      } catch (Exception e) {
-        LOG.error("Unexpected error", e);
-      }
-      LOG.debug("Finished collecting Metrics for Flume");
-    }
-
-    private void processComponentAttributes(long currentTimeMillis, String component, Map<String, String> attributeMap) throws IOException {
-      List<TimelineMetric> metricList = new ArrayList<TimelineMetric>();
-      if (!metricsCaches.containsKey(component)) {
-        metricsCaches.put(component, new TimelineMetricsCache(maxRowCacheSize, metricsSendInterval));
-      }
-      TimelineMetricsCache metricsCache = metricsCaches.get(component);
-      for (String attributeName : attributeMap.keySet()) {
-        String attributeValue = attributeMap.get(attributeName);
-        if (NumberUtils.isNumber(attributeValue)) {
-          LOG.info(attributeName + " = " + attributeValue);
-          TimelineMetric timelineMetric = createTimelineMetric(currentTimeMillis,
-              component, attributeName, attributeValue);
-          // Put intermediate values into the cache until it is time to send
-          metricsCache.putTimelineMetric(timelineMetric, isCounterMetric(attributeName));
-
-          TimelineMetric cachedMetric = metricsCache.getTimelineMetric(attributeName);
-
-          if (cachedMetric != null) {
-            metricList.add(cachedMetric);
-          }
-        }
-      }
-
-      if (!metricList.isEmpty()) {
-        TimelineMetrics timelineMetrics = new TimelineMetrics();
-        timelineMetrics.setMetrics(metricList);
-        emitMetrics(timelineMetrics);
-      }
-    }
-
-    private TimelineMetric createTimelineMetric(long currentTimeMillis, String component, String attributeName, String attributeValue) {
-      TimelineMetric timelineMetric = new TimelineMetric();
-      timelineMetric.setMetricName(attributeName);
-      timelineMetric.setHostName(hostname);
-      if (setInstanceId) {
-        timelineMetric.setInstanceId(instanceId + component);
-      } else {
-        timelineMetric.setInstanceId(component);
-      }
-      timelineMetric.setAppId("FLUME_HANDLER");
-      timelineMetric.setStartTime(currentTimeMillis);
-      timelineMetric.getMetricValues().put(currentTimeMillis, Double.parseDouble(attributeValue));
-      return timelineMetric;
-    }
-  }
-
-  private boolean isCounterMetric(String attributeName) {
-    return counterMetrics.contains(attributeName);
-  }
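-
-  // Usage note (illustrative): Flume loads the MonitorService implementation named by
-  // the flume.monitoring.type system property, so this sink is typically enabled by
-  // starting the agent with
-  //   -Dflume.monitoring.type=org.apache.hadoop.metrics2.sink.flume.FlumeTimelineMetricsSink
-  // and placing flume-metrics2.properties on the agent classpath (see configure() above).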
-}
diff --git a/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java b/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
deleted file mode 100644
index 99da43f..0000000
--- a/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics2.sink.flume;
-
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.anyString;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.powermock.api.easymock.PowerMock.mockStatic;
-import static org.powermock.api.easymock.PowerMock.replay;
-import static org.powermock.api.easymock.PowerMock.resetAll;
-import static org.powermock.api.easymock.PowerMock.verifyAll;
-
-import java.net.InetAddress;
-import java.util.Collections;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.flume.Context;
-import org.apache.flume.instrumentation.util.JMXPollUtil;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;
-import org.apache.hadoop.metrics2.sink.timeline.configuration.Configuration;
-import org.easymock.EasyMock;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.api.easymock.PowerMock;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({JMXPollUtil.class, Executors.class, FlumeTimelineMetricsSink.class})
-public class FlumeTimelineMetricsSinkTest {
-  @Test
-  public void testNonNumericMetricMetricExclusion() throws InterruptedException {
-    FlumeTimelineMetricsSink flumeTimelineMetricsSink = new FlumeTimelineMetricsSink();
-    FlumeTimelineMetricsSink.TimelineMetricsCollector collector =
-      flumeTimelineMetricsSink.new TimelineMetricsCollector();
-    mockStatic(JMXPollUtil.class);
-    EasyMock.expect(JMXPollUtil.getAllMBeans()).andReturn(
-        Collections.singletonMap("component1", Collections.singletonMap("key1", "value1"))).once();
-    replay(JMXPollUtil.class);
-    collector.run();
-    verifyAll();
-  }
-
-  @Test
-  public void testNumericMetricSubmission() throws InterruptedException {
-    FlumeTimelineMetricsSink flumeTimelineMetricsSink = new FlumeTimelineMetricsSink();
-    FlumeTimelineMetricsSink.TimelineMetricsCollector collector =
-      flumeTimelineMetricsSink.new TimelineMetricsCollector();
-    mockStatic(JMXPollUtil.class);
-    EasyMock.expect(JMXPollUtil.getAllMBeans()).andReturn(
-        Collections.singletonMap("component1", Collections.singletonMap("key1", "42"))).once();
-    replay(JMXPollUtil.class);
-    collector.run();
-    verifyAll();
-  }
-
-  private TimelineMetricsCache getTimelineMetricsCache(FlumeTimelineMetricsSink flumeTimelineMetricsSink) {
-    TimelineMetricsCache timelineMetricsCache = EasyMock.createNiceMock(TimelineMetricsCache.class);
-    flumeTimelineMetricsSink.setMetricsCaches(Collections.singletonMap("SINK",timelineMetricsCache));
-    EasyMock.expect(timelineMetricsCache.getTimelineMetric("key1"))
-        .andReturn(new TimelineMetric()).once();
-    timelineMetricsCache.putTimelineMetric(anyObject(TimelineMetric.class));
-    EasyMock.expectLastCall().once();
-    return timelineMetricsCache;
-  }
-
-  @Test
-  public void testMonitorRestart() throws InterruptedException {
-    FlumeTimelineMetricsSink flumeTimelineMetricsSink = new FlumeTimelineMetricsSink();
-    TimelineMetricsCache timelineMetricsCache = getTimelineMetricsCache(flumeTimelineMetricsSink);
-    flumeTimelineMetricsSink.setPollFrequency(1);
-    mockStatic(Executors.class);
-    ScheduledExecutorService executor = createNiceMock(ScheduledExecutorService.class);
-    expect(Executors.newSingleThreadScheduledExecutor()).andReturn(executor);
-    FlumeTimelineMetricsSink.TimelineMetricsCollector collector = anyObject();
-    TimeUnit unit = anyObject();
-    expect(executor.scheduleWithFixedDelay(collector, eq(0), eq(1), unit)).andReturn(null);
-    executor.shutdown();
-    replay(timelineMetricsCache, Executors.class, executor);
-
-    flumeTimelineMetricsSink.start();
-    flumeTimelineMetricsSink.stop();
-
-    verifyAll();
-  }
-
-  @Test
-  public void testMetricsRetrievalExceptionTolerance() throws InterruptedException {
-    FlumeTimelineMetricsSink flumeTimelineMetricsSink = new FlumeTimelineMetricsSink();
-    FlumeTimelineMetricsSink.TimelineMetricsCollector collector =
-      flumeTimelineMetricsSink.new TimelineMetricsCollector();
-    mockStatic(JMXPollUtil.class);
-    EasyMock.expect(JMXPollUtil.getAllMBeans()).
-        andThrow(new RuntimeException("Failed to retrieve Flume Properties")).once();
-    replay(JMXPollUtil.class);
-    collector.run();
-    verifyAll();
-  }
-
-  @Test
-  @PrepareForTest({Configuration.class, FlumeTimelineMetricsSink.class})
-  public void testGettingFqdn() throws Exception {
-    FlumeTimelineMetricsSink flumeTimelineMetricsSink = new FlumeTimelineMetricsSink();
-    Configuration config = createNiceMock(Configuration.class);
-
-    expect(config.getProperty(anyString(), anyString()))
-      .andReturn("60")
-      .anyTimes();
-    expect(config.getProperty(anyString()))
-      .andReturn("60")
-      .anyTimes();
-    replay(config);
-
-    PowerMock.expectNew(Configuration.class, anyString())
-      .andReturn(config);
-    replay(Configuration.class);
-
-    // getHostName() returned FQDN
-    InetAddress address = createNiceMock(InetAddress.class);
-    expect(address.getHostName()).andReturn("hostname.domain").once();
-    replay(address);
-
-    mockStatic(InetAddress.class);
-    expect(InetAddress.getLocalHost()).andReturn(address).once();
-    replay(InetAddress.class);
-
-    flumeTimelineMetricsSink.configure(new Context());
-    verifyAll();
-
-    resetAll();
-
-    PowerMock.expectNew(Configuration.class, anyString())
-      .andReturn(config);
-    replay(Configuration.class);
-
-    // getHostName() returned short hostname, getCanonicalHostName() called
-    address = createNiceMock(InetAddress.class);
-    expect(address.getHostName()).andReturn("hostname").once();
-    expect(address.getCanonicalHostName()).andReturn("hostname.domain").once();
-    replay(address);
-
-    mockStatic(InetAddress.class);
-    expect(InetAddress.getLocalHost()).andReturn(address).times(2);
-    replay(InetAddress.class);
-
-    flumeTimelineMetricsSink.configure(new Context());
-    verifyAll();
-
-  }
-
-}
diff --git a/ambari-metrics/ambari-metrics-grafana/README.md b/ambari-metrics/ambari-metrics-grafana/README.md
deleted file mode 100644
index af2b5cb..0000000
--- a/ambari-metrics/ambari-metrics-grafana/README.md
+++ /dev/null
@@ -1,281 +0,0 @@
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-# AMS (Ambari Metrics Service) Datasource Plugin for Grafana
-
-Use **ambari-metrics** to visualize metrics exposed via AMS in Grafana. 
-
-### If you already have Ambari Metrics UI as a part of your AMS Install, [go here](#createdash) to get started
-
-
-**ToC**
- - [Install Grafana](#installg)
- - [Install Datasource Plugin](#installam)
- - [Access Grafana](#accessgraf)
- - [Add Datasource to Grafana](#addds)
-  	- [Test Datasource](#testds)
- - [Create Dashboard](#createdash)
- - [Add a Graph](#addgraph)
- - [Save Dashboard](#savedash)
- - [Time Ranges](#timerange)
- - [Edit Panel/Graph](#editpanel)
- - [Templated Dashboards](#templating)
-    - [Multi Host Templated Dashboards](#multi-templating)
-
-
-----------
-![Full Dashboard](screenshots/full-dashboard.png)
-
-----------
-<a name="installg"></a>
-### Install Grafana
-
-
-You can install Grafana on any host.  It does not need to be co-located with Ambari Metrics Collector.  The only requirement is that it has network access to Ambari Metrics Collector.
-
-**Install on CentOS/Red Hat:**
-```
-sudo yum install https://grafanarel.s3.amazonaws.com/builds/grafana-2.6.0-1.x86_64.rpm
-```
-
-**Install on Ubuntu/Debian:**
-```
-wget https://grafanarel.s3.amazonaws.com/builds/grafana_2.6.0_amd64.deb
-sudo apt-get install -y adduser libfontconfig
-sudo dpkg -i grafana_2.6.0_amd64.deb
-```
-
-**Install on SUSE/SLES:**
-```
-wget https://grafanarel.s3.amazonaws.com/builds/grafana-2.6.0-1.x86_64.rpm
-sudo rpm -i --nodeps grafana-2.6.0-1.x86_64.rpm
-```
-<a name="installam"></a> 
-### Deploy ambari-metrics 
-
-**On your Grafana Server**
-
-```
-cp -R ambari/ambari-metrics/ambari-metrics-grafana/ambari-metrics /usr/share/grafana/public/app/plugins/datasource
-```
-
-### Start Grafana
-
-```
-sudo service grafana-server start
-```
-
-<a name="accessgraf"></a> 
-### Access Grafana
-
-```
-http://GRAFANA_HOST:3000 
-```
-
----
-
-<a name="addds"></a>
-## Add Ambari Metrics Datasource in Grafana UI
-
-**Add a Datasource**
->	- Click on "Datasources"
-> 	- Click on "Add New" at the top 
-
-![add-datasource](screenshots/1-add-datasource.png)
-
-**Add a Datasource (continued)**
-
-> 	1. Name of your Datasource
->	2. Type = AmbariMetrics
-> 	3. Host+Port of your AMS installation. (usually host:6188) 
->  		- No trailing slashes
-> 		- Nothing else needs to be changed
->  		- Click on Save.
-
-![datasource-details](screenshots/2-datasource-details.png)
-
-
-<a name="testds"></a>
-**Test your Datasource**
-
->	To make sure it's all working, click on **Test Connection** and you should see a message that says "Data source is working". 
-
-
-![test-datasource](screenshots/3-test-datasource.png)
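-
->	If Test Connection fails, a quick sanity check (adjust host and port for your install) is to query, from the Grafana host, the metadata endpoint the plugin itself uses:
-
-```
-curl http://AMS_HOST:6188/ws/v1/timeline/metrics/metadata
-```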
-
----
-
-<a name="createdash"></a>
-## Creating a Dashboard
-
-**To create a dashboard**
-
->	- Click on Dashboards on the left
->	- Click on "Home"
->	- Click on New at the bottom of the dropdown 
-
-![Dashboard Dropdown](screenshots/4-dashboard-dropdown.png)
-
-
-
-**To add a panel to your newly created dashboard**
-
->	- Click on the green button on the left (see image below)
->	- This will expand a flyout menu that will allow you to add a panel
->	- Choose Graph / Table / Single Stat
-
-![Add Panel](screenshots/5-dashboard-graph-menu.png)
-
-![Types of Panels](screenshots/6-graph-panels.png)
-
-
----
-
-<a name="addgraph"></a>
-**To add a Graph**
-
-
->	- Choose the Datasource you created earlier
->	- Once you've chosen the datasource, you should see the query editor show you some options
-
-![Add a Graph](screenshots/7-choose-datasource.png)
-
-
-
->	- Choose the component you wish to see metrics for
-
-![Add a Graph](screenshots/8-choose-component.png)
-
-
-
->	- Based on the component chosen, you should now see a list of metrics for it
-
-![Add a Graph](screenshots/9-choose-metric.png)
-
-
-
->	- Choose a hostname from the list of hosts if you wish to see metrics for a specific host.
->		- If no hostname is chosen, metrics are shown at the service component level. 
-
-![Add a Graph](screenshots/10-choose-hostname.png)
-
-
-> 	- By default the aggregator is avg. You can change it via the dropdown
-> 	- You can choose to enable Rate by selecting the checkbox.
-> 	- You can specify precision by checking the box and then selecting "days, hours, minutes or seconds"
-
-![Select Options](screenshots/11-choose-agg-rate-precision.png)
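-
-> 	- Under the hood, these selections are encoded into the AMS request (a sketch based on the datasource code; placeholders are illustrative): the transform and aggregator become metric-name suffixes and precision becomes a query parameter, e.g. `metricNames=<metric>._rate._avg&precision=seconds`.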
-
-
-**To change the title of the Panel**
-
->	- Click on the "General" tab
->	- Enter the name to change the title of the panel
-
-![Change Panel Title](screenshots/12-change-panel-title.png)
-
-**To change the Units for your metric**
-
->	- You can edit the units of your graph by clicking on the **Axes & Grid** tab and clicking on "unit" as shown.
-
-![Change Units](screenshots/15-change-units.png)
-
-**To customise your graphs**
-
-> 	- You can customise your graph by clicking on the **Display Styles** tab.
-> 	- For example, you can change the color of a specific metric by choosing a series-specific override at the bottom.
-
-![series specific override](screenshots/17-series-specific-override.png)
-
-
-<a name="savedash"></a>
-**To Save the Dashboard**
-
-> 	- Click on the save icon next to the dashboard list dropdown on the top to save your dashboard.
-
-![Save Dashboard](screenshots/13-save-dashboard.png)
-
-<a name="editpanel"></a>
-**To Edit a Graph**
-
-> 	- Click on the title of your graph/panel and click on edit.
-
-![Edit Graph](screenshots/19-edit-graph.png)
-
-
----
-<a name="timerange"></a>
-### Time Ranges
-
-**To change the Time Range**
-
->	- To change the time range, click on the top right of your UI.
->		- This setting affects all the graphs inside the dashboard. If you wish to customise the time range for a specific graph, [look here](#timeshift).
-> 	- You can use the quick ranges provided or choose a time range of your choice. You can also choose a refresh duration for your dashboard or leave it at "off" to manually refresh.
-
-![Timerange](screenshots/14-change-timerange.png)
-
-<a name="timeshift"></a>
-**To change the time range of one graph only**
-
->	- Use this in case you wish to change the time range for a specific graph without affecting the other graphs in your dashboard
->		- Click on the **Time Range** tab of your Graph
->		- You can then enter a value in the "Override Relative time" input box
->		- You will be able to confirm that this change has occurred by looking at the top right of your graph, which will show the override message.
->		- You can choose to hide this message if you wish to do so (by checking the "hide time override info")
-
-![Timerange Override](screenshots/18-override-time.png)
-
-
----
-
-<a name="templating"></a>
-### Templating
-
-**Templating allows you to dynamically change graphs by selecting your host from a dropdown. To create a templated dashboard, follow these steps (a sketch of the resulting request appears after the screenshot below).**
-
-> 1. Click on the "cog" on the top, select "Templating"
-> 2. Click on "+ New"
-> 3. Enter a name for your variable. Ex: "hosts".
-> 4. Type: query
-> 5. Datasource: This should be set to the name you had chosen for your Datasource. Default is "AMBARI_METRICS"
-> 6. Query: This needs to be "hosts". It is case-sensitive as well.
-> 7. Once you've entered the query and moved focus elsewhere, you should see the "Preview of values" updated with the hostnames in your cluster.
-> 8. You can close the Templating Variables section.
-> 9. You should now have a dropdown on your dashboard with hosts from your cluster.
-> 10. When you now add a graph, and select your component and metric, the plotted graph will show you metrics for the selected hostname from the dropdown.
-> 11. The legend on the graph will also now update with the selected host.
-
-**Templated dashboards do support multiple metrics in a single graph.** 
-
-
-![Templating](screenshots/20-templating.png)
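-
-**Under the hood, the selected host is passed through as the `hostname` parameter of the metrics request. A sketch of the resulting call, with placeholder values:**
-
-```
-/ws/v1/timeline/metrics?metricNames=<metric>&hostname=<selected host>&appId=<component>&startTime=<from>&endTime=<to>
-```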
-
----
-
-<a name="multi-templating"></a>
-### Multi Host Templated Dashboards
-
-**Templated dashboards now have the ability to filter graphs based on a single host or multiple hosts.**
-
-> 1. Once you've created your templated dashboard, you can edit it again by clicking on the "cog" at the top and selecting "Templating".
-> 2. Click on "Edit" for your templating variable.
-> 3. To be able to select multiple hosts, set multi-value selection to "enable" and leave multi-format set to "glob".
-> 4. To have an option for all hosts, select "All Value", set it to "*", and set All format to "wildcard".
-> 5. Hit Update, close the templating variables options, and you should now be able to select multiple hosts from the dropdown (or all hosts at once).
-
-
-![Multi Host Templating](screenshots/21-multi-templating.png)
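-
-**A note on what gets sent (a sketch, with illustrative hostnames): in "glob" multi-format a multi-host selection renders as a brace list such as `{host1,host2}`, which the datasource unwraps into a comma-separated `hostname` parameter, while the "*" All value is passed through as a wildcard.**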
diff --git a/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js b/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
deleted file mode 100644
index e7cd850..0000000
--- a/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
+++ /dev/null
@@ -1,1092 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-define([
-      'angular',
-      'lodash',
-      'jquery',
-      './directives',
-      './queryCtrl'
-    ],
-    function (angular, _) {
-      'use strict';
-
-      var module = angular.module('grafana.services');
-
-      module.factory('AmbariMetricsDatasource', function ($q, backendSrv, templateSrv) {
-        /**
-         * AMS Datasource Constructor
-         */
-        function AmbariMetricsDatasource(datasource) {
-          this.name = datasource.name;
-          this.url = datasource.url;
-          this.initMetricAppidMapping();
-        }
-        var allMetrics = {};
-        var appIds = [];
-
-        //We get a list of components and their associated metrics.
-        AmbariMetricsDatasource.prototype.initMetricAppidMapping = function () {
-          return this.doAmbariRequest({ url: '/ws/v1/timeline/metrics/metadata' })
-            .then(function (items) {
-              items = items.data;
-              allMetrics = {};
-              appIds = [];
-              _.forEach(items, function (metric,app) {
-                metric.forEach(function (component) {
-                  if (!allMetrics[app]) {
-                    allMetrics[app] = [];
-                  }
-                  allMetrics[app].push(component.metricname);
-                });
-              });
-              //We remove a couple of components from the list that do not contain any
-              //pertinent metrics.
-              delete allMetrics["timeline_metric_store_watcher"];
-              delete allMetrics["amssmoketestfake"];
-              appIds = Object.keys(allMetrics);
-            });
-        };
-
-        /**
-         * AMS Datasource  Authentication
-         */
-        AmbariMetricsDatasource.prototype.doAmbariRequest = function (options) {
-          if (this.basicAuth || this.withCredentials) {
-            options.withCredentials = true;
-          }
-          if (this.basicAuth) {
-            options.headers = options.headers || {};
-            options.headers.Authorization = this.basicAuth;
-          }
-
-          options.url = this.url + options.url;
-          options.inspect = {type: 'ambarimetrics'};
-
-          return backendSrv.datasourceRequest(options);
-        };
-
-        /**
-         * AMS Datasource  Query
-         */
-        AmbariMetricsDatasource.prototype.query = function (options) {
-          var emptyData = function (metric) {
-            var legend = metric.alias ? metric.alias : metric.metric;
-            return {
-              data: {
-                target: legend,
-                datapoints: []
-              }
-            };
-          };
-          var self = this;
-          var getMetricsData = function (target) {
-            var alias = target.alias ? target.alias : target.metric;
-            if (!_.isEmpty(templateSrv.variables) && templateSrv.variables[0].query === "yarnqueues") {
-              alias = alias + ' on ' + target.qmetric;
-            }
-            if (!_.isEmpty(templateSrv.variables) && templateSrv.variables[0].query === "kafka-topics") {
-              alias = alias + ' on ' + target.kbTopic;
-            }
-            return function (res) {
-              res = res.data;
-              console.log('processing metric ' + target.metric);
-              if (!res.metrics[0] || target.hide) {
-                return $q.when(emptyData(target));
-              }
-              var series = [];
-              var metricData = res.metrics[0].metrics;
-              // Added hostname to legend for templated dashboards.
-              var hostLegend = res.metrics[0].hostname ? ' on ' + res.metrics[0].hostname : '';
-              var timeSeries = {};
-              timeSeries = {
-                target: alias + hostLegend,
-                datapoints: []
-              };
-              for (var k in metricData){
-                if (metricData.hasOwnProperty(k)) {
-                  timeSeries.datapoints.push([metricData[k], (k - k % 1000)]);
-                }
-              }
-              series.push(timeSeries);
-              return $q.when({data: series});
-            };
-          };
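-          // Illustrative shape of the collector response consumed above; keys of the
-          // inner "metrics" map are epoch-millisecond timestamps (values here are made up):
-          //   { "metrics": [ { "metricname": "...", "appid": "...", "hostname": "...",
-          //                    "metrics": { "1538000000000": 12.3 } } ] }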
-          // To speed up querying on templatized dashboards.
-          var allHostMetricsData = function (target) {
-            var alias = target.alias ? target.alias : target.metric;
-            if (!_.isEmpty(templateSrv.variables) && templateSrv.variables[0].query === "hbase-users") {
-              alias = alias + ' for ' + target.hbUser;
-            }
-            // Aliases for Storm Topologies and components under a topology.
-            if(!_.isEmpty(templateSrv.variables) && templateSrv.variables[0].query === "topologies" &&
-            !templateSrv.variables[1]) {
-              alias = alias + ' on ' + target.sTopology;
-            }
-            if(!_.isEmpty(templateSrv.variables[1]) && templateSrv.variables[1].name === "component") {
-              alias = alias + ' on ' + target.sTopology + ' for ' + target.sComponent;
-            }
-
-            // Aliases for Druid Datasources.
-            if(!_.isEmpty(templateSrv.variables) && templateSrv.variables[0].query === "druidDataSources" &&
-                        !templateSrv.variables[1]) {
-              alias = alias.replace('$druidDataSource', target.sDataSource);
-            }
-            return function (res) {
-              res = res.data;
-              console.log('processing metric ' + target.metric);
-              if (!res.metrics[0] || target.hide) {
-                return $q.when(emptyData(target));
-              }
-              var series = [];
-              var timeSeries = {};
-              var metricData = res.metrics;
-              _.map(metricData, function (data) {
-                var totalCountFlag = false;
-                var aliasSuffix = data.hostname ? ' on ' + data.hostname : '';
-                var op = '';
-                var user = '';
-                if(!_.isEmpty(templateSrv.variables) && templateSrv.variables[0].query === "hbase-tables") {
-                  var tableName = "Tables.";
-                  var tableSuffix = data.metricname.substring(data.metricname.indexOf(tableName) + tableName.length,
-                  data.metricname.lastIndexOf("_metric"));
-                  aliasSuffix = ' on ' + tableSuffix;
-                }
-                if(templateSrv.variables[0].query === "callers") {
-                  alias = data.metricname.substring(data.metricname.indexOf('(')+1, data.metricname.indexOf(')'));
-                }
-                // Set legend and alias for HDFS - TopN dashboard
-                if(data.metricname.indexOf('dfs.NNTopUserOpCounts') === 0) {
-                  var metricname_arr = data.metricname.split(".");
-                  _.map(metricname_arr, function (segment) {
-                    if(segment.indexOf('op=') === 0) {
-                      var opKey = 'op=';
-                      op = segment.substring(segment.indexOf(opKey) + opKey.length);
-                    } else if(segment.indexOf('user=') === 0) {
-                      var userKey = 'user=';
-                      user = segment.substring(segment.indexOf(userKey) + userKey.length);
-                    }
-                  });
-                  // Check if metric is TotalCount
-                  if(data.metricname.indexOf('TotalCount') > 0) {
-                    totalCountFlag = true;
-                    if (op !== '*') {
-                      alias = op;
-                    } else {
-                      alias = 'Total Count';
-                    }
-                  } else if (op !== '*') {
-                    alias = op + ' by ' + user;
-                  } else {
-                    alias = user;
-                  }
-                  aliasSuffix = '';
-                }
-                if (data.appid.indexOf('ambari_server') === 0) {
-                  alias = data.metricname;
-                  aliasSuffix = '';
-                }
-                timeSeries = {
-                  target: alias + aliasSuffix,
-                  datapoints: []
-                };
-                for (var k in data.metrics){
-                  if (data.metrics.hasOwnProperty(k)) {
-                    timeSeries.datapoints.push([data.metrics[k], (k - k % 1000)]);
-                  }
-                }
-                if( (user !== '*') || (totalCountFlag) ) {
-                  series.push(timeSeries);
-                }
-              });
-              return $q.when({data: series});
-            };
-          };
-          var getHostAppIdData = function(target) {
-            var precision = target.precision === 'default' || typeof target.precision == 'undefined'  ? '' : '&precision=' 
-            + target.precision;
-            var instanceId = typeof target.cluster == 'undefined'  ? '' : '&instanceId=' + target.cluster;
-            var metricAggregator = target.aggregator === "none" ? '' : '._' + target.aggregator;
-            var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
-            return self.doAmbariRequest({ url: '/ws/v1/timeline/metrics?metricNames=' + target.metric + metricTransform +
-            metricAggregator + "&hostname=" + target.hosts + '&appId=' + target.app + instanceId + '&startTime=' + from +
-            '&endTime=' + to + precision + seriesAggregator }).then(
-              getMetricsData(target)
-            );
-          };
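-          // Example of a request URL assembled above (placeholder values, for illustration):
-          //   /ws/v1/timeline/metrics?metricNames=<metric>._rate._avg&hostname=<hosts>
-          //     &appId=<app>&startTime=<from>&endTime=<to>&precision=seconds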
-          //Check if it's a templated dashboard.
-          var templatedClusters = templateSrv.variables.filter(function(o) { return o.name === "cluster"});
-          var templatedCluster = (_.isEmpty(templatedClusters)) ? '' : templatedClusters[0].options.filter(function(cluster)
-            { return cluster.selected; }).map(function(clusterName) { return clusterName.value; });
-
-          var templatedHosts = templateSrv.variables.filter(function(o) { return o.name === "hosts"});
-          var templatedHost = (_.isEmpty(templatedHosts)) ? '' : templatedHosts[0].options.filter(function(host)
-            { return host.selected; }).map(function(hostName) { return hostName.value; });
-
-          var tComponents = _.isEmpty(templateSrv.variables) ? '' : templateSrv.variables.filter(function(variable)
-            { return variable.name === "components"});
-          var tComponent = _.isEmpty(tComponents) ? '' : tComponents[0].current.value;
-
-          var getServiceAppIdData = function(target) {
-            var tCluster = (_.isEmpty(templateSrv.variables))? templatedCluster : '';
-            var instanceId = typeof tCluster == 'undefined'  ? '' : '&instanceId=' + tCluster;
-            var tHost = (_.isEmpty(templateSrv.variables)) ? templatedHost : target.templatedHost;
-            var precision = target.precision === 'default' || typeof target.precision == 'undefined'  ? '' : '&precision='
-            + target.precision;
-            var metricAggregator = target.aggregator === "none" ? '' : '._' + target.aggregator;
-            var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
-            return self.doAmbariRequest({ url: '/ws/v1/timeline/metrics?metricNames=' + target.metric + metricTransform
-              + metricAggregator + '&hostname=' + tHost + '&appId=' + target.app + instanceId + '&startTime=' + from +
-              '&endTime=' + to + precision + seriesAggregator }).then(
-              getMetricsData(target)
-            );
-          };
-          // To speed up querying on templatized dashboards.
-          var getAllHostData = function(target) {
-            var instanceId = typeof target.templatedCluster == 'undefined'  ? '' : '&instanceId=' + target.templatedCluster;
-            var precision = target.precision === 'default' || typeof target.precision == 'undefined'  ? '' : '&precision='
-            + target.precision;
-            var metricAggregator = target.aggregator === "none" ? '' : '._' + target.aggregator;
-            var topN = ""; var isBottomN = "";
-            if (!_.isEmpty(templateSrv.variables.filter(function(o) { return o.name === "instances";}))) {
-              var metricTopN = _.filter(templateSrv.variables, function (o) { return o.name === "instances"; });
-              var metricTopAgg = _.filter(templateSrv.variables, function (o) { return o.name === "topagg"; });
-              isBottomN = templateSrv.variables.filter(function(o) { return o.name === "orientation";})[0].current.value
-              === "bottom" ? true : false;
-              topN = '&topN=' + metricTopN[0].current.value  +'&topNFunction=' + metricTopAgg[0].current.value  + '&isBottomN='+ isBottomN;
-            }
-            var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
-            var templatedComponent = (_.isEmpty(tComponent)) ? target.app : tComponent;
-            return self.doAmbariRequest({ url: '/ws/v1/timeline/metrics?metricNames=' + target.metric + metricTransform
-              + metricAggregator + '&hostname=' + target.templatedHost + '&appId=' + templatedComponent + instanceId
-              + '&startTime=' + from + '&endTime=' + to + precision + topN + seriesAggregator }).then(
-              allHostMetricsData(target)
-            );
-          };
-          var getYarnAppIdData = function(target) {
-            var precision = target.precision === 'default' || typeof target.precision == 'undefined'  ? '' : '&precision='
-            + target.precision;
-            var instanceId = typeof target.templatedCluster == 'undefined'  ? '' : '&instanceId=' + target.templatedCluster;
-            var metricAggregator = target.aggregator === "none" ? '' : '._' + target.aggregator;
-            var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
-            return self.doAmbariRequest({ url: '/ws/v1/timeline/metrics?metricNames=' + encodeURIComponent(target.queue) + metricTransform
-              + metricAggregator + '&appId=resourcemanager' + instanceId + '&startTime=' + from +
-              '&endTime=' + to + precision + seriesAggregator }).then(
-              getMetricsData(target)
-            );
-          };
-          var getHbaseAppIdData = function(target) {
-              var instanceId = typeof target.templatedCluster == 'undefined'  ? '' : '&instanceId=' + target.templatedCluster;
-              var precision = target.precision === 'default' || typeof target.precision == 'undefined'  ? '' : '&precision='
-            + target.precision;
-            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
-            return self.doAmbariRequest({ url: '/ws/v1/timeline/metrics?metricNames=' + target.hbMetric + instanceId + '&appId=hbase&startTime='
-            + from + '&endTime=' + to + precision + seriesAggregator }).then(
-              allHostMetricsData(target)
-            );
-          };
-          
-          var getKafkaAppIdData = function(target) {
-            var precision = target.precision === 'default' || typeof target.precision == 'undefined'  ? '' : '&precision='
-            + target.precision;
-            var instanceId = typeof target.templatedCluster == 'undefined'  ? '' : '&instanceId=' + target.templatedCluster;
-            var metricAggregator = target.aggregator === "none" ? '' : '._' + target.aggregator;
-            var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
-            return self.doAmbariRequest({ url: '/ws/v1/timeline/metrics?metricNames=' + target.kbMetric + metricTransform + instanceId
-              + metricAggregator + '&appId=kafka_broker&startTime=' + from +
-              '&endTime=' + to + precision + seriesAggregator }).then(
-              getMetricsData(target)
-            );
-          };
-          var getNnAppIdData = function(target) {
-
-            var precision = target.precision === 'default' || typeof target.precision == 'undefined'  ? '' : '&precision='
-            + target.precision;
-            var instanceId = typeof target.templatedCluster == 'undefined'  ? '' : '&instanceId=' + target.templatedCluster;
-            var metricAggregator = target.aggregator === "none" ? '' : '._' + target.aggregator;
-            var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
-            return self.doAmbariRequest({ url: '/ws/v1/timeline/metrics?metricNames=' + target.nnMetric + metricTransform + instanceId
-            + metricAggregator + '&appId=namenode&startTime=' + from + '&endTime=' + to + precision + seriesAggregator }).then(
-              allHostMetricsData(target)
-            );
-          };
-
-          // Storm Topology calls.
-          var getStormData = function(target) {
-            var instanceId = typeof target.templatedCluster == 'undefined'  ? '' : '&instanceId=' + target.templatedCluster;
-            var precision = target.precision === 'default' || typeof target.precision == 'undefined'  ? '' : '&precision='
-            + target.precision;
-            var metricAggregator = target.aggregator === "none" ? '' : '._' + target.aggregator;
-            var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
-            return self.doAmbariRequest({ url: '/ws/v1/timeline/metrics?metricNames=' + target.sTopoMetric + metricTransform + instanceId
-                + metricAggregator + '&appId=nimbus&startTime=' + from + '&endTime=' + to + precision + seriesAggregator }).then(
-                allHostMetricsData(target)
-            );
-          };
-
-          // Infra Solr Calls
-          var getSolrCoreData = function(target) {
-            var instanceId = typeof target.templatedCluster == 'undefined'  ? '' : '&instanceId=' + target.templatedCluster;
-            var precision = target.precision === 'default' || typeof target.precision == 'undefined'  ? '' : '&precision='
-                + target.precision;
-            var metricAggregator = target.aggregator === "none" ? '' : '._' + target.aggregator;
-            var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
-            return self.doAmbariRequest({ url: '/ws/v1/timeline/metrics?metricNames=' + target.sCoreMetric + metricTransform + instanceId
-                + metricAggregator + '&appId=ambari-infra-solr&startTime=' + from + '&endTime=' + to + precision + seriesAggregator }).then(
-                allHostMetricsData(target)
-            );
-          };
-
-          var getSolrCollectionData = function(target) {
-            var instanceId = typeof target.templatedCluster == 'undefined'  ? '' : '&instanceId=' + target.templatedCluster;
-            var precision = target.precision === 'default' || typeof target.precision == 'undefined'  ? '' : '&precision='
-                + target.precision;
-            var metricAggregator = target.aggregator === "none" ? '' : '._' + target.aggregator;
-            var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
-            return self.doAmbariRequest({ url: '/ws/v1/timeline/metrics?metricNames=' + target.sCollectionMetric + metricTransform + instanceId
-                + metricAggregator + '&appId=ambari-infra-solr&startTime=' + from + '&endTime=' + to + precision + seriesAggregator }).then(
-                allHostMetricsData(target)
-            );
-          };
-
-          // Druid calls.
-          var getDruidData = function(target) {
-            var instanceId = typeof target.templatedCluster == 'undefined'  ? '' : '&instanceId=' + target.templatedCluster;
-            var precision = target.precision === 'default' || typeof target.precision == 'undefined'  ? '' : '&precision='
-                + target.precision;
-            var metricAggregator = target.aggregator === "none" ? '' : '._' + target.aggregator;
-            var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
-            return self.doAmbariRequest({ url: '/ws/v1/timeline/metrics?metricNames=' + target.sDataSourceMetric + metricTransform + instanceId
-                + metricAggregator + '&appId=druid&startTime=' + from + '&endTime=' + to + precision + seriesAggregator }).then(
-                allHostMetricsData(target)
-            );
-          };
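All of these per-service helpers assemble the same Metrics Collector GET request and differ only in the metric-name field (sTopoMetric, sCoreMetric, sCollectionMetric, sDataSourceMetric) and the appId. As a rough sketch of the assembled URL, a Druid target with aggregator 'avg', no transform, no templated cluster, and precision 'minutes' (all values hypothetical, not taken from this patch) would request:

    /ws/v1/timeline/metrics?metricNames=druid/query/time._avg&appId=druid
        &startTime=1538040000&endTime=1538043600&precision=minutes

The seriesAggregateFunction parameter is appended only when a series aggregator is chosen in the panel.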
-
-          // Time Ranges
-          var from = Math.floor(options.range.from.valueOf() / 1000);
-          var to = Math.floor(options.range.to.valueOf() / 1000);
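Grafana hands the dashboard range over as moment-like objects whose valueOf() returns epoch milliseconds; dividing by 1000 and flooring produces the whole-second startTime/endTime values interpolated into every URL above. The helpers defined earlier can safely reference from and to because they are only invoked after these declarations run. An illustration with a hypothetical timestamp:

    options.range.from.valueOf();        // e.g. 1538040517000 (epoch milliseconds)
    Math.floor(1538040517000 / 1000);    // 1538040517 (whole seconds)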
-
-          var metricsPromises = [];
-          if (!_.isEmpty(templateSrv.variables)) {
-            // YARN Queues Dashboard
-            if (templateSrv.variables[0].query === "yarnqueues") {
-              var allQueues = templateSrv.variables.filter(function(variable) { return variable.query === "yarnqueues";});
-              var selectedQs = (_.isEmpty(allQueues)) ? "" : allQueues[0].options.filter(function(q)
-              { return q.selected; }).map(function(qName) { return qName.value; });
-              // All Queues
-              if (!_.isEmpty(_.find(selectedQs, function (wildcard) { return wildcard === "*"; }))) {
-                var allQueue = allQueues[0].options.filter(function(q) {
-                  return q.text !== "All"; }).map(function(queue) { return queue.value; });
-                _.forEach(allQueue, function(processQueue) {
-                  metricsPromises.push(_.map(options.targets, function(target) {
-                    target.qmetric = processQueue;
-                    target.queue = target.metric.replace('root', target.qmetric);
-                    return getYarnAppIdData(target);
-                  }));
-                });
-              } else {
-                // All selected queues.
-                _.forEach(selectedQs, function(processQueue) {
-                  metricsPromises.push(_.map(options.targets, function(target) {
-                    target.qmetric = processQueue;
-                    target.queue = target.metric.replace('root', target.qmetric);
-                    return getYarnAppIdData(target);
-                  }));
-                });
-              }
-            }
-            // Templatized Dashboard for per-user metrics in HBase.
-            if (templateSrv.variables[0].query === "hbase-users") {
-              var allUsers = templateSrv.variables.filter(function(variable) { return variable.query === "hbase-users";});
-              var selectedUsers = (_.isEmpty(allUsers)) ? "" : allUsers[0].options.filter(function(user)
-              { return user.selected; }).map(function(uName) { return uName.value; });
-              selectedUsers = templateSrv._values.Users.lastIndexOf('}') > 0 ? templateSrv._values.Users.slice(1,-1) :
-                templateSrv._values.Users;
-              var selectedUser = selectedUsers.split(',');
-              _.forEach(selectedUser, function(processUser) {
-                metricsPromises.push(_.map(options.targets, function(target) {
-                  target.hbUser = processUser;
-                  var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-                  target.hbMetric = target.metric.replace('*', target.hbUser) + metricTransform + '._' + target.aggregator;
-                  return getHbaseAppIdData(target);
-                }));
-              });
-            }
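The selectedUsers reassignment above (and the matching lines in the branches below) undoes Grafana's glob formatting for multi-value template variables: a multi-select is rendered as '{user1,user2}', a single selection as a bare 'user1'. A minimal sketch of the idiom, with hypothetical values:

    var v = '{user1,user2}';
    v = v.lastIndexOf('}') > 0 ? v.slice(1, -1) : v;  // 'user1,user2'
    v.split(',');                                     // ['user1', 'user2']
    // A bare 'user1' contains no '}', so it passes through unchanged.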
-            // Templatized Dashboard for per-table metrics in HBase.
-            if (templateSrv.variables[0].query === "hbase-tables") {
-              var splitTables = [];
-              var allTables = templateSrv._values.Tables.lastIndexOf('}') > 0 ? templateSrv._values.Tables.slice(1,-1) :
-                templateSrv._values.Tables;
-              var allTable = allTables.split(',');
-              while (allTable.length > 0) {
-                splitTables.push(allTable.splice(0,20));
-              }
-              _.forEach(splitTables, function(table) {
-                metricsPromises.push(_.map(options.targets, function(target) {
-                  var hbMetric = [];
-                  _.map(table, function(tableMetric) { hbMetric.push(target.metric.replace('*', tableMetric)); });
-                  var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
-                  hbMetric = _.map(hbMetric, function(tbl) { return tbl + metricTransform + '._' + target.aggregator; });
-                  target.hbMetric = _.flatten(hbMetric).join(',');
-                  return getHbaseAppIdData(target);
-                }));
-              });
-            }
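The while loop above splits the selected tables into batches of 20 and issues one collector request per batch, presumably to keep the metricNames query parameter at a manageable length. A standalone sketch of the chunking (input values hypothetical):

    var allTable = ['t1', 't2', /* ... */ 't45'];  // 45 selected tables
    var splitTables = [];
    while (allTable.length > 0) {
      // splice removes and returns up to 20 names, shrinking allTable each pass
      splitTables.push(allTable.splice(0, 20));    // batches of 20, 20, 5
    }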
-            // Templatized Dashboard for per-topic metrics in Kafka.
-            if (templateSrv.variables[0].query === "kafka-topics") {
-              var allTopics = templateSrv.variables.filter(function(variable) { return variable.query === "kafka-topics";});
-              var selectedTopics = (_.isEmpty(allTopics)) ? "" : allTopics[0].options.filter(function(topic)
-              { return topic.selected; }).map(function(topicName) { return topicName.value; });
-              selectedTopics = templateSrv._values.Topics.lastIndexOf('}') > 0 ? templateSrv._values.Topics.slice(1,-1) :
-                templateSrv._values.Topics;
-              var selectedTopic = selectedTopics.split(',');
-              _.forEach(selectedTopic, function(processTopic) {
-                metricsPromises.push(_.map(options.targets, function(target) {
-                  target.kbTopic = processTopic;
-                  target.kbMetric = target.metric.replace('*', target.kbTopic);
-                  return getKafkaAppIdData(target);
-                }));
-              });
-            }
-            //Templatized Dashboard for Call Queues
-            if (templateSrv.variables[0].query === "callers") {
-              var allCallers = templateSrv.variables.filter(function(variable) { return variable.query === "callers";});
-              var selectedCallers = (_.isEmpty(allCallers)) ? "" : allCallers[0].options.filter(function(user)
-              { return user.selected; }).map(function(callerName) { return callerName.value; });
-              selectedCallers = templateSrv._values.Callers.lastIndexOf('}') > 0 ? templateSrv._values.Callers.slice(1,-1) :
-                templateSrv._values.Callers;
-              var selectedCaller = selectedCallers.split(',');
-              _.forEach(selectedCaller, function(processCaller) {
-                metricsPromises.push(_.map(options.targets, function(target) {
-                  target.nnCaller = processCaller;
-                  target.nnMetric = target.metric.replace('*', target.nnCaller);
-                  return getNnAppIdData(target);
-                }));
-              });
-            }
-
-            //Templatized Dashboard for Infra Solr Cores
-            if (templateSrv.variables[0].query === "infra_solr_core") {
-                var allCores = templateSrv.variables.filter(function(variable) { return variable.query === "infra_solr_core";});
-                var selectedCores = (_.isEmpty(allCores)) ? "" : allCores[0].options.filter(function(core)
-                { return core.selected; }).map(function(coreName) { return coreName.value; });
-                selectedCores = templateSrv._values.Cores.lastIndexOf('}') > 0 ? templateSrv._values.Cores.slice(1,-1) :
-                    templateSrv._values.Cores;
-                var selectedCore= selectedCores.split(',');
-                _.forEach(selectedCore, function(processCore) {
-                    metricsPromises.push(_.map(options.targets, function(target) {
-                        target.sCore = processCore;
-                        target.sCoreMetric = target.metric.replace('*', target.sCore);
-                        return getSolrCoreData(target);
-                    }));
-                });
-            }
-
-            //Templatized Dashboard for Infra Solr Collections
-            if (templateSrv.variables[0].query === "infra_solr_collection") {
-                var allCollections = templateSrv.variables.filter(function(variable) { return variable.query === "infra_solr_collection";});
-                var selectedCollections = (_.isEmpty(allCollections)) ? "" : allCollections[0].options.filter(function(collection)
-                { return collection.selected; }).map(function(collectionsName) { return collectionsName.value; });
-                selectedCollections = templateSrv._values.Collections.lastIndexOf('}') > 0 ? templateSrv._values.Collections.slice(1,-1) :
-                    templateSrv._values.Collections;
-                var selectedCollection= selectedCollections.split(',');
-                _.forEach(selectedCollection, function(processCollection) {
-                    metricsPromises.push(_.map(options.targets, function(target) {
-                        target.sCollection = processCollection;
-                        target.sCollectionMetric = target.metric.replace('*', target.sCollection);
-                        return getSolrCollectionData(target);
-                    }));
-                });
-            }
-
-            //Templatized Dashboard for Storm Topologies
-            if (templateSrv.variables[0].query === "topologies" && !templateSrv.variables[1]) {
-              var allTopologies = templateSrv.variables.filter(function(variable) { return variable.query === "topologies";});
-              var selectedTopologies = (_.isEmpty(allTopologies)) ? "" : allTopologies[0].options.filter(function(topo)
-              { return topo.selected; }).map(function(topoName) { return topoName.value; });
-              selectedTopologies = templateSrv._values.topologies.lastIndexOf('}') > 0 ? templateSrv._values.topologies.slice(1,-1) :
-                  templateSrv._values.topologies;
-              var selectedTopology = selectedTopologies.split(',');
-              _.forEach(selectedTopology, function(processTopology) {
-                metricsPromises.push(_.map(options.targets, function(target) {
-                  target.sTopology = processTopology;
-                  target.sTopoMetric = target.metric.replace('*', target.sTopology);
-                  return getStormData(target);
-                }));
-              });
-            }
-
-            //Templatized Dashboards for Storm Components
-            if (templateSrv.variables[0].query === "topologies" && templateSrv.variables[1] &&
-                templateSrv.variables[1].name === "component") {
-              var selectedTopology = templateSrv._values.topologies;
-              var selectedComponent = templateSrv._values.component;
-              metricsPromises.push(_.map(options.targets, function(target) {
-                target.sTopology = selectedTopology;
-                target.sComponent = selectedComponent;
-                target.sTopoMetric = target.metric.replace('*', target.sTopology).replace('*', target.sComponent);
-                return getStormData(target);
-              }));
-            }
-
-            //Templatized Dashboard for Storm Kafka Offset
-            if (templateSrv.variables[0].query === "topologies" && templateSrv.variables[1] &&
-                templateSrv.variables[1].name === "topic") {
-              var selectedTopology = templateSrv._values.topologies;
-              var selectedTopic = templateSrv._values.topic;
-              metricsPromises.push(_.map(options.targets, function(target) {
-                target.sTopology = selectedTopology;
-                target.sTopic = selectedTopic;
-                target.sPartition = options.scopedVars.partition.value;
-                target.sTopoMetric = target.metric.replace('*', target.sTopology).replace('*', target.sTopic)
-                    .replace('*', target.sPartition);
-                return getStormData(target);
-              }));
-            }
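Unlike the branches above, this one reads the partition from options.scopedVars rather than templateSrv._values: when a panel is repeated over a template variable, Grafana passes each copy its own value through scopedVars, so every repeated panel queries only its own partition. Illustrative value:

    options.scopedVars.partition.value;  // e.g. '0' for the first repeated panel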
-
-            //Templatized Dashboards for Druid
-            if (templateSrv.variables[0].query === "druidDataSources" && !templateSrv.variables[1]) {
-              var allDataSources = templateSrv.variables.filter(function(variable) { return variable.query === "druidDataSources";});
-              var selectedDataSources = (_.isEmpty(allDataSources)) ? "" : allDataSources[0].options.filter(function(dataSource)
-              { return dataSource.selected; }).map(function(dataSourceName) { return dataSourceName.value; });
-              selectedDataSources = templateSrv._values.druidDataSources.lastIndexOf('}') > 0 ? templateSrv._values.druidDataSources.slice(1,-1) :
-                templateSrv._values.druidDataSources;
-              var selectedDataSource = selectedDataSources.split(',');
-              _.forEach(selectedDataSource, function(processDataSource) {
-                metricsPromises.push(_.map(options.targets, function(target) {
-                  target.sDataSource = processDataSource;
-                  target.sDataSourceMetric = target.metric.replace('*', target.sDataSource);
-                  return getDruidData(target);
-                }));
-              });
-            }
-            // To speed up querying on templatized dashboards.
-            var indexOfHosts = -1;
-            for (var i = 0; i < templateSrv.variables.length; i++) {
-              if (templateSrv.variables[i].name == 'hosts') {
-                indexOfHosts = i;
-              }
-            }
-            if (indexOfHosts >= 0) {
-              var allHosts = templateSrv._values.hosts.lastIndexOf('}') > 0 ? templateSrv._values.hosts.slice(1,-1) :
-                templateSrv._values.hosts;
-              allHosts = templateSrv._texts.hosts === "All" ? '%' : allHosts;
-              metricsPromises.push(_.map(options.targets, function(target) {
-                target.templatedHost = allHosts ? allHosts : '';
-                target.templatedCluster = templatedCluster;
-                return getAllHostData(target);
-              }));
-            }
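This hosts fast path avoids fanning out one request per host: picking the "All" option substitutes '%', which the Metrics Collector accepts as a match-all hostname wildcard, while an explicit multi-select has its glob braces stripped as in the branches above. Hypothetical values for the two paths:

    // templateSrv._texts.hosts === "All"       -> target.templatedHost = '%'
    // templateSrv._values.hosts === '{h1,h2}'  -> target.templatedHost = 'h1,h2'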
-            metricsPromises = _.flatten(metricsPromises);
-          } else {
-            // Non Templatized Dashboards
-            metricsPromises = _.map(options.targets, function(target) {
-              console.debug('target app=' + target.app + ',' +
-                'target metric=' + target.metric + ' on host=' + target.tempHost);
-              if (!!target.hosts) {
-                return getHostAppIdData(target);
-              } else {
-                return getServiceAppIdData(target);
-              }
-            });
... 77457 lines suppressed ...