You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by sw...@apache.org on 2016/04/15 18:29:24 UTC

ambari git commit: AMBARI-15902. Refactor Metadata manager for supporting distributed collector. (swagle)

Repository: ambari
Updated Branches:
  refs/heads/trunk ff1b4497b -> e3c981640


AMBARI-15902. Refactor Metadata manager for supporting distributed collector. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e3c98164
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e3c98164
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e3c98164

Branch: refs/heads/trunk
Commit: e3c9816403ebef2f777d48405fb96a43ec46462c
Parents: ff1b449
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Thu Apr 14 16:49:19 2016 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Fri Apr 15 09:29:52 2016 -0700

----------------------------------------------------------------------
 .../timeline/TimelineMetricConfiguration.java   |   3 +
 .../TimelineMetricMetadataManager.java          |  61 ++++++++--
 .../discovery/TimelineMetricMetadataSync.java   |  72 +++++++++++-
 .../timeline/discovery/TestMetadataManager.java |  22 +++-
 .../timeline/discovery/TestMetadataSync.java    | 116 +++++++++++++++++++
 .../server/upgrade/UpgradeCatalog240.java       |  33 ++++--
 .../0.1.0/configuration/ams-site.xml            |   8 ++
 .../templates/hadoop-metrics2.properties.j2     |   4 -
 8 files changed, 288 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e3c98164/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
index 5b517ef..c0093a0 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
@@ -225,6 +225,9 @@ public class TimelineMetricConfiguration {
   public static final String TIMELINE_METRICS_TABLES_DURABILITY =
     "timeline.metrics.tables.durability";
 
+  public static final String TIMELINE_METRIC_METADATA_FILTERS =
+    "timeline.metrics.service.metadata.filters";
+
   public static final String HOST_APP_ID = "HOST";
 
   public static final String DEFAULT_INSTANCE_PORT = "12001";

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3c98164/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
index 8e58203..a39fcb6 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -26,8 +27,11 @@ import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
 
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -35,10 +39,12 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.regex.Matcher;
 
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.DISABLE_METRIC_METADATA_MGMT;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_INIT_DELAY;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_SCHEDULE_DELAY;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.TIMELINE_METRIC_METADATA_FILTERS;
 
 public class TimelineMetricMetadataManager {
   private static final Log LOG = LogFactory.getLog(TimelineMetricMetadataManager.class);
@@ -56,10 +62,19 @@ public class TimelineMetricMetadataManager {
   private PhoenixHBaseAccessor hBaseAccessor;
   private Configuration metricsConf;
 
+  TimelineMetricMetadataSync metricMetadataSync;
+  // Filter out metric names matching any of the given patterns from the metadata cache
+  final List<String> metricNameFilters = new ArrayList<>();
+
   public TimelineMetricMetadataManager(PhoenixHBaseAccessor hBaseAccessor,
                                        Configuration metricsConf) {
     this.hBaseAccessor = hBaseAccessor;
     this.metricsConf = metricsConf;
+
+    String patternStrings = metricsConf.get(TIMELINE_METRIC_METADATA_FILTERS);
+    if (!StringUtils.isEmpty(patternStrings)) {
+      metricNameFilters.addAll(Arrays.asList(patternStrings.split(",")));
+    }
   }
 
   /**
@@ -69,21 +84,21 @@ public class TimelineMetricMetadataManager {
     if (metricsConf.getBoolean(DISABLE_METRIC_METADATA_MGMT, false)) {
       isDisabled = true;
     } else {
+      metricMetadataSync = new TimelineMetricMetadataSync(this);
       // Schedule the executor to sync to store
-      executorService.scheduleWithFixedDelay(new TimelineMetricMetadataSync(this),
+      executorService.scheduleWithFixedDelay(metricMetadataSync,
         metricsConf.getInt(METRICS_METADATA_SYNC_INIT_DELAY, 120), // 2 minutes
         metricsConf.getInt(METRICS_METADATA_SYNC_SCHEDULE_DELAY, 300), // 5 minutes
         TimeUnit.SECONDS);
       // Read from store and initialize map
       try {
-        Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
-          hBaseAccessor.getTimelineMetricMetadata();
+        Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata = getMetadataFromStore();
 
         LOG.info("Retrieved " + metadata.size() + ", metadata objects from store.");
         // Store in the cache
         METADATA_CACHE.putAll(metadata);
 
-        Map<String, Set<String>> hostedAppData = hBaseAccessor.getHostedAppsMetadata();
+        Map<String, Set<String>> hostedAppData = getHostedAppsFromStore();
 
         LOG.info("Retrieved " + hostedAppData.size() + " host objects from store.");
         HOSTED_APPS_MAP.putAll(hostedAppData);
@@ -111,10 +126,26 @@ public class TimelineMetricMetadataManager {
   }
 
   /**
+   * Test whether the metric name matches any configured filter pattern;
+   * returns true if the metric should be skipped from the metadata cache.
+   */
+  boolean skipMetadataCache(String metricName) {
+    for (String pattern : metricNameFilters) {
+      if (metricName.contains(pattern)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
    * Update value in metadata cache
    * @param metadata @TimelineMetricMetadata
    */
   public void putIfModifiedTimelineMetricMetadata(TimelineMetricMetadata metadata) {
+    if (skipMetadataCache(metadata.getMetricName())) {
+      return;
+    }
+
     TimelineMetricMetadataKey key = new TimelineMetricMetadataKey(
       metadata.getMetricName(), metadata.getAppId());
 
@@ -172,15 +203,27 @@ public class TimelineMetricMetadataManager {
     );
   }
 
+  public boolean isDisabled() {
+    return isDisabled;
+  }
+
+  boolean isDistributedModeEnabled() {
+    return metricsConf.get("timeline.metrics.service.operation.mode", "").equals("distributed");
+  }
+
   /**
-   * Fetch hosted apps from store
+   * Fetch metrics metadata from store
    * @throws SQLException
    */
-  Map<String, Set<String>> getPersistedHostedAppsData() throws SQLException {
-    return hBaseAccessor.getHostedAppsMetadata();
+  Map<TimelineMetricMetadataKey, TimelineMetricMetadata> getMetadataFromStore() throws SQLException {
+    return hBaseAccessor.getTimelineMetricMetadata();
   }
 
-  public boolean isDisabled() {
-    return isDisabled;
+  /**
+   * Fetch hosted apps from store
+   * @throws SQLException
+   */
+  Map<String, Set<String>> getHostedAppsFromStore() throws SQLException {
+    return hBaseAccessor.getHostedAppsMetadata();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3c98164/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
index 54ea200..25b525a 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
@@ -41,6 +41,22 @@ public class TimelineMetricMetadataSync implements Runnable {
 
   @Override
   public void run() {
+    LOG.debug("Persisting metric metadata...");
+    persistMetricMetadata();
+    LOG.debug("Persisting hosted apps metadata...");
+    persistHostAppsMetadata();
+    if (cacheManager.isDistributedModeEnabled()) {
+      LOG.debug("Refreshing metric metadata...");
+      refreshMetricMetadata();
+      LOG.debug("Refreshing hosted apps metadata...");
+      refreshHostAppsMetadata();
+    }
+  }
+
+  /**
+   * Find metrics not persisted to store and persist them
+   */
+  private void persistMetricMetadata() {
     List<TimelineMetricMetadata> metadataToPersist = new ArrayList<>();
     // Find all entries to persist
     for (TimelineMetricMetadata metadata : cacheManager.getMetadataCache().values()) {
@@ -70,11 +86,38 @@ public class TimelineMetricMetadataSync implements Runnable {
         cacheManager.getMetadataCache().put(key, metadata);
       }
     }
-    // Sync hosted apps data is needed
+  }
+
+  /**
+   * Read all metric metadata and update cached values - HA mode
+   */
+  private void refreshMetricMetadata() {
+    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadataFromStore = null;
+    try {
+      metadataFromStore = cacheManager.getMetadataFromStore();
+    } catch (SQLException e) {
+      LOG.warn("Error refreshing metadata from store.", e);
+    }
+    if (metadataFromStore != null) {
+      Map<TimelineMetricMetadataKey, TimelineMetricMetadata> cachedMetadata =
+        cacheManager.getMetadataCache();
+
+      for (Map.Entry<TimelineMetricMetadataKey, TimelineMetricMetadata> metadataEntry : metadataFromStore.entrySet()) {
+        if (!cachedMetadata.containsKey(metadataEntry.getKey())) {
+          cachedMetadata.put(metadataEntry.getKey(), metadataEntry.getValue());
+        }
+      }
+    }
+  }
+
+  /**
+   * Sync hosted apps data if needed
+   */
+  private void persistHostAppsMetadata() {
     if (cacheManager.syncHostedAppsMetadata()) {
       Map<String, Set<String>> persistedData = null;
       try {
-        persistedData = cacheManager.getPersistedHostedAppsData();
+        persistedData = cacheManager.getHostedAppsFromStore();
       } catch (SQLException e) {
         LOG.warn("Failed on fetching hosted apps data from store.", e);
         return; // Something wrong with store
@@ -86,8 +129,8 @@ public class TimelineMetricMetadataSync implements Runnable {
         for (Map.Entry<String, Set<String>> cacheEntry : cachedData.entrySet()) {
           // No persistence / stale data in store
           if (persistedData == null || persistedData.isEmpty() ||
-              !persistedData.containsKey(cacheEntry.getKey()) ||
-              !persistedData.get(cacheEntry.getKey()).containsAll(cacheEntry.getValue())) {
+            !persistedData.containsKey(cacheEntry.getKey()) ||
+            !persistedData.get(cacheEntry.getKey()).containsAll(cacheEntry.getValue())) {
             dataToSync.put(cacheEntry.getKey(), cacheEntry.getValue());
           }
         }
@@ -102,4 +145,25 @@ public class TimelineMetricMetadataSync implements Runnable {
 
     }
   }
+
+  /**
+   * Read all hosted apps metadata and update cached values - HA
+   */
+  private void refreshHostAppsMetadata() {
+    Map<String, Set<String>> hostedAppsDataFromStore = null;
+    try {
+      hostedAppsDataFromStore = cacheManager.getHostedAppsFromStore();
+    } catch (SQLException e) {
+      LOG.warn("Error refreshing metadata from store.", e);
+    }
+    if (hostedAppsDataFromStore != null) {
+      Map<String, Set<String>> cachedData = cacheManager.getHostedAppsCache();
+
+      for (Map.Entry<String, Set<String>> storeEntry : hostedAppsDataFromStore.entrySet()) {
+        if (!cachedData.containsKey(storeEntry.getKey())) {
+          cachedData.put(storeEntry.getKey(), storeEntry.getValue());
+        }
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3c98164/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
index bf887be..2b33cb95 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
@@ -23,18 +23,36 @@ import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.AbstractMiniHBaseClusterTest;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
+import org.easymock.EasyMock;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
 
 import java.io.IOException;
 import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
 
+import static org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata.MetricType.GAUGE;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_INIT_DELAY;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_SCHEDULE_DELAY;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
 public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
   TimelineMetricMetadataManager metadataManager;
 
+
+
+
   @Before
   public void insertDummyRecords() throws IOException, SQLException {
     // Initialize new manager
@@ -100,13 +118,11 @@ public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
     Assert.assertEquals(value2, savedData.get(key2));
 
     Map<String, Set<String>> cachedHostData = metadataManager.getHostedAppsCache();
-    Map<String, Set<String>> savedHostData = metadataManager.getPersistedHostedAppsData();
+    Map<String, Set<String>> savedHostData = metadataManager.getHostedAppsFromStore();
     Assert.assertEquals(cachedData.size(), savedData.size());
     Assert.assertEquals("dummy_app1", cachedHostData.get("dummy_host1").iterator().next());
     Assert.assertEquals("dummy_app2", cachedHostData.get("dummy_host2").iterator().next());
     Assert.assertEquals("dummy_app1", savedHostData.get("dummy_host1").iterator().next());
     Assert.assertEquals("dummy_app2", savedHostData.get("dummy_host2").iterator().next());
   }
-
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3c98164/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java
new file mode 100644
index 0000000..78f2bfe
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
+
+import junit.framework.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
+import org.junit.Test;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import static org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata.MetricType.GAUGE;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.TIMELINE_METRIC_METADATA_FILTERS;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+public class TestMetadataSync {
+  @Test
+  public void testRefreshMetadataOnWrite() throws Exception {
+    Configuration configuration = createNiceMock(Configuration.class);
+    PhoenixHBaseAccessor hBaseAccessor = createNiceMock(PhoenixHBaseAccessor.class);
+
+    final TimelineMetricMetadata testMetadata1 = new TimelineMetricMetadata(
+      "m1", "a1", "", GAUGE.name(), System.currentTimeMillis(), true);
+    final TimelineMetricMetadata testMetadata2 = new TimelineMetricMetadata(
+      "m2", "a2", "", GAUGE.name(), System.currentTimeMillis(), true);
+
+    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
+      new HashMap<TimelineMetricMetadataKey, TimelineMetricMetadata>() {{
+        put(new TimelineMetricMetadataKey("m1", "a1"), testMetadata1);
+        put(new TimelineMetricMetadataKey("m2", "a2"), testMetadata2);
+      }};
+
+    Map<String, Set<String>> hostedApps = new HashMap<String, Set<String>>() {{
+      put("h1", new HashSet<>(Arrays.asList("a1")));
+      put("h2", new HashSet<>(Arrays.asList("a1", "a2")));
+    }};
+
+    expect(configuration.get("timeline.metrics.service.operation.mode", "")).andReturn("distributed");
+    expect(hBaseAccessor.getTimelineMetricMetadata()).andReturn(metadata);
+    expect(hBaseAccessor.getHostedAppsMetadata()).andReturn(hostedApps);
+
+    replay(configuration, hBaseAccessor);
+
+    TimelineMetricMetadataManager metadataManager = new
+      TimelineMetricMetadataManager(hBaseAccessor, configuration);
+
+    metadataManager.metricMetadataSync = new TimelineMetricMetadataSync(metadataManager);
+
+    metadataManager.metricMetadataSync.run();
+
+    verify(configuration, hBaseAccessor);
+
+    metadata = metadataManager.getMetadataCache();
+    Assert.assertEquals(2, metadata.size());
+    Assert.assertTrue(metadata.containsKey(new TimelineMetricMetadataKey("m1", "a1")));
+    Assert.assertTrue(metadata.containsKey(new TimelineMetricMetadataKey("m2", "a2")));
+
+    hostedApps = metadataManager.getHostedAppsCache();
+    Assert.assertEquals(2, hostedApps.size());
+    Assert.assertEquals(1, hostedApps.get("h1").size());
+    Assert.assertEquals(2, hostedApps.get("h2").size());
+  }
+
+  @Test
+  public void testFilterByRegexOnMetricName() throws Exception {
+    Configuration configuration = createNiceMock(Configuration.class);
+    PhoenixHBaseAccessor hBaseAccessor = createNiceMock(PhoenixHBaseAccessor.class);
+
+    TimelineMetricMetadata metadata1 = new TimelineMetricMetadata(
+      "xxx.abc.yyy", "a1", "", GAUGE.name(), System.currentTimeMillis(), true);
+    TimelineMetricMetadata metadata2 = new TimelineMetricMetadata(
+      "xxx.cdef.yyy", "a2", "", GAUGE.name(), System.currentTimeMillis(), true);
+    TimelineMetricMetadata metadata3 = new TimelineMetricMetadata(
+      "xxx.pqr.zzz", "a3", "", GAUGE.name(), System.currentTimeMillis(), true);
+
+    expect(configuration.get(TIMELINE_METRIC_METADATA_FILTERS)).andReturn("abc,cde");
+
+    replay(configuration, hBaseAccessor);
+
+    TimelineMetricMetadataManager metadataManager = new
+      TimelineMetricMetadataManager(hBaseAccessor, configuration);
+
+    metadataManager.putIfModifiedTimelineMetricMetadata(metadata1);
+    metadataManager.putIfModifiedTimelineMetricMetadata(metadata2);
+    metadataManager.putIfModifiedTimelineMetricMetadata(metadata3);
+
+    verify(configuration, hBaseAccessor);
+
+    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
+      metadataManager.getMetadataCache();
+
+    Assert.assertEquals(1, metadata.size());
+    Assert.assertEquals("xxx.pqr.zzz", metadata.keySet().iterator().next().getMetricName());
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3c98164/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index ddaff84..1a82698 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -50,6 +50,7 @@ import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.State;
+import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.jdbc.support.JdbcUtils;
@@ -169,7 +170,7 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
 
   private void updateViewInstanceEntityTable() throws SQLException {
     dbAccessor.addColumn(VIEWINSTANCE_TABLE,
-            new DBColumnInfo(SHORT_URL_COLUMN, String.class, 255, null, true));
+      new DBColumnInfo(SHORT_URL_COLUMN, String.class, 255, null, true));
   }
 
   private void updateClusterTableDDL() throws SQLException {
@@ -266,8 +267,8 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
 
     permissionId = permissionDAO.findPermissionByNameAndType("CLUSTER.OPERATOR",
       resourceTypeDAO.findByName("CLUSTER")).getId().toString();
-    dbAccessor.insertRowIfMissing("permission_roleauthorization", new String[]{"permission_id", "authorization_id"},
-            new String[]{"'" + permissionId + "'", "'CLUSTER.MANAGE_USER_PERSISTED_DATA'"}, false);
+    dbAccessor.insertRowIfMissing("permission_roleauthorization", new String[]{"permission_id", "authorization_id" },
+      new String[]{"'" + permissionId + "'", "'CLUSTER.MANAGE_USER_PERSISTED_DATA'" }, false);
 
     permissionId = permissionDAO.findPermissionByNameAndType("AMBARI.ADMINISTRATOR",
       resourceTypeDAO.findByName("AMBARI")).getId().toString();
@@ -818,10 +819,10 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
    */
   protected void updateAlertCurrentTable() throws SQLException {
     dbAccessor.addColumn(ALERT_CURRENT_TABLE,
-            new DBColumnInfo(ALERT_CURRENT_OCCURRENCES_COLUMN, Long.class, null, 1, false));
+      new DBColumnInfo(ALERT_CURRENT_OCCURRENCES_COLUMN, Long.class, null, 1, false));
 
     dbAccessor.addColumn(ALERT_CURRENT_TABLE, new DBColumnInfo(ALERT_CURRENT_FIRMNESS_COLUMN,
-            String.class, 255, AlertFirmness.HARD.name(), false));
+      String.class, 255, AlertFirmness.HARD.name(), false));
   }
 
   protected void setRoleSortOrder() throws SQLException {
@@ -833,15 +834,15 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
     dbAccessor.executeUpdate(String.format(updateStatement,
         2, PermissionEntity.CLUSTER_ADMINISTRATOR_PERMISSION_NAME));
     dbAccessor.executeUpdate(String.format(updateStatement,
-            3, PermissionEntity.CLUSTER_OPERATOR_PERMISSION_NAME));
+      3, PermissionEntity.CLUSTER_OPERATOR_PERMISSION_NAME));
     dbAccessor.executeUpdate(String.format(updateStatement,
-            4, PermissionEntity.SERVICE_ADMINISTRATOR_PERMISSION_NAME));
+      4, PermissionEntity.SERVICE_ADMINISTRATOR_PERMISSION_NAME));
     dbAccessor.executeUpdate(String.format(updateStatement,
-            5, PermissionEntity.SERVICE_OPERATOR_PERMISSION_NAME));
+      5, PermissionEntity.SERVICE_OPERATOR_PERMISSION_NAME));
     dbAccessor.executeUpdate(String.format(updateStatement,
-            6, PermissionEntity.CLUSTER_USER_PERMISSION_NAME));
+      6, PermissionEntity.CLUSTER_USER_PERMISSION_NAME));
     dbAccessor.executeUpdate(String.format(updateStatement,
-            7, PermissionEntity.VIEW_USER_PERMISSION_NAME));
+      7, PermissionEntity.VIEW_USER_PERMISSION_NAME));
   }
 
   /**
@@ -995,7 +996,7 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
         "from_stack_id", STACK_TABLE, "stack_id", false);
 
     dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_to_stack_id",
-            "to_stack_id", STACK_TABLE, "stack_id", false);
+      "to_stack_id", STACK_TABLE, "stack_id", false);
 
     addSequence("servicecomponent_history_id_seq", 0L, false);
   }
@@ -1108,6 +1109,16 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
 
             updateConfigurationPropertiesForCluster(cluster, "ams-hbase-env", newProperties, true, true);
           }
+
+          Config amsSite = cluster.getDesiredConfigByType("ams-site");
+          if (amsSite != null) {
+            String metadataFilters = amsSite.getProperties().get("timeline.metrics.service.metadata.filters");
+            if (StringUtils.isEmpty(metadataFilters) ||
+                !metadataFilters.contains("ContainerResource")) {
+              updateConfigurationProperties("ams-site",
+                Collections.singletonMap("timeline.metrics.service.metadata.filters", "ContainerResource"), true, false);
+            }
+          }
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3c98164/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
index e5758bf..a7fd9c8 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
@@ -577,5 +577,13 @@
       Enable Linear interpolation for missing slices of data, while aggregating.
     </description>
   </property>
+  <property>
+    <name>timeline.metrics.service.metadata.filters</name>
+    <value>ContainerResource</value>
+    <description>
+      Comma-separated list of regular expressions that match metric names,
+      preventing certain metrics from ending up in the metadata cache.
+    </description>
+  </property>
 
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3c98164/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2
index 6f32000..fcd9b23 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2
@@ -101,8 +101,4 @@ namenode.sink.timeline.metric.rpc.datanode.port={{nn_rpc_dn_port}}
 namenode.sink.timeline.metric.rpc.healthcheck.port={{nn_rpc_healthcheck_port}}
 {% endif %}
 
-# Switch off container metrics
-*.source.filter.class=org.apache.hadoop.metrics2.filter.GlobFilter
-nodemanager.*.source.filter.exclude=*ContainerResource*
-
 {% endif %}