Posted to commits@ambari.apache.org by nc...@apache.org on 2017/06/12 16:45:08 UTC

[01/50] [abbrv] ambari git commit: AMBARI-21138. Incorrect setting of java path for druid service (slim bouguerra via smohanty)

Repository: ambari
Updated Branches:
  refs/heads/branch-feature-AMBARI-12556 871f85b34 -> 382da9799


AMBARI-21138. Incorrect setting of java path for druid service (slim bouguerra via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/499814af
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/499814af
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/499814af

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 499814af75d3adfa7c5afcd1564ed79e16cb30a0
Parents: b916e37
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Mon Jun 5 15:57:09 2017 -0700
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Mon Jun 5 15:57:09 2017 -0700

----------------------------------------------------------------------
 .../common-services/DRUID/0.9.2/configuration/druid-env.xml        | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/499814af/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-env.xml b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-env.xml
index 539ef0c..23b82e3 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-env.xml
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-env.xml
@@ -226,7 +226,7 @@
 
       # The java implementation to use.
       export JAVA_HOME={{java8_home}}
-      export PATH=$PATH:$JAVA_HOME/bin
+      export PATH=$JAVA_HOME/bin:$PATH
       export DRUID_PID_DIR={{druid_pid_dir}}
       export DRUID_LOG_DIR={{druid_log_dir}}
       export DRUID_CONF_DIR={{druid_conf_dir}}
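
A note on why this one-line reordering matters: the shell resolves commands by scanning PATH entries left to right, so appending $JAVA_HOME/bin lets any system-wide java shadow the JDK configured for Druid, while prepending guarantees the {{java8_home}} binary wins. The sketch below is not part of the commit; it is only an illustration (Unix-style "java" executable name assumed) of how the first match on PATH decides which binary runs:

    // PathProbe.java - prints the directory whose "java" would be picked up first on PATH
    import java.io.File;

    public class PathProbe {
        public static void main(String[] args) {
            String path = System.getenv("PATH");
            if (path == null) {
                return;
            }
            for (String dir : path.split(File.pathSeparator)) {
                File candidate = new File(dir, "java");
                if (candidate.isFile() && candidate.canExecute()) {
                    System.out.println("java resolves to: " + candidate.getAbsolutePath());
                    return;   // first hit wins, just like the shell's lookup
                }
            }
            System.out.println("no java found on PATH");
        }
    }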


[37/50] [abbrv] ambari git commit: AMBARI-21128 Add AMS HA support to local metrics aggregator application (dsen)

Posted by nc...@apache.org.
AMBARI-21128 Add AMS HA support to local metrics aggregator application (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/29f75089
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/29f75089
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/29f75089

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 29f750894754ed2112fcedaa0b2f5ec693b5cd0e
Parents: 190ecad
Author: Dmytro Sen <ds...@apache.org>
Authored: Fri Jun 9 14:36:11 2017 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Fri Jun 9 14:36:11 2017 +0300

----------------------------------------------------------------------
 .../timeline/AbstractTimelineMetricsSink.java   |   4 +-
 .../ambari-metrics-host-aggregator/pom.xml      |  30 +++-
 .../AbstractMetricPublisherThread.java          | 134 ---------------
 .../aggregator/AggregatedMetricsPublisher.java  | 101 -----------
 .../host/aggregator/AggregatorApplication.java  |  98 +++++++----
 .../host/aggregator/AggregatorWebService.java   |   2 +-
 .../host/aggregator/RawMetricsPublisher.java    |  60 -------
 .../host/aggregator/TimelineMetricsHolder.java  |  26 ++-
 .../sink/timeline/AbstractMetricPublisher.java  | 169 +++++++++++++++++++
 .../timeline/AggregatedMetricsPublisher.java    | 103 +++++++++++
 .../sink/timeline/RawMetricsPublisher.java      |  65 +++++++
 .../aggregator/AggregatorApplicationTest.java   |  55 ++++++
 .../aggregator/AggregatorWebServiceTest.java    | 135 +++++++++++++++
 .../aggregator/TimelineMetricsHolderTest.java   | 107 ++++++++++++
 .../timeline/AbstractMetricPublisherTest.java   |  82 +++++++++
 .../AggregatedMetricsPublisherTest.java         | 154 +++++++++++++++++
 .../sink/timeline/RawMetricsPublisherTest.java  | 151 +++++++++++++++++
 .../src/main/python/core/aggregator.py          |   6 +-
 .../src/main/python/core/controller.py          |   2 +-
 19 files changed, 1133 insertions(+), 351 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
index fddf4b3..644d978 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
@@ -88,7 +88,7 @@ public abstract class AbstractTimelineMetricsSink {
   private static final String WWW_AUTHENTICATE = "WWW-Authenticate";
   private static final String NEGOTIATE = "Negotiate";
 
-  protected static final AtomicInteger failedCollectorConnectionsCounter = new AtomicInteger(0);
+  protected final AtomicInteger failedCollectorConnectionsCounter = new AtomicInteger(0);
   public static int NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS = 100;
   protected static final AtomicInteger nullCollectorCounter = new AtomicInteger(0);
   public static int NUMBER_OF_NULL_COLLECTOR_EXCEPTIONS = 20;
@@ -120,7 +120,7 @@ public abstract class AbstractTimelineMetricsSink {
   private volatile boolean isInitializedForHA = false;
 
   @SuppressWarnings("all")
-  private final int RETRY_COUNT_BEFORE_COLLECTOR_FAILOVER = 5;
+  private final int RETRY_COUNT_BEFORE_COLLECTOR_FAILOVER = 3;
 
   private final Gson gson = new Gson();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/pom.xml b/ambari-metrics/ambari-metrics-host-aggregator/pom.xml
index 0598bef..24432dd 100644
--- a/ambari-metrics/ambari-metrics-host-aggregator/pom.xml
+++ b/ambari-metrics/ambari-metrics-host-aggregator/pom.xml
@@ -38,12 +38,6 @@
 
     <dependencies>
         <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <version>3.8.1</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
             <version>14.0.1</version>
@@ -83,6 +77,30 @@
             <artifactId>hadoop-common</artifactId>
             <version>2.7.1.2.3.4.0-3347</version>
         </dependency>
+        <dependency>
+            <groupId>com.sun.jersey.jersey-test-framework</groupId>
+            <artifactId>jersey-test-framework-core</artifactId>
+            <version>1.11</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.sun.jersey.jersey-test-framework</groupId>
+            <artifactId>jersey-test-framework-grizzly2</artifactId>
+            <version>1.11</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.easymock</groupId>
+            <artifactId>easymock</artifactId>
+            <version>3.4</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.2</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <build>

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AbstractMetricPublisherThread.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AbstractMetricPublisherThread.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AbstractMetricPublisherThread.java
deleted file mode 100644
index b1f60fa..0000000
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AbstractMetricPublisherThread.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.host.aggregator;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.codehaus.jackson.map.AnnotationIntrospector;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.Map;
-
-/**
- * Abstract class that runs a thread that publishes metrics data to AMS collector in specified intervals.
- */
-public abstract class AbstractMetricPublisherThread extends Thread {
-    protected int publishIntervalInSeconds;
-    protected String publishURL;
-    protected ObjectMapper objectMapper;
-    private Log LOG;
-    protected TimelineMetricsHolder timelineMetricsHolder;
-
-    public AbstractMetricPublisherThread(TimelineMetricsHolder timelineMetricsHolder, String publishURL, int publishIntervalInSeconds) {
-        LOG = LogFactory.getLog(this.getClass());
-        this.publishURL = publishURL;
-        this.publishIntervalInSeconds = publishIntervalInSeconds;
-        this.timelineMetricsHolder = timelineMetricsHolder;
-        objectMapper = new ObjectMapper();
-        AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
-        objectMapper.setAnnotationIntrospector(introspector);
-        objectMapper.getSerializationConfig()
-                .withSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
-    }
-
-    /**
-     * Publishes metrics to collector in specified intervals while not interrupted.
-     */
-    @Override
-    public void run() {
-        while (!isInterrupted()) {
-            try {
-                sleep(this.publishIntervalInSeconds * 1000);
-            } catch (InterruptedException e) {
-                //Ignore
-            }
-            try {
-                processAndPublishMetrics(getMetricsFromCache());
-            } catch (Exception e) {
-                LOG.error("Couldn't process and send metrics : ",e);
-            }
-        }
-    }
-
-    /**
-     * Processes and sends metrics to collector.
-     * @param metricsFromCache
-     * @throws Exception
-     */
-    protected void processAndPublishMetrics(Map<Long, TimelineMetrics> metricsFromCache) throws Exception {
-        if (metricsFromCache.size()==0) return;
-
-        LOG.info(String.format("Preparing %s timeline metrics for publishing", metricsFromCache.size()));
-        publishMetricsJson(processMetrics(metricsFromCache));
-    }
-
-    /**
-     * Returns metrics map. Source is based on implementation.
-     * @return
-     */
-    protected abstract Map<Long,TimelineMetrics> getMetricsFromCache();
-
-    /**
-     * Processes given metrics (aggregates or merges them) and converts them into json string that will be send to collector
-     * @param metricValues
-     * @return
-     */
-    protected abstract String processMetrics(Map<Long, TimelineMetrics> metricValues);
-
-    protected void publishMetricsJson(String jsonData) throws Exception {
-        int timeout = 5 * 1000;
-        HttpURLConnection connection = null;
-        if (this.publishURL == null) {
-            throw new IOException("Unknown URL. Unable to connect to metrics collector.");
-        }
-        LOG.info("Collector URL : " + publishURL);
-        connection = (HttpURLConnection) new URL(this.publishURL).openConnection();
-
-        connection.setRequestMethod("POST");
-        connection.setRequestProperty("Content-Type", "application/json");
-        connection.setRequestProperty("Connection", "Keep-Alive");
-        connection.setConnectTimeout(timeout);
-        connection.setReadTimeout(timeout);
-        connection.setDoOutput(true);
-
-        if (jsonData != null) {
-            try (OutputStream os = connection.getOutputStream()) {
-                os.write(jsonData.getBytes("UTF-8"));
-            }
-        }
-        int responseCode = connection.getResponseCode();
-        if (responseCode != 200) {
-            throw new Exception("responseCode is " + responseCode);
-        }
-        LOG.info("Successfully sent metrics.");
-    }
-
-    /**
-     * Interrupts the thread.
-     */
-    protected void stopPublisher() {
-        this.interrupt();
-    }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatedMetricsPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatedMetricsPublisher.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatedMetricsPublisher.java
deleted file mode 100644
index 0540ec9..0000000
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatedMetricsPublisher.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.host.aggregator;
-
-
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.AggregationResult;
-import org.apache.hadoop.metrics2.sink.timeline.MetricHostAggregate;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricWithAggregatedValues;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-
-/**
- * Thread that aggregates and publishes metrics to collector on specified interval.
- */
-public class AggregatedMetricsPublisher extends AbstractMetricPublisherThread {
-
-    private Log LOG;
-
-    public AggregatedMetricsPublisher(TimelineMetricsHolder timelineMetricsHolder, String collectorURL, int interval) {
-        super(timelineMetricsHolder, collectorURL, interval);
-        LOG = LogFactory.getLog(this.getClass());
-    }
-
-    /**
-     * get metrics map form @TimelineMetricsHolder
-     * @return
-     */
-    @Override
-    protected Map<Long, TimelineMetrics> getMetricsFromCache() {
-        return timelineMetricsHolder.extractMetricsForAggregationPublishing();
-    }
-
-    /**
-     * Aggregates given metrics and converts them into json string that will be send to collector
-     * @param metricForAggregationValues
-     * @return
-     */
-    @Override
-    protected String processMetrics(Map<Long, TimelineMetrics> metricForAggregationValues) {
-        HashMap<String, TimelineMetrics> nameToMetricMap = new HashMap<>();
-        for (TimelineMetrics timelineMetrics : metricForAggregationValues.values()) {
-            for (TimelineMetric timelineMetric : timelineMetrics.getMetrics()) {
-                if (!nameToMetricMap.containsKey(timelineMetric.getMetricName())) {
-                    nameToMetricMap.put(timelineMetric.getMetricName(), new TimelineMetrics());
-                }
-                nameToMetricMap.get(timelineMetric.getMetricName()).addOrMergeTimelineMetric(timelineMetric);
-            }
-        }
-        Set<TimelineMetricWithAggregatedValues> metricAggregateMap = new HashSet<>();
-        for (TimelineMetrics metrics : nameToMetricMap.values()) {
-            double sum = 0;
-            double max = Integer.MIN_VALUE;
-            double min = Integer.MAX_VALUE;
-            int count = 0;
-            for (TimelineMetric metric : metrics.getMetrics()) {
-                for (Double value : metric.getMetricValues().values()) {
-                    sum+=value;
-                    max = Math.max(max, value);
-                    min = Math.min(min, value);
-                    count++;
-                }
-            }
-            TimelineMetric tmpMetric = new TimelineMetric(metrics.getMetrics().get(0));
-            tmpMetric.setMetricValues(new TreeMap<Long, Double>());
-            metricAggregateMap.add(new TimelineMetricWithAggregatedValues(tmpMetric, new MetricHostAggregate(sum, count, 0d, max, min)));
-        }
-        String json = null;
-        try {
-            json = objectMapper.writeValueAsString(new AggregationResult(metricAggregateMap, System.currentTimeMillis()));
-            LOG.debug(json);
-        } catch (Exception e) {
-            LOG.error("Failed to convert result into json", e);
-        }
-
-        return json;
-    }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplication.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplication.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplication.java
index c6b703b..1e5cc82 100644
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplication.java
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplication.java
@@ -33,6 +33,9 @@ import java.util.HashMap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.sink.timeline.AbstractMetricPublisher;
+import org.apache.hadoop.metrics2.sink.timeline.AggregatedMetricsPublisher;
+import org.apache.hadoop.metrics2.sink.timeline.RawMetricsPublisher;
 
 /**
  * WEB application with 2 publisher threads that processes received metrics and submits results to the collector
@@ -40,24 +43,25 @@ import org.apache.hadoop.conf.Configuration;
 public class AggregatorApplication
 {
     private static final int STOP_SECONDS_DELAY = 0;
-    private static final int JOIN_SECONDS_TIMEOUT = 2;
-    private static String BASE_POST_URL = "%s://%s:%s/ws/v1/timeline/metrics";
-    private static String AGGREGATED_POST_PREFIX = "/aggregated";
+    private static final int JOIN_SECONDS_TIMEOUT = 5;
     private static final String METRICS_SITE_CONFIGURATION_FILE = "ams-site.xml";
-    private static Log LOG = LogFactory.getLog("AggregatorApplication.class");
+    private Log LOG;
     private final int webApplicationPort;
     private final int rawPublishingInterval;
     private final int aggregationInterval;
     private Configuration configuration;
-    private String [] collectorHosts;
-    private AggregatedMetricsPublisher aggregatePublisher;
-    private RawMetricsPublisher rawPublisher;
+    private Thread aggregatePublisherThread;
+    private Thread rawPublisherThread;
     private TimelineMetricsHolder timelineMetricsHolder;
     private HttpServer httpServer;
 
-    public AggregatorApplication(String collectorHosts) {
+    public AggregatorApplication(String hostname, String collectorHosts) {
+        LOG = LogFactory.getLog(this.getClass());
+        configuration = new Configuration(true);
         initConfiguration();
-        this.collectorHosts = collectorHosts.split(",");
+        configuration.set("timeline.metrics.collector.hosts", collectorHosts);
+        configuration.set("timeline.metrics.hostname", hostname);
+        configuration.set("timeline.metrics.zk.quorum", getZkQuorumFromConfiguration());
         this.aggregationInterval = configuration.getInt("timeline.metrics.host.aggregator.minute.interval", 300);
         this.rawPublishingInterval = configuration.getInt("timeline.metrics.sink.report.interval", 60);
         this.webApplicationPort = configuration.getInt("timeline.metrics.host.inmemory.aggregation.port", 61888);
@@ -70,7 +74,13 @@ public class AggregatorApplication
         }
     }
 
-    private void initConfiguration() {
+    private String getZkQuorumFromConfiguration() {
+        String zkClientPort = configuration.getTrimmed("cluster.zookeeper.property.clientPort", "2181");
+        String zkServerHosts = configuration.getTrimmed("cluster.zookeeper.quorum", "");
+        return getZkConnectionUrl(zkClientPort, zkServerHosts);
+    }
+
+    protected void initConfiguration() {
         ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
         if (classLoader == null) {
             classLoader = getClass().getClassLoader();
@@ -82,7 +92,7 @@ public class AggregatorApplication
             throw new IllegalStateException("Unable to initialize the metrics " +
                     "subsystem. No ams-site present in the classpath.");
         }
-        configuration = new Configuration(true);
+
         try {
             configuration.addResource(amsResUrl.toURI().toURL());
         } catch (Exception e) {
@@ -91,7 +101,7 @@ public class AggregatorApplication
         }
     }
 
-    private String getHostName() {
+    protected String getHostName() {
         String hostName = "localhost";
         try {
             hostName = InetAddress.getLocalHost().getCanonicalHostName();
@@ -101,13 +111,13 @@ public class AggregatorApplication
         return hostName;
     }
 
-    private URI getURI() {
+    protected URI getURI() {
         URI uri = UriBuilder.fromUri("http://" + getHostName() + "/").port(this.webApplicationPort).build();
         LOG.info(String.format("Web server at %s", uri));
         return uri;
     }
 
-    private HttpServer createHttpServer() throws IOException {
+    protected HttpServer createHttpServer() throws IOException {
         ResourceConfig resourceConfig = new PackagesResourceConfig("org.apache.hadoop.metrics2.host.aggregator");
         HashMap<String, Object> params = new HashMap();
         params.put("com.sun.jersey.api.json.POJOMappingFeature", "true");
@@ -122,29 +132,30 @@ public class AggregatorApplication
 
     private void startAggregatePublisherThread() {
         LOG.info("Starting aggregated metrics publisher.");
-        String collectorURL = buildBasicCollectorURL(collectorHosts[0]) + AGGREGATED_POST_PREFIX;
-        aggregatePublisher = new AggregatedMetricsPublisher(timelineMetricsHolder, collectorURL, aggregationInterval);
-        aggregatePublisher.start();
+        AbstractMetricPublisher metricPublisher = new AggregatedMetricsPublisher(timelineMetricsHolder, configuration, aggregationInterval);
+        aggregatePublisherThread = new Thread(metricPublisher);
+        aggregatePublisherThread.start();
     }
 
     private void startRawPublisherThread() {
         LOG.info("Starting raw metrics publisher.");
-        String collectorURL = buildBasicCollectorURL(collectorHosts[0]);
-        rawPublisher = new RawMetricsPublisher(timelineMetricsHolder, collectorURL, rawPublishingInterval);
-        rawPublisher.start();
+        AbstractMetricPublisher metricPublisher = new RawMetricsPublisher(timelineMetricsHolder, configuration, rawPublishingInterval);
+        rawPublisherThread = aggregatePublisherThread = new Thread(metricPublisher);
+        aggregatePublisherThread.start();
     }
 
 
 
     private void stop() {
-        aggregatePublisher.stopPublisher();
-        rawPublisher.stopPublisher();
+        LOG.info("Stopping aggregator application");
+        aggregatePublisherThread.interrupt();
+        rawPublisherThread.interrupt();
         httpServer.stop(STOP_SECONDS_DELAY);
         LOG.info("Stopped web server.");
         try {
             LOG.info("Waiting for threads to join.");
-            aggregatePublisher.join(JOIN_SECONDS_TIMEOUT * 1000);
-            rawPublisher.join(JOIN_SECONDS_TIMEOUT * 1000);
+            aggregatePublisherThread.join(JOIN_SECONDS_TIMEOUT * 1000);
+            rawPublisherThread.join(JOIN_SECONDS_TIMEOUT * 1000);
             LOG.info("Gracefully stopped Aggregator Application.");
         } catch (InterruptedException e) {
             LOG.error("Received exception during stop : ", e);
@@ -153,28 +164,43 @@ public class AggregatorApplication
 
     }
 
-    private String buildBasicCollectorURL(String host) {
-        String port = configuration.get("timeline.metrics.service.webapp.address", "0.0.0.0:6188").split(":")[1];
-        String protocol = configuration.get("timeline.metrics.service.http.policy", "HTTP_ONLY").equalsIgnoreCase("HTTP_ONLY") ? "http" : "https";
-        return String.format(BASE_POST_URL, protocol, host, port);
+    private String getZkConnectionUrl(String zkClientPort, String zkQuorum) {
+        StringBuilder sb = new StringBuilder();
+        String[] quorumParts = zkQuorum.split(",");
+        String prefix = "";
+        for (String part : quorumParts) {
+            sb.append(prefix);
+            sb.append(part.trim());
+            if (!part.contains(":")) {
+                sb.append(":");
+                sb.append(zkClientPort);
+            }
+            prefix = ",";
+        }
+        return sb.toString();
     }
 
     public static void main( String[] args ) throws Exception {
-        LOG.info("Starting aggregator application");
-        if (args.length != 1) {
-            throw new Exception("This jar should be run with 1 argument - collector hosts separated with coma");
+        if (args.length != 2) {
+            throw new Exception("This jar should be executed with 2 arguments : 1st - current host name, " +
+                    "2nd - collector hosts separated with coma");
         }
 
-        final AggregatorApplication app = new AggregatorApplication(args[0]);
-        app.startAggregatePublisherThread();
-        app.startRawPublisherThread();
-        app.startWebServer();
+        final AggregatorApplication app = new AggregatorApplication(args[0], args[1]);
+
+        app.startWebServerAndPublishersThreads();
 
         Runtime.getRuntime().addShutdownHook(new Thread() {
             public void run() {
-                LOG.info("Stopping aggregator application");
                 app.stop();
             }
         });
     }
+
+    private void startWebServerAndPublishersThreads() {
+        LOG.info("Starting aggregator application");
+        startAggregatePublisherThread();
+        startRawPublisherThread();
+        startWebServer();
+    }
 }
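
The new getZkConnectionUrl helper normalizes the ZooKeeper quorum string by appending the client port to any host that does not already carry one. A standalone restatement for illustration only (example host names are made up, the committed method is the one shown in the hunk above):

    public class ZkQuorumExample {
        // Mirrors the quorum normalization logic from the hunk above.
        static String buildZkConnectionUrl(String zkClientPort, String zkQuorum) {
            StringBuilder sb = new StringBuilder();
            String prefix = "";
            for (String part : zkQuorum.split(",")) {
                sb.append(prefix).append(part.trim());
                if (!part.contains(":")) {
                    sb.append(":").append(zkClientPort);   // host had no explicit port
                }
                prefix = ",";
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            // "zk2.example.com:2182" already carries a port, so only zk1 gets ":2181" appended.
            System.out.println(buildZkConnectionUrl("2181", "zk1.example.com, zk2.example.com:2182"));
            // -> zk1.example.com:2181,zk2.example.com:2182
        }
    }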

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebService.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebService.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebService.java
index f96d0ed..b151209 100644
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebService.java
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebService.java
@@ -39,7 +39,7 @@ public class AggregatorWebService {
     @GET
     @Produces("text/json")
     @Path("/metrics")
-    public Response helloWorld() throws IOException {
+    public Response getOkResponse() throws IOException {
         return Response.ok().build();
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/RawMetricsPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/RawMetricsPublisher.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/RawMetricsPublisher.java
deleted file mode 100644
index f317ed9..0000000
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/RawMetricsPublisher.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.host.aggregator;
-
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-
-import java.util.Map;
-
-public class RawMetricsPublisher extends AbstractMetricPublisherThread {
-    private final Log LOG;
-
-    public RawMetricsPublisher(TimelineMetricsHolder timelineMetricsHolder, String collectorURL, int interval) {
-        super(timelineMetricsHolder, collectorURL, interval);
-        LOG = LogFactory.getLog(this.getClass());
-    }
-
-
-    @Override
-    protected Map<Long, TimelineMetrics> getMetricsFromCache() {
-        return timelineMetricsHolder.extractMetricsForRawPublishing();
-    }
-
-    @Override
-    protected String processMetrics(Map<Long, TimelineMetrics> metricValues) {
-        //merge everything in one TimelineMetrics object
-        TimelineMetrics timelineMetrics = new TimelineMetrics();
-        for (TimelineMetrics metrics : metricValues.values()) {
-            for (TimelineMetric timelineMetric : metrics.getMetrics())
-                timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
-        }
-        //map TimelineMetrics to json string
-        String json = null;
-        try {
-            json = objectMapper.writeValueAsString(timelineMetrics);
-            LOG.debug(json);
-        } catch (Exception e) {
-            LOG.error("Failed to convert result into json", e);
-        }
-        return json;
-    }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolder.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolder.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolder.java
index b355c97..03b6542 100644
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolder.java
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolder.java
@@ -19,8 +19,10 @@ package org.apache.hadoop.metrics2.host.aggregator;
 
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 
+import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.concurrent.TimeUnit;
@@ -33,8 +35,8 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 public class TimelineMetricsHolder {
     private static final int DEFAULT_RAW_CACHE_EXPIRE_TIME = 60;
     private static final int DEFAULT_AGGREGATION_CACHE_EXPIRE_TIME = 300;
-    private Cache<Long, TimelineMetrics> aggregationMetricsCache;
-    private Cache<Long, TimelineMetrics> rawMetricsCache;
+    private Cache<String, TimelineMetrics> aggregationMetricsCache;
+    private Cache<String, TimelineMetrics> rawMetricsCache;
     private static TimelineMetricsHolder instance = null;
     //to ensure no metric values are expired
     private static int EXPIRE_DELAY = 30;
@@ -63,21 +65,29 @@ public class TimelineMetricsHolder {
 
     public void putMetricsForAggregationPublishing(TimelineMetrics timelineMetrics) {
         aggregationCacheLock.writeLock().lock();
-        aggregationMetricsCache.put(System.currentTimeMillis(), timelineMetrics);
+        aggregationMetricsCache.put(calculateCacheKey(timelineMetrics), timelineMetrics);
         aggregationCacheLock.writeLock().unlock();
     }
 
-    public Map<Long, TimelineMetrics> extractMetricsForAggregationPublishing() {
+    private String calculateCacheKey(TimelineMetrics timelineMetrics) {
+        List<TimelineMetric>  metrics =  timelineMetrics.getMetrics();
+        if (metrics.size() > 0) {
+            return  metrics.get(0).getAppId() + System.currentTimeMillis();
+        }
+        return String.valueOf(System.currentTimeMillis());
+    }
+
+    public Map<String, TimelineMetrics> extractMetricsForAggregationPublishing() {
         return extractMetricsFromCacheWithLock(aggregationMetricsCache, aggregationCacheLock);
     }
 
     public void putMetricsForRawPublishing(TimelineMetrics metrics) {
         rawCacheLock.writeLock().lock();
-        rawMetricsCache.put(System.currentTimeMillis(), metrics);
+        rawMetricsCache.put(calculateCacheKey(metrics), metrics);
         rawCacheLock.writeLock().unlock();
     }
 
-    public Map<Long, TimelineMetrics> extractMetricsForRawPublishing() {
+    public Map<String, TimelineMetrics> extractMetricsForRawPublishing() {
         return extractMetricsFromCacheWithLock(rawMetricsCache, rawCacheLock);
     }
 
@@ -87,9 +97,9 @@ public class TimelineMetricsHolder {
      * @param lock
      * @return
      */
-    private Map<Long, TimelineMetrics> extractMetricsFromCacheWithLock(Cache<Long, TimelineMetrics> cache, ReadWriteLock lock) {
+    private Map<String, TimelineMetrics> extractMetricsFromCacheWithLock(Cache<String, TimelineMetrics> cache, ReadWriteLock lock) {
         lock.writeLock().lock();
-        Map<Long, TimelineMetrics> metricsMap = new TreeMap<>(cache.asMap());
+        Map<String, TimelineMetrics> metricsMap = new TreeMap<>(cache.asMap());
         cache.invalidateAll();
         lock.writeLock().unlock();
         return metricsMap;
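
The switch from Long to String cache keys above matters because two sinks on the same host can post within the same millisecond; with a bare System.currentTimeMillis() key the second put silently replaces the first. A minimal demonstration of that collision using Guava's Cache, as the holder does (the metric payloads and appId strings are illustrative only):

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import java.util.concurrent.TimeUnit;

    public class CacheKeyCollisionDemo {
        public static void main(String[] args) {
            Cache<String, String> cache = CacheBuilder.newBuilder()
                    .expireAfterWrite(90, TimeUnit.SECONDS)
                    .build();

            long now = System.currentTimeMillis();
            // Keys prefixed with the appId stay distinct even for the same timestamp...
            cache.put("datanode" + now, "datanode metrics");
            cache.put("nodemanager" + now, "nodemanager metrics");
            System.out.println(cache.size());                          // 2

            // ...whereas a timestamp-only key collapses both batches into one entry.
            cache.put(String.valueOf(now), "first batch");
            cache.put(String.valueOf(now), "second batch");             // overwrites "first batch"
            System.out.println(cache.getIfPresent(String.valueOf(now)));  // second batch
        }
    }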

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisher.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisher.java
new file mode 100644
index 0000000..5af115f
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisher.java
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+
+import java.util.Collection;
+import java.util.Map;
+
+/**
+ * Abstract class that runs a thread that publishes metrics data to AMS collector in specified intervals.
+ */
+public abstract class AbstractMetricPublisher extends AbstractTimelineMetricsSink implements Runnable {
+
+    private static final String AMS_SITE_SSL_KEYSTORE_PATH_PROPERTY = "ssl.server.truststore.location";
+    private static final String AMS_SITE_SSL_KEYSTORE_TYPE_PROPERTY = "ssl.server.truststore.password";
+    private static final String AMS_SITE_SSL_KEYSTORE_PASSWORD_PROPERTY = "ssl.server.truststore.type";
+    private static final String AMS_SITE_HTTP_POLICY_PROPERTY = "timeline.metrics.service.http.policy";
+    private static final String AMS_SITE_COLLECTOR_WEBAPP_ADDRESS_PROPERTY = "timeline.metrics.service.webapp.address";
+    private static final String PUBLISHER_COLLECTOR_HOSTS_PROPERTY = "timeline.metrics.collector.hosts";
+    private static final String PUBLISHER_ZOOKEEPER_QUORUM_PROPERTY = "timeline.metrics.zk.quorum";
+    private static final String PUBLISHER_HOSTNAME_PROPERTY = "timeline.metrics.hostname";
+    protected static String BASE_POST_URL = "%s://%s:%s/ws/v1/timeline/metrics";
+    protected int publishIntervalInSeconds;
+    private Log LOG;
+    protected TimelineMetricsHolder timelineMetricsHolder;
+    protected Configuration configuration;
+    private String collectorProtocol;
+    private String collectorPort;
+    private Collection<String> collectorHosts;
+    private String hostname;
+    private String zkQuorum;
+
+    public AbstractMetricPublisher(TimelineMetricsHolder timelineMetricsHolder, Configuration configuration, int publishIntervalInSeconds) {
+        LOG = LogFactory.getLog(this.getClass());
+        this.configuration = configuration;
+        this.publishIntervalInSeconds = publishIntervalInSeconds;
+        this.timelineMetricsHolder = timelineMetricsHolder;
+        configure();
+    }
+
+    protected void configure() {
+        collectorProtocol = configuration.get(AMS_SITE_HTTP_POLICY_PROPERTY, "HTTP_ONLY").equalsIgnoreCase("HTTP_ONLY") ? "http" : "https";
+        collectorPort = configuration.getTrimmed(AMS_SITE_COLLECTOR_WEBAPP_ADDRESS_PROPERTY, "0.0.0.0:6188").split(":")[1];
+        collectorHosts = parseHostsStringIntoCollection(configuration.getTrimmed(PUBLISHER_COLLECTOR_HOSTS_PROPERTY, ""));
+        zkQuorum = configuration.get(PUBLISHER_ZOOKEEPER_QUORUM_PROPERTY, "");
+        hostname = configuration.get(PUBLISHER_HOSTNAME_PROPERTY, "localhost");
+        collectorHosts = parseHostsStringIntoCollection(configuration.get(PUBLISHER_COLLECTOR_HOSTS_PROPERTY, ""));
+        if (collectorHosts.isEmpty()) {
+            LOG.error("No Metric collector configured.");
+        } else {
+            if (collectorProtocol.contains("https")) {
+                String trustStorePath = configuration.get(AMS_SITE_SSL_KEYSTORE_PATH_PROPERTY).trim();
+                String trustStoreType = configuration.get(AMS_SITE_SSL_KEYSTORE_TYPE_PROPERTY).trim();
+                String trustStorePwd = configuration.get(AMS_SITE_SSL_KEYSTORE_PASSWORD_PROPERTY).trim();
+                loadTruststore(trustStorePath, trustStoreType, trustStorePwd);
+            }
+        }
+    }
+
+    /**
+     * Publishes metrics to collector in specified intervals while not interrupted.
+     */
+    @Override
+    public void run() {
+        while (!Thread.currentThread().isInterrupted()) {
+            try {
+                Thread.sleep(this.publishIntervalInSeconds * 1000);
+            } catch (InterruptedException e) {
+                //Ignore
+            }
+            try {
+                processAndPublishMetrics(getMetricsFromCache());
+            } catch (Exception e) {
+                //ignore
+            }
+        }
+    }
+
+    /**
+     * Processes and sends metrics to collector.
+     * @param metricsFromCache
+     * @throws Exception
+     */
+    protected void processAndPublishMetrics(Map<String, TimelineMetrics> metricsFromCache) throws Exception {
+        if (metricsFromCache.size()==0) return;
+
+        LOG.info(String.format("Preparing %s timeline metrics for publishing", metricsFromCache.size()));
+        emitMetricsJson(getCollectorUri(getCurrentCollectorHost()), processMetrics(metricsFromCache));
+    }
+
+    /**
+     * Returns metrics map. Source is based on implementation.
+     * @return
+     */
+    protected abstract Map<String,TimelineMetrics> getMetricsFromCache();
+
+    /**
+     * Processes given metrics (aggregates or merges them) and converts them into json string that will be send to collector
+     * @param metricValues
+     * @return
+     */
+    protected abstract String processMetrics(Map<String, TimelineMetrics> metricValues);
+
+    protected abstract String getPostUrl();
+
+    @Override
+    protected String getCollectorUri(String host) {
+        return String.format(getPostUrl(), getCollectorProtocol(), host, getCollectorPort());
+    }
+
+    @Override
+    protected String getCollectorProtocol() {
+        return collectorProtocol;
+    }
+
+    @Override
+    protected String getCollectorPort() {
+        return collectorPort;
+    }
+
+    @Override
+    protected int getTimeoutSeconds() {
+        return DEFAULT_POST_TIMEOUT_SECONDS;
+    }
+
+    @Override
+    protected String getZookeeperQuorum() {
+        return zkQuorum;
+    }
+
+    @Override
+    protected Collection<String> getConfiguredCollectorHosts() {
+        return collectorHosts;
+    }
+
+    @Override
+    protected String getHostname() {
+        return hostname;
+    }
+
+    @Override
+    protected boolean isHostInMemoryAggregationEnabled() {
+        return false;
+    }
+
+    @Override
+    protected int getHostInMemoryAggregationPort() {
+        return 0;
+    }
+}
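
As the AggregatorApplication hunk earlier in this commit shows, these publishers are plain Runnables: the application wraps each one in a Thread, starts it, and interrupts and joins it on shutdown. A condensed usage sketch under the same assumptions (the configuration values and host names are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
    import org.apache.hadoop.metrics2.sink.timeline.AbstractMetricPublisher;
    import org.apache.hadoop.metrics2.sink.timeline.RawMetricsPublisher;

    public class PublisherUsageSketch {
        public static void main(String[] args) throws InterruptedException {
            Configuration conf = new Configuration(true);
            conf.set("timeline.metrics.collector.hosts", "collector1.example.com");
            conf.set("timeline.metrics.hostname", "agent1.example.com");

            TimelineMetricsHolder holder = TimelineMetricsHolder.getInstance();
            AbstractMetricPublisher rawPublisher = new RawMetricsPublisher(holder, conf, 60);

            // Publish raw metrics every 60 seconds until interrupted.
            Thread publisherThread = new Thread(rawPublisher);
            publisherThread.start();

            // ... later, on shutdown:
            publisherThread.interrupt();
            publisherThread.join(5 * 1000);
        }
    }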

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisher.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisher.java
new file mode 100644
index 0000000..c8dffab
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisher.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
+/**
+ * Thread that aggregates and publishes metrics to collector on specified interval.
+ */
+public class AggregatedMetricsPublisher extends AbstractMetricPublisher {
+    private static String AGGREGATED_POST_PREFIX = "/aggregated";
+    private Log LOG;
+
+    public AggregatedMetricsPublisher(TimelineMetricsHolder timelineMetricsHolder, Configuration configuration, int interval) {
+        super(timelineMetricsHolder, configuration, interval);
+        LOG = LogFactory.getLog(this.getClass());
+    }
+
+    /**
+     * get metrics map form @TimelineMetricsHolder
+     * @return
+     */
+    @Override
+    protected Map<String, TimelineMetrics> getMetricsFromCache() {
+        return timelineMetricsHolder.extractMetricsForAggregationPublishing();
+    }
+
+    /**
+     * Aggregates given metrics and converts them into json string that will be send to collector
+     * @param metricForAggregationValues
+     * @return
+     */
+    @Override
+    protected String processMetrics(Map<String, TimelineMetrics> metricForAggregationValues) {
+        HashMap<String, TimelineMetrics> nameToMetricMap = new HashMap<>();
+        for (TimelineMetrics timelineMetrics : metricForAggregationValues.values()) {
+            for (TimelineMetric timelineMetric : timelineMetrics.getMetrics()) {
+                if (!nameToMetricMap.containsKey(timelineMetric.getMetricName())) {
+                    nameToMetricMap.put(timelineMetric.getMetricName(), new TimelineMetrics());
+                }
+                nameToMetricMap.get(timelineMetric.getMetricName()).addOrMergeTimelineMetric(timelineMetric);
+            }
+        }
+        Set<TimelineMetricWithAggregatedValues> metricAggregateMap = new HashSet<>();
+        for (TimelineMetrics metrics : nameToMetricMap.values()) {
+            double sum = 0;
+            double max = Integer.MIN_VALUE;
+            double min = Integer.MAX_VALUE;
+            int count = 0;
+            for (TimelineMetric metric : metrics.getMetrics()) {
+                for (Double value : metric.getMetricValues().values()) {
+                    sum+=value;
+                    max = Math.max(max, value);
+                    min = Math.min(min, value);
+                    count++;
+                }
+            }
+            TimelineMetric tmpMetric = new TimelineMetric(metrics.getMetrics().get(0));
+            tmpMetric.setMetricValues(new TreeMap<Long, Double>());
+            metricAggregateMap.add(new TimelineMetricWithAggregatedValues(tmpMetric, new MetricHostAggregate(sum, count, 0d, max, min)));
+        }
+        String json = null;
+        try {
+            json = mapper.writeValueAsString(new AggregationResult(metricAggregateMap, System.currentTimeMillis()));
+            LOG.debug(json);
+        } catch (Exception e) {
+            LOG.error("Failed to convert result into json", e);
+        }
+
+        return json;
+    }
+
+    @Override
+    protected String getPostUrl() {
+        return BASE_POST_URL + AGGREGATED_POST_PREFIX;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisher.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisher.java
new file mode 100644
index 0000000..89addb7
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisher.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+
+import java.util.Map;
+
+public class RawMetricsPublisher extends AbstractMetricPublisher {
+    private final Log LOG;
+
+    public RawMetricsPublisher(TimelineMetricsHolder timelineMetricsHolder, Configuration configuration, int interval) {
+        super(timelineMetricsHolder, configuration, interval);
+        LOG = LogFactory.getLog(this.getClass());
+    }
+
+
+    @Override
+    protected Map<String, TimelineMetrics> getMetricsFromCache() {
+        return timelineMetricsHolder.extractMetricsForRawPublishing();
+    }
+
+    @Override
+    protected String processMetrics(Map<String, TimelineMetrics> metricValues) {
+        //merge everything in one TimelineMetrics object
+        TimelineMetrics timelineMetrics = new TimelineMetrics();
+        for (TimelineMetrics metrics : metricValues.values()) {
+            for (TimelineMetric timelineMetric : metrics.getMetrics())
+                timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
+        }
+        //map TimelineMetrics to json string
+        String json = null;
+        try {
+            json = mapper.writeValueAsString(timelineMetrics);
+            LOG.debug(json);
+        } catch (Exception e) {
+            LOG.error("Failed to convert result into json", e);
+        }
+        return json;
+    }
+
+    @Override
+    protected String getPostUrl() {
+        return BASE_POST_URL;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplicationTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplicationTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplicationTest.java
new file mode 100644
index 0000000..ea72d17
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplicationTest.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.host.aggregator;
+
+import junit.framework.Assert;
+import org.junit.Test;
+
+import java.net.URI;
+
+import static org.easymock.EasyMock.createMockBuilder;
+
+
+public class AggregatorApplicationTest {
+    @Test
+    public void testMainNotEnoughArguments() {
+        try {
+            AggregatorApplication.main(new String[0]);
+            throw new Exception("Should not be thrown");
+        } catch (Exception e) {
+            //expected
+        }
+        try {
+            AggregatorApplication.main(new String[1]);
+            throw new Exception("Should not be thrown");
+        } catch (Exception e) {
+            //expected
+        }
+    }
+
+    @Test
+    public void testGetURI() {
+        AggregatorApplication aggregatorApplicationMock = createMockBuilder(AggregatorApplication.class)
+                .withConstructor("", "")
+                .addMockedMethod("createHttpServer")
+                .addMockedMethod("initConfiguration").createMock();
+
+        URI uri = aggregatorApplicationMock.getURI();
+        Assert.assertEquals("http://" + aggregatorApplicationMock.getHostName() + ":61888/", uri.toString());
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebServiceTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebServiceTest.java
new file mode 100644
index 0000000..736fd06
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebServiceTest.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.host.aggregator;
+
+
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+import com.sun.jersey.test.framework.spi.container.TestContainerFactory;
+import com.sun.jersey.test.framework.spi.container.grizzly2.GrizzlyTestContainerFactory;
+import junit.framework.Assert;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
+import org.junit.Test;
+
+
+import javax.ws.rs.core.MediaType;
+
+import java.util.Collection;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+
+
+public class AggregatorWebServiceTest extends JerseyTest {
+    public AggregatorWebServiceTest() {
+        super(new WebAppDescriptor.Builder(
+                "org.apache.hadoop.metrics2.host.aggregator")
+                .contextPath("jersey-guice-filter")
+                .servletPath("/")
+                .clientConfig(new DefaultClientConfig(JacksonJaxbJsonProvider.class))
+                .build());
+    }
+
+    @Override
+    public TestContainerFactory getTestContainerFactory() {
+        return new GrizzlyTestContainerFactory();
+    }
+
+    @Test
+    public void testOkResponse() {
+        WebResource r = resource();
+        ClientResponse response = r.path("ws").path("v1").path("timeline").path("metrics")
+                .accept("text/json")
+                .get(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+        assertEquals("text/json", response.getType().toString());
+    }
+
+    @Test
+    public void testWrongPath() {
+        WebResource r = resource();
+        ClientResponse response = r.path("ws").path("v1").path("timeline").path("metrics").path("aggregated")
+                .accept("text/json")
+                .get(ClientResponse.class);
+        assertEquals(404, response.getStatus());
+    }
+
+
+    @Test
+    public void testMetricsPost() {
+        TimelineMetricsHolder timelineMetricsHolder = TimelineMetricsHolder.getInstance();
+
+        timelineMetricsHolder.extractMetricsForAggregationPublishing();
+        timelineMetricsHolder.extractMetricsForRawPublishing();
+
+        TimelineMetrics timelineMetrics = TimelineMetricsHolderTest.getTimelineMetricsWithAppID("appid");
+        WebResource r = resource();
+        ClientResponse response = r.path("ws").path("v1").path("timeline").path("metrics")
+                .accept(MediaType.TEXT_PLAIN)
+                .post(ClientResponse.class, timelineMetrics);
+        assertEquals(200, response.getStatus());
+        assertEquals(MediaType.TEXT_PLAIN, response.getType().toString());
+
+        Map<String, TimelineMetrics> aggregationMap =  timelineMetricsHolder.extractMetricsForAggregationPublishing();
+        Map<String, TimelineMetrics> rawMap =  timelineMetricsHolder.extractMetricsForRawPublishing();
+
+        Assert.assertEquals(1, aggregationMap.size());
+        Assert.assertEquals(1, rawMap.size());
+
+        Collection<TimelineMetrics> aggregationCollection = aggregationMap.values();
+        Collection<TimelineMetrics> rawCollection = rawMap.values();
+
+        Collection<String> aggregationCollectionKeys = aggregationMap.keySet();
+        Collection<String> rawCollectionKeys = rawMap.keySet();
+
+        for (String key : aggregationCollectionKeys) {
+            Assert.assertTrue(key.contains("appid"));
+        }
+
+        for (String key : rawCollectionKeys) {
+            Assert.assertTrue(key.contains("appid"));
+        }
+
+        Assert.assertEquals(1, aggregationCollection.size());
+        Assert.assertEquals(1, rawCollection.size());
+
+        TimelineMetrics aggregationTimelineMetrics = (TimelineMetrics) aggregationCollection.toArray()[0];
+        TimelineMetrics rawTimelineMetrics = (TimelineMetrics) rawCollection.toArray()[0];
+
+
+        Assert.assertEquals(1, aggregationTimelineMetrics.getMetrics().size());
+        Assert.assertEquals(1, rawTimelineMetrics.getMetrics().size());
+
+        Assert.assertEquals("appid", aggregationTimelineMetrics.getMetrics().get(0).getAppId());
+        Assert.assertEquals("appid", rawTimelineMetrics.getMetrics().get(0).getAppId());
+
+        aggregationMap =  timelineMetricsHolder.extractMetricsForAggregationPublishing();
+        rawMap =  timelineMetricsHolder.extractMetricsForRawPublishing();
+
+        //Cache should be empty after extraction
+        Assert.assertEquals(0, aggregationMap.size());
+        Assert.assertEquals(0, rawMap.size());
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolderTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolderTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolderTest.java
new file mode 100644
index 0000000..7d8ebf4
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolderTest.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.host.aggregator;
+
+import junit.framework.Assert;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.junit.Test;
+
+import java.lang.reflect.Field;
+import java.util.Collection;
+import java.util.Map;
+
+
+public class TimelineMetricsHolderTest {
+    private TimelineMetricsHolder timelineMetricsHolderInstance;
+
+    public void clearHolderSingleton() throws NoSuchFieldException, IllegalAccessException {
+        Class timelineMetricHolderClass = TimelineMetricsHolder.class;
+        Field field = timelineMetricHolderClass.getDeclaredField("instance");
+        field.setAccessible(true);
+        field.set(field, null);
+    }
+
+    @Test
+    public void testGetInstanceDefaultValues() throws Exception {
+        clearHolderSingleton();
+        Assert.assertNotNull(TimelineMetricsHolder.getInstance());
+    }
+
+    @Test
+    public void testGetInstanceWithParameters() throws Exception {
+        clearHolderSingleton();
+        Assert.assertNotNull(TimelineMetricsHolder.getInstance(1,2));
+    }
+
+    @Test
+    public void testCache() throws Exception {
+        clearHolderSingleton();
+        timelineMetricsHolderInstance = TimelineMetricsHolder.getInstance(4,4);
+        timelineMetricsHolderInstance.putMetricsForAggregationPublishing(getTimelineMetricsWithAppID("aggr"));
+        timelineMetricsHolderInstance.putMetricsForRawPublishing(getTimelineMetricsWithAppID("raw"));
+
+        Map<String, TimelineMetrics> aggregationMap =  timelineMetricsHolderInstance.extractMetricsForAggregationPublishing();
+        Map<String, TimelineMetrics> rawMap =  timelineMetricsHolderInstance.extractMetricsForRawPublishing();
+
+        Assert.assertEquals(1, aggregationMap.size());
+        Assert.assertEquals(1, rawMap.size());
+
+        Collection<TimelineMetrics> aggregationCollection = aggregationMap.values();
+        Collection<TimelineMetrics> rawCollection = rawMap.values();
+
+        Collection<String> aggregationCollectionKeys = aggregationMap.keySet();
+        Collection<String> rawCollectionKeys = rawMap.keySet();
+
+        for (String key : aggregationCollectionKeys) {
+            Assert.assertTrue(key.contains("aggr"));
+        }
+
+        for (String key : rawCollectionKeys) {
+            Assert.assertTrue(key.contains("raw"));
+        }
+
+        Assert.assertEquals(1, aggregationCollection.size());
+        Assert.assertEquals(1, rawCollection.size());
+
+        TimelineMetrics aggregationTimelineMetrics = (TimelineMetrics) aggregationCollection.toArray()[0];
+        TimelineMetrics rawTimelineMetrics = (TimelineMetrics) rawCollection.toArray()[0];
+
+
+        Assert.assertEquals(1, aggregationTimelineMetrics.getMetrics().size());
+        Assert.assertEquals(1, rawTimelineMetrics.getMetrics().size());
+
+        Assert.assertEquals("aggr", aggregationTimelineMetrics.getMetrics().get(0).getAppId());
+        Assert.assertEquals("raw", rawTimelineMetrics.getMetrics().get(0).getAppId());
+
+        aggregationMap =  timelineMetricsHolderInstance.extractMetricsForAggregationPublishing();
+        rawMap =  timelineMetricsHolderInstance.extractMetricsForRawPublishing();
+
+        //Cache should be empty after extraction
+        Assert.assertEquals(0, aggregationMap.size());
+        Assert.assertEquals(0, rawMap.size());
+    }
+
+    public static TimelineMetrics getTimelineMetricsWithAppID(String appId) {
+        TimelineMetric timelineMetric = new TimelineMetric();
+        timelineMetric.setAppId(appId);
+        TimelineMetrics timelineMetrics = new TimelineMetrics();
+        timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
+        return timelineMetrics;
+    }
+}
\ No newline at end of file

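The holder tests above rely on a drain-on-read contract: putMetricsFor*Publishing buffers incoming TimelineMetrics, and the matching extract* call hands back everything buffered so far and leaves the cache empty. A minimal sketch of that contract follows, assuming nothing about the real TimelineMetricsHolder beyond the method names used in the tests; the key scheme and the plain ConcurrentHashMap are illustrative choices, not the actual implementation.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;

    // Illustrative drain-on-read buffer with the put/extract semantics the tests assert.
    public class MetricsBufferSketch {
        private final Map<String, TimelineMetrics> rawCache = new ConcurrentHashMap<>();

        public void putMetricsForRawPublishing(TimelineMetrics metrics) {
            // Key scheme is an assumption; the real holder may key entries differently.
            String appId = metrics.getMetrics().isEmpty()
                ? "unknown" : metrics.getMetrics().get(0).getAppId();
            rawCache.put(appId + "_" + System.nanoTime(), metrics);
        }

        public Map<String, TimelineMetrics> extractMetricsForRawPublishing() {
            // Copy out the current contents and clear, so a second extract sees an empty map.
            // Note: copy-then-clear is not atomic; the real class may handle concurrency differently.
            Map<String, TimelineMetrics> snapshot = new HashMap<>(rawCache);
            rawCache.clear();
            return snapshot;
        }
    }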
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisherTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisherTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisherTest.java
new file mode 100644
index 0000000..a8ddbee
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisherTest.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolderTest;
+import org.junit.Test;
+
+import java.util.Map;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.anyString;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+public class AbstractMetricPublisherTest {
+    @Test
+    public void testProcessAndPublishMetrics() throws Exception {
+        AbstractMetricPublisher publisherMock =
+                createMockBuilder(RawMetricsPublisher.class)
+                        .withConstructor(TimelineMetricsHolder.getInstance(), new Configuration(), 60)
+                        .addMockedMethod("processMetrics")
+                        .addMockedMethod("getCollectorUri")
+                        .addMockedMethod("emitMetricsJson")
+                        .addMockedMethod("getCurrentCollectorHost").createStrictMock();
+
+        TimelineMetricsHolder.getInstance().putMetricsForRawPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("raw"));
+        expect(publisherMock.getCurrentCollectorHost()).andReturn("collectorhost").once();
+        expect(publisherMock.getCollectorUri(anyString())).andReturn("https://collectorhost:11/metrics").once();
+        expect(publisherMock.processMetrics(anyObject(Map.class))).andReturn("{metrics}").once();
+        expect(publisherMock.emitMetricsJson("https://collectorhost:11/metrics", "{metrics}")).andReturn(true).once();
+
+        replay(publisherMock);
+
+        publisherMock.processAndPublishMetrics(TimelineMetricsHolder.getInstance().extractMetricsForRawPublishing());
+
+        verify(publisherMock);
+    }
+
+    @Test
+    public void testRunAndStop() throws Exception {
+        AbstractMetricPublisher publisherMock = createMockBuilder(RawMetricsPublisher.class)
+                .withConstructor(TimelineMetricsHolder.getInstance(), new Configuration(), 1)
+                .addMockedMethod("processAndPublishMetrics").createStrictMock();
+        publisherMock.processAndPublishMetrics(anyObject(Map.class));
+        expectLastCall().times(1);
+
+
+        Thread t = createMockBuilder(Thread.class)
+                .withConstructor(publisherMock)
+                .addMockedMethod("isInterrupted").createStrictMock();
+        expect(t.isInterrupted()).andReturn(false).once();
+        expect(t.isInterrupted()).andReturn(true).once();
+
+        replay(publisherMock, t);
+
+        t.start();
+
+        Thread.sleep(2222);
+
+        verify(publisherMock, t);
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisherTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisherTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisherTest.java
new file mode 100644
index 0000000..3413052
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisherTest.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import junit.framework.Assert;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolderTest;
+import org.junit.Test;
+
+import org.apache.hadoop.conf.Configuration;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class AggregatedMetricsPublisherTest {
+
+    @Test
+    public void testProcessMetrics() throws Exception {
+        Configuration configuration = new Configuration();
+        TimelineMetricsHolder timelineMetricsHolder = TimelineMetricsHolder.getInstance();
+        timelineMetricsHolder.extractMetricsForAggregationPublishing();
+        timelineMetricsHolder.extractMetricsForRawPublishing();
+
+        TreeMap<Long, Double> metric1App1Metrics = new TreeMap<>();
+        metric1App1Metrics.put(1L, 1d);
+        metric1App1Metrics.put(2L, 2d);
+        metric1App1Metrics.put(3L, 3d);
+        timelineMetricsHolder.putMetricsForAggregationPublishing(getTimelineMetricsForAppId("metricName1", "app1", metric1App1Metrics));
+
+        TreeMap<Long, Double> metric2App2Metrics = new TreeMap<>();
+        metric2App2Metrics.put(1L, 4d);
+        metric2App2Metrics.put(2L, 5d);
+        metric2App2Metrics.put(3L, 6d);
+        timelineMetricsHolder.putMetricsForAggregationPublishing(getTimelineMetricsForAppId("metricName2", "app2", metric2App2Metrics));
+
+        TreeMap<Long, Double> metric3App3Metrics = new TreeMap<>();
+        metric3App3Metrics.put(1L, 7d);
+        metric3App3Metrics.put(2L, 8d);
+        metric3App3Metrics.put(3L, 9d);
+
+        timelineMetricsHolder.putMetricsForAggregationPublishing(getTimelineMetricsForAppId("metricName3", "app3", metric3App3Metrics));
+
+
+        AggregatedMetricsPublisher aggregatedMetricsPublisher =
+                new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 60);
+
+        String aggregatedJson = aggregatedMetricsPublisher.processMetrics(timelineMetricsHolder.extractMetricsForAggregationPublishing());
+        String expectedMetric1App1 = "{\"timelineMetric\":{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName1\",\"appid\":\"app1\",\"starttime\":0,\"metrics\":{}},\"metricAggregate\":{\"sum\":6.0,\"deviation\":0.0,\"max\":3.0,\"min\":1.0,\"numberOfSamples\":3}}";
+        String expectedMetric2App2 = "{\"timelineMetric\":{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName2\",\"appid\":\"app2\",\"starttime\":0,\"metrics\":{}},\"metricAggregate\":{\"sum\":15.0,\"deviation\":0.0,\"max\":6.0,\"min\":4.0,\"numberOfSamples\":3}}";
+        String expectedMetric3App3 = "{\"timelineMetric\":{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName3\",\"appid\":\"app3\",\"starttime\":0,\"metrics\":{}},\"metricAggregate\":{\"sum\":24.0,\"deviation\":0.0,\"max\":9.0,\"min\":7.0,\"numberOfSamples\":3}}";
+        Assert.assertNotNull(aggregatedJson);
+        Assert.assertTrue(aggregatedJson.contains(expectedMetric1App1));
+        Assert.assertTrue(aggregatedJson.contains(expectedMetric3App3));
+        Assert.assertTrue(aggregatedJson.contains(expectedMetric2App2));
+    }
+
+    @Test
+    public void testGetPostUrl() {
+        Configuration configuration = new Configuration();
+        AggregatedMetricsPublisher aggregatedMetricsPublisher =
+                new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+        String actualURL = aggregatedMetricsPublisher.getPostUrl();
+        String expectedURL = "%s://%s:%s/ws/v1/timeline/metrics/aggregated";
+        Assert.assertNotNull(actualURL);
+        Assert.assertEquals(expectedURL, actualURL);
+    }
+
+    @Test
+    public void testGetCollectorUri() {
+        //default configuration
+        Configuration configuration = new Configuration();
+        AbstractMetricPublisher aggregatedMetricsPublisher =
+                new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+        String actualURL = aggregatedMetricsPublisher.getCollectorUri("c6401.ambari.apache.org");
+        String expectedURL = "http://c6401.ambari.apache.org:6188/ws/v1/timeline/metrics/aggregated";
+        Assert.assertNotNull(actualURL);
+        Assert.assertEquals(expectedURL, actualURL);
+
+        //https configuration
+        configuration = new Configuration();
+        configuration.set("timeline.metrics.service.http.policy", "HTTPS_ONLY");
+        aggregatedMetricsPublisher =
+                new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+        actualURL = aggregatedMetricsPublisher.getCollectorUri("c6402.ambari.apache.org");
+        expectedURL = "https://c6402.ambari.apache.org:6188/ws/v1/timeline/metrics/aggregated";
+        Assert.assertNotNull(actualURL);
+        Assert.assertEquals(expectedURL, actualURL);
+
+        //custom port configuration
+        configuration = new Configuration();
+        configuration.set("timeline.metrics.service.webapp.address", "0.0.0.0:8888");
+        aggregatedMetricsPublisher =
+                new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+        actualURL = aggregatedMetricsPublisher.getCollectorUri("c6403.ambari.apache.org");
+        expectedURL = "http://c6403.ambari.apache.org:8888/ws/v1/timeline/metrics/aggregated";
+        Assert.assertNotNull(actualURL);
+        Assert.assertEquals(expectedURL, actualURL);
+    }
+
+    @Test
+    public void testGetMetricsFromCache() throws InterruptedException {
+        TimelineMetricsHolder timelineMetricsHolder = TimelineMetricsHolder.getInstance(4,4);
+        timelineMetricsHolder.extractMetricsForAggregationPublishing();
+        timelineMetricsHolder.extractMetricsForRawPublishing();
+
+        timelineMetricsHolder.putMetricsForAggregationPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("aggr1"));
+        timelineMetricsHolder.putMetricsForRawPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("raw"));
+        timelineMetricsHolder.putMetricsForAggregationPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("aggr2"));
+
+        Configuration configuration = new Configuration();
+        AggregatedMetricsPublisher aggregatedMetricsPublisher =
+                new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+
+        Map<String, TimelineMetrics> metricsFromCache = aggregatedMetricsPublisher.getMetricsFromCache();
+        Assert.assertNotNull(metricsFromCache);
+        Collection<TimelineMetrics> actualTimelineMetrics = metricsFromCache.values();
+        Assert.assertNotNull(actualTimelineMetrics);
+        Assert.assertEquals(2, actualTimelineMetrics.size());
+
+        for (TimelineMetrics timelineMetrics : actualTimelineMetrics) {
+            List<TimelineMetric> metrics = timelineMetrics.getMetrics();
+            Assert.assertEquals(1, metrics.size());
+            Assert.assertTrue(metrics.get(0).getAppId().contains("aggr"));
+        }
+
+    }
+
+    TimelineMetrics getTimelineMetricsForAppId(String metricName, String appId, TreeMap<Long, Double> metricValues) {
+        TimelineMetric timelineMetric = new TimelineMetric();
+        timelineMetric.setMetricName(metricName);
+        timelineMetric.setAppId(appId);
+        timelineMetric.setMetricValues(metricValues);
+        TimelineMetrics timelineMetrics = new TimelineMetrics();
+        timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
+        return timelineMetrics;
+    }
+}
\ No newline at end of file

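The URI assertions in the tests above encode a simple mapping from configuration to collector endpoint: the scheme is taken from timeline.metrics.service.http.policy, the port from timeline.metrics.service.webapp.address, and 6188 is the default. The helper below is a hedged sketch of that mapping for illustration only; its name and the way it parses the port are assumptions, not the publisher's actual code.

    import org.apache.hadoop.conf.Configuration;

    // Illustrative helper: the configuration keys from the tests mapped to a collector URI.
    public final class CollectorUriSketch {

        public static String buildUri(Configuration conf, String collectorHost, String path) {
            String policy = conf.get("timeline.metrics.service.http.policy", "HTTP_ONLY");
            String scheme = "HTTPS_ONLY".equals(policy) ? "https" : "http";

            // The default address yields the default port asserted in the tests (6188).
            String webappAddress = conf.get("timeline.metrics.service.webapp.address", "0.0.0.0:6188");
            String port = webappAddress.substring(webappAddress.lastIndexOf(':') + 1);

            return String.format("%s://%s:%s%s", scheme, collectorHost, port, path);
        }

        public static void main(String[] args) {
            Configuration conf = new Configuration();
            conf.set("timeline.metrics.service.webapp.address", "0.0.0.0:8888");
            // Prints http://c6403.ambari.apache.org:8888/ws/v1/timeline/metrics/aggregated
            System.out.println(buildUri(conf, "c6403.ambari.apache.org",
                "/ws/v1/timeline/metrics/aggregated"));
        }
    }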
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisherTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisherTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisherTest.java
new file mode 100644
index 0000000..60510d2
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisherTest.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import junit.framework.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolderTest;
+import org.junit.Test;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+
+public class RawMetricsPublisherTest {
+    @Test
+    public void testProcessMetrics() throws Exception {
+        Configuration configuration = new Configuration();
+        TimelineMetricsHolder timelineMetricsHolder = TimelineMetricsHolder.getInstance();
+
+        timelineMetricsHolder.extractMetricsForAggregationPublishing();
+        timelineMetricsHolder.extractMetricsForRawPublishing();
+
+        TreeMap<Long, Double> metric1App1Metrics = new TreeMap<>();
+        metric1App1Metrics.put(1L, 1d);
+        metric1App1Metrics.put(2L, 2d);
+        metric1App1Metrics.put(3L, 3d);
+        timelineMetricsHolder.putMetricsForRawPublishing(getTimelineMetricsForAppId("metricName1", "app1", metric1App1Metrics));
+
+        TreeMap<Long, Double> metric2App2Metrics = new TreeMap<>();
+        metric2App2Metrics.put(1L, 4d);
+        metric2App2Metrics.put(2L, 5d);
+        metric2App2Metrics.put(3L, 6d);
+        timelineMetricsHolder.putMetricsForRawPublishing(getTimelineMetricsForAppId("metricName2", "app2", metric2App2Metrics));
+
+        TreeMap<Long, Double> metric3App3Metrics = new TreeMap<>();
+        metric3App3Metrics.put(1L, 7d);
+        metric3App3Metrics.put(2L, 8d);
+        metric3App3Metrics.put(3L, 9d);
+
+        timelineMetricsHolder.putMetricsForRawPublishing(getTimelineMetricsForAppId("metricName3", "app3", metric3App3Metrics));
+
+
+        RawMetricsPublisher rawMetricsPublisher =
+                new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 60);
+
+        String rawJson = rawMetricsPublisher.processMetrics(timelineMetricsHolder.extractMetricsForRawPublishing());
+        String expectedResult = "{\"metrics\":[{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName1\",\"appid\":\"app1\",\"starttime\":0,\"metrics\":{\"1\":1.0,\"2\":2.0,\"3\":3.0}},{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName2\",\"appid\":\"app2\",\"starttime\":0,\"metrics\":{\"1\":4.0,\"2\":5.0,\"3\":6.0}},{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName3\",\"appid\":\"app3\",\"starttime\":0,\"metrics\":{\"1\":7.0,\"2\":8.0,\"3\":9.0}}]}";
+        Assert.assertNotNull(rawJson);
+        Assert.assertEquals(expectedResult, rawJson);
+    }
+
+    @Test
+    public void testGetPostUrl() {
+        Configuration configuration = new Configuration();
+        RawMetricsPublisher rawMetricsPublisher =
+                new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+        String actualURL = rawMetricsPublisher.getPostUrl();
+        String expectedURL = "%s://%s:%s/ws/v1/timeline/metrics";
+        Assert.assertNotNull(actualURL);
+        Assert.assertEquals(expectedURL, actualURL);
+    }
+
+    @Test
+    public void testGetCollectorUri() {
+        //default configuration
+        Configuration configuration = new Configuration();
+        AbstractMetricPublisher rawMetricsPublisher =
+                new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+        String actualURL = rawMetricsPublisher.getCollectorUri("c6401.ambari.apache.org");
+        String expectedURL = "http://c6401.ambari.apache.org:6188/ws/v1/timeline/metrics";
+        Assert.assertNotNull(actualURL);
+        Assert.assertEquals(expectedURL, actualURL);
+
+        //https configuration
+        configuration = new Configuration();
+        configuration.set("timeline.metrics.service.http.policy", "HTTPS_ONLY");
+        rawMetricsPublisher =
+                new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+        actualURL = rawMetricsPublisher.getCollectorUri("c6402.ambari.apache.org");
+        expectedURL = "https://c6402.ambari.apache.org:6188/ws/v1/timeline/metrics";
+        Assert.assertNotNull(actualURL);
+        Assert.assertEquals(expectedURL, actualURL);
+
+        //custom port configuration
+        configuration = new Configuration();
+        configuration.set("timeline.metrics.service.webapp.address", "0.0.0.0:8888");
+        rawMetricsPublisher =
+                new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+        actualURL = rawMetricsPublisher.getCollectorUri("c6403.ambari.apache.org");
+        expectedURL = "http://c6403.ambari.apache.org:8888/ws/v1/timeline/metrics";
+        Assert.assertNotNull(actualURL);
+        Assert.assertEquals(expectedURL, actualURL);
+    }
+
+    @Test
+    public void testGetMetricsFromCache() throws InterruptedException {
+
+        TimelineMetricsHolder timelineMetricsHolder = TimelineMetricsHolder.getInstance(4,4);
+        timelineMetricsHolder.extractMetricsForAggregationPublishing();
+        timelineMetricsHolder.extractMetricsForRawPublishing();
+
+        timelineMetricsHolder.putMetricsForRawPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("raw1"));
+        timelineMetricsHolder.putMetricsForAggregationPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("aggr"));
+        timelineMetricsHolder.putMetricsForRawPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("raw2"));
+
+        Configuration configuration = new Configuration();
+        RawMetricsPublisher rawMetricsPublisher =
+                new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+
+        Map<String, TimelineMetrics> metricsFromCache = rawMetricsPublisher.getMetricsFromCache();
+        Assert.assertNotNull(metricsFromCache);
+        Collection<TimelineMetrics> actualTimelineMetrics = metricsFromCache.values();
+        Assert.assertNotNull(actualTimelineMetrics);
+        Assert.assertEquals(2, actualTimelineMetrics.size());
+
+        for (TimelineMetrics timelineMetrics : actualTimelineMetrics) {
+            List<TimelineMetric> metrics = timelineMetrics.getMetrics();
+            Assert.assertEquals(1, metrics.size());
+            Assert.assertTrue(metrics.get(0).getAppId().contains("raw"));
+        }
+
+    }
+
+    TimelineMetrics getTimelineMetricsForAppId(String metricName, String appId, TreeMap<Long, Double> metricValues) {
+        TimelineMetric timelineMetric = new TimelineMetric();
+        timelineMetric.setMetricName(metricName);
+        timelineMetric.setAppId(appId);
+        timelineMetric.setMetricValues(metricValues);
+        TimelineMetrics timelineMetrics = new TimelineMetrics();
+        timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
+        return timelineMetrics;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/aggregator.py
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/aggregator.py b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/aggregator.py
index 2249e53..ba05e9b 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/aggregator.py
+++ b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/aggregator.py
@@ -42,9 +42,10 @@ class Aggregator(threading.Thread):
     ams_log_file = "ambari-metrics-aggregator.log"
     additional_classpath = ':{0}'.format(config_dir)
     ams_log_dir = self._config.ams_monitor_log_dir()
+    hostname = self._config.get_hostname_config()
     logger.info('Starting Aggregator thread.')
-    cmd = "{0}/bin/java {1} -Dams.log.dir={2} -Dams.log.file={3} -cp /var/lib/ambari-metrics-monitor/lib/*{4} {5} {6}"\
-      .format(java_home, jvm_agrs, ams_log_dir, ams_log_file, additional_classpath, class_name, collector_hosts)
+    cmd = "{0}/bin/java {1} -Dams.log.dir={2} -Dams.log.file={3} -cp /var/lib/ambari-metrics-monitor/lib/*{4} {5} {6} {7}"\
+      .format(java_home, jvm_agrs, ams_log_dir, ams_log_file, additional_classpath, class_name, hostname, collector_hosts)
 
     logger.info("Executing : {0}".format(cmd))
 
@@ -60,6 +61,7 @@ class Aggregator(threading.Thread):
     if self._aggregator_process :
       logger.info('Stopping Aggregator thread.')
       self._aggregator_process.terminate()
+      self._aggregator_process = None
 
 class AggregatorWatchdog(threading.Thread):
   SLEEP_TIME = 30
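
With this change the launcher passes the local hostname ahead of the comma-separated collector hosts, so the aggregator's Java entry point now sees two positional arguments. The sketch below only illustrates that argument order and the "not enough arguments" failure exercised in AggregatorApplicationTest; it is not the actual AggregatorApplication code.

    // Sketch of the argument contract the launcher command above now follows:
    // args[0] = local hostname, args[1] = comma-separated collector hosts.
    public class AggregatorLauncherArgsSketch {
        public static void main(String[] args) {
            if (args.length < 2) {
                throw new IllegalArgumentException(
                    "Expected <hostname> <collector_hosts>, got " + args.length + " argument(s)");
            }
            String hostname = args[0];
            String[] collectorHosts = args[1].split(",");
            System.out.println("Aggregating on " + hostname + " against "
                + collectorHosts.length + " collector(s)");
        }
    }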


[22/50] [abbrv] ambari git commit: AMBARI-21054. Add ppc as a new OS for User. (aonishuk)

Posted by nc...@apache.org.
AMBARI-21054. Add ppc as a new OS for User. (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e2fbd0f0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e2fbd0f0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e2fbd0f0

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: e2fbd0f0675d68e54765d83a07a2ea825e2d3821
Parents: d21d434
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Jun 8 15:23:29 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Jun 8 15:23:29 2017 +0300

----------------------------------------------------------------------
 .../server/controller/AmbariManagementControllerImplTest.java | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e2fbd0f0/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
index fde9260..0312579 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
@@ -101,6 +101,7 @@ import org.apache.ambari.server.state.ServiceOsSpecific;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
 import org.junit.Before;
@@ -133,6 +134,7 @@ public class AmbariManagementControllerImplTest {
   private static final AmbariMetaInfo ambariMetaInfo = createMock(AmbariMetaInfo.class);
   private static final Users users = createMock(Users.class);
   private static final AmbariSessionManager sessionManager = createNiceMock(AmbariSessionManager.class);
+  private static final OsFamily osFamily = createNiceMock(OsFamily.class);
 
   @BeforeClass
   public static void setupAuthentication() {
@@ -144,7 +146,7 @@ public class AmbariManagementControllerImplTest {
 
   @Before
   public void before() throws Exception {
-    reset(ldapDataPopulator, clusters, actionDBAccessor, ambariMetaInfo, users, sessionManager);
+    reset(ldapDataPopulator, clusters, actionDBAccessor, ambariMetaInfo, users, sessionManager, osFamily);
   }
 
   @Test
@@ -1988,7 +1990,7 @@ public class AmbariManagementControllerImplTest {
   @Test
   public void testPopulateServicePackagesInfo() throws Exception {
     Capture<AmbariManagementController> controllerCapture = EasyMock.newCapture();
-    Injector injector = createStrictMock(Injector.class);
+    Injector injector = Guice.createInjector(Modules.override(new InMemoryDefaultTestModule()).with(new MockModule()));
     MaintenanceStateHelper maintHelper = createNiceMock(MaintenanceStateHelper.class);
 
     ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
@@ -2198,6 +2200,7 @@ public class AmbariManagementControllerImplTest {
       binder.bind(AmbariMetaInfo.class).toInstance(ambariMetaInfo);
       binder.bind(Users.class).toInstance(users);
       binder.bind(AmbariSessionManager.class).toInstance(sessionManager);
+      binder.bind(OsFamily.class).toInstance(osFamily);
     }
   }
 

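The test change above replaces a strict Injector mock with a real Guice injector whose test module overrides selected bindings, which is why the OsFamily binding is added. The self-contained sketch below shows the Modules.override(...).with(...) pattern on hypothetical types; only the Guice calls are the point, and the module and interface names are made up for illustration.

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import com.google.inject.util.Modules;

    // Hypothetical example of overriding one binding from a base module in a test injector.
    public class GuiceOverrideSketch {

        interface Clock { long now(); }

        static class BaseModule extends AbstractModule {
            @Override protected void configure() {
                bind(Clock.class).toInstance(System::currentTimeMillis);
            }
        }

        static class MockModule extends AbstractModule {
            @Override protected void configure() {
                // Replaces the production binding, much like the test module above binds OsFamily.
                bind(Clock.class).toInstance(() -> 42L);
            }
        }

        public static void main(String[] args) {
            Injector injector = Guice.createInjector(
                Modules.override(new BaseModule()).with(new MockModule()));
            System.out.println(injector.getInstance(Clock.class).now()); // prints 42
        }
    }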

[38/50] [abbrv] ambari git commit: AMBARI-21213 Consolidate UI for the "Loading..." text (dili)

Posted by nc...@apache.org.
AMBARI-21213 Consolidate UI for the "Loading..." text (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d97aa1df
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d97aa1df
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d97aa1df

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d97aa1dfc5c238544b65fcd67534de64aff556a2
Parents: 29f7508
Author: Di Li <di...@apache.org>
Authored: Fri Jun 9 15:43:57 2017 -0400
Committer: Di Li <di...@apache.org>
Committed: Fri Jun 9 15:43:57 2017 -0400

----------------------------------------------------------------------
 .../main/admin/highAvailability/progress_popup_controller.js      | 2 +-
 ambari-web/app/messages.js                                        | 3 ---
 ambari-web/app/templates/main.hbs                                 | 2 +-
 ambari-web/app/utils/ajax/ajax.js                                 | 2 +-
 .../app/views/common/configs/service_config_container_view.js     | 2 +-
 ambari-web/app/views/loading.js                                   | 2 +-
 ambari-web/app/views/main/alerts/definition_details_view.js       | 2 +-
 ambari-web/test/views/main/alerts/definition_details_view_test.js | 2 +-
 8 files changed, 7 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d97aa1df/ambari-web/app/controllers/main/admin/highAvailability/progress_popup_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/progress_popup_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/progress_popup_controller.js
index 7231e5c..4cbe59b 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/progress_popup_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/progress_popup_controller.js
@@ -83,7 +83,7 @@ App.HighAvailabilityProgressPopupController = Ember.Controller.extend({
   initPopup: function (popupTitle, requestIds, progressController, showSpinner, stageId) {
     if (showSpinner) {
       var loadingPopup = App.ModalPopup.show({
-        header: Em.I18n.t('jobs.loadingTasks'),
+        header: Em.I18n.t('common.loading.eclipses'),
         primary: false,
         secondary: false,
         bodyClass: Ember.View.extend({

http://git-wip-us.apache.org/repos/asf/ambari/blob/d97aa1df/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 6f317a3..faad84c 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -30,7 +30,6 @@ Em.I18n.translations = {
     '<br/>Alternatively login as an Ambari local user using the local login page.<br />' +
     '<a href="{0}" target="_blank">{0}</a>',
 
-  'app.loadingPlaceholder': 'Loading...',
   'app.versionMismatchAlert.title': 'Ambari Server / Web Client Version Mismatch',
   'app.versionMismatchAlert.body': 'Ambari Server and Web Client versions do not match:<br> ' +
     '<br>Ambari Server: <strong>{0}</strong>' +
@@ -3169,9 +3168,7 @@ Em.I18n.translations = {
   'menu.item.views':'<i class="glyphicon glyphicon-th"></i>',
   'menu.item.views.noViews':'No Views',
 
-  'bulkOperation.loading': 'Loading...',
   'jobs.nothingToShow': 'No jobs to display',
-  'jobs.loadingTasks': 'Loading...',
   'jobs.error.ats.down': 'Jobs data cannot be shown since YARN App Timeline Server is not running.',
   'jobs.error.400': 'Unable to load data.',
   'jobs.table.custom.date.am':'AM',

http://git-wip-us.apache.org/repos/asf/ambari/blob/d97aa1df/ambari-web/app/templates/main.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main.hbs b/ambari-web/app/templates/main.hbs
index bf6fa56..a1ba86a 100644
--- a/ambari-web/app/templates/main.hbs
+++ b/ambari-web/app/templates/main.hbs
@@ -22,7 +22,7 @@
   </div>
 {{else}}
   {{#unless isClusterDataLoaded}}
-    <h2>{{t app.loadingPlaceholder}}</h2>
+    <h2>{{t common.loading.eclipses}}</h2>
     <div class="progress">
       <div class="progress-bar progress-bar-striped active" {{QAAttr "main-progress-bar"}} {{bindAttr style="controller.clusterDataLoadedPercent"}}></div>
     </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d97aa1df/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index 492a216..c595181 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -3131,7 +3131,7 @@ var ajax = Em.Object.extend({
     if(config.hasOwnProperty("showLoadingPopup") && config.showLoadingPopup === true) {
       loadingPopupTimeout = setTimeout(function() {
         loadingPopup = App.ModalPopup.show({
-          header: Em.I18n.t('jobs.loadingTasks'),
+          header: Em.I18n.t('common.loading.eclipses'),
           backdrop: false,
           primary: false,
           secondary: false,

http://git-wip-us.apache.org/repos/asf/ambari/blob/d97aa1df/ambari-web/app/views/common/configs/service_config_container_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/service_config_container_view.js b/ambari-web/app/views/common/configs/service_config_container_view.js
index a7cee0c..d1ff009 100644
--- a/ambari-web/app/views/common/configs/service_config_container_view.js
+++ b/ambari-web/app/views/common/configs/service_config_container_view.js
@@ -40,7 +40,7 @@ App.ServiceConfigContainerView = Em.ContainerView.extend({
           selectedServiceBinding: controllerRoute + '.selectedService',
           serviceConfigsByCategoryView: Em.ContainerView.create(),
           willDestroyElement: function () {
-            $('.loading').append(Em.I18n.t('app.loadingPlaceholder'));
+            $('.loading').append(Em.I18n.t('common.loading.eclipses'));
           },
           didInsertElement: function () {
             $('.loading').empty();

http://git-wip-us.apache.org/repos/asf/ambari/blob/d97aa1df/ambari-web/app/views/loading.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/loading.js b/ambari-web/app/views/loading.js
index 81d4a60..47468bb 100644
--- a/ambari-web/app/views/loading.js
+++ b/ambari-web/app/views/loading.js
@@ -20,5 +20,5 @@ var App = require('app');
 
 App.LoadingView = Em.View.extend({
     tagName: 'h2',
-    template: Ember.Handlebars.compile('{{t app.loadingPlaceholder}}')
+    template: Ember.Handlebars.compile('{{t common.loading.eclipses}}')
 });
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d97aa1df/ambari-web/app/views/main/alerts/definition_details_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/alerts/definition_details_view.js b/ambari-web/app/views/main/alerts/definition_details_view.js
index 5c8efff..bb9e738 100644
--- a/ambari-web/app/views/main/alerts/definition_details_view.js
+++ b/ambari-web/app/views/main/alerts/definition_details_view.js
@@ -216,7 +216,7 @@ App.MainAlertDefinitionDetailsView = App.TableView.extend({
     template: Ember.Handlebars.compile('<span>{{view.count}}</span>'),
     count: function () {
       var lastDayAlertsCount = this.get('parentView.controller.lastDayAlertsCount');
-      return lastDayAlertsCount ? lastDayAlertsCount[this.get('hostName')] || 0 : Em.I18n.t('app.loadingPlaceholder');
+      return lastDayAlertsCount ? lastDayAlertsCount[this.get('hostName')] || 0 : Em.I18n.t('common.loading.eclipses');
     }.property('parentView.controller.lastDayAlertsCount', 'hostName')
   }),
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d97aa1df/ambari-web/test/views/main/alerts/definition_details_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/alerts/definition_details_view_test.js b/ambari-web/test/views/main/alerts/definition_details_view_test.js
index b3c4712..7b96858 100644
--- a/ambari-web/test/views/main/alerts/definition_details_view_test.js
+++ b/ambari-web/test/views/main/alerts/definition_details_view_test.js
@@ -208,7 +208,7 @@ describe('App.MainAlertDefinitionDetailsView', function () {
 
       it("lastDayAlertsCount is null", function () {
         lastDayCountView.set('parentView.controller.lastDayAlertsCount', null);
-        expect(lastDayCountView.get('count')).to.equal(Em.I18n.t('app.loadingPlaceholder'));
+        expect(lastDayCountView.get('count')).to.equal(Em.I18n.t('common.loading.eclipses'));
       });
 
       it("lastDayAlertsCount does not contain host", function () {


[25/50] [abbrv] ambari git commit: AMBARI-21202 - YARN service advisor has spelling error in yarn-site

Posted by nc...@apache.org.
AMBARI-21202 - YARN service advisor has spelling error in yarn-site


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7dc91c5a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7dc91c5a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7dc91c5a

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 7dc91c5a8e75966f642a4890ed7a5750687b5928
Parents: aa6b330
Author: Tim Thorpe <tt...@apache.org>
Authored: Thu Jun 8 08:49:32 2017 -0700
Committer: Tim Thorpe <tt...@apache.org>
Committed: Thu Jun 8 08:49:32 2017 -0700

----------------------------------------------------------------------
 .../resources/common-services/YARN/3.0.0.3.0/service_advisor.py  | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7dc91c5a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
index 3dd39e5..0fb538d 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
@@ -1796,7 +1796,7 @@ class YARNValidator(service_advisor.ServiceAdvisor):
 
     self.validators = [("yarn-site", self.validateYARNSiteConfigurationsFromHDP206),
                        ("yarn-site", self.validateYARNSiteConfigurationsFromHDP25),
-                       ("yarn-ste" , self.validateYarnSiteConfigurationsFromHDP26),
+                       ("yarn-site" , self.validateYARNSiteConfigurationsFromHDP26),
                        ("yarn-env", self.validateYARNEnvConfigurationsFromHDP206),
                        ("yarn-env", self.validateYARNEnvConfigurationsFromHDP22),
                        ("ranger-yarn-plugin-properties", self.validateYARNRangerPluginConfigurationsFromHDP22)]
@@ -1853,7 +1853,7 @@ class YARNValidator(service_advisor.ServiceAdvisor):
     validationProblems = self.toConfigurationValidationProblems(validationItems, "yarn-site")
     return validationProblems
 
-  def validateYarnSiteConfigurationsFromHDP26(self, properties, recommendedDefaults, configurations, services, hosts):
+  def validateYARNSiteConfigurationsFromHDP26(self, properties, recommendedDefaults, configurations, services, hosts):
     validationItems = []
     siteProperties = services["configurations"]["yarn-site"]["properties"]
     if services["configurations"]["yarn-site"]["properties"]["yarn.http.policy"] == 'HTTP_ONLY':


[12/50] [abbrv] ambari git commit: AMBARI-21161 Able to safe guard Ambari server against circular dependencies in role command order (dili)

Posted by nc...@apache.org.
AMBARI-21161 Able to safe guard Ambari server against circular dependencies in role command order (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/acb2f989
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/acb2f989
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/acb2f989

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: acb2f9896e55db202f5b054565d09c0e598011ed
Parents: 1586a1a
Author: Di Li <di...@apache.org>
Authored: Wed Jun 7 12:26:47 2017 -0400
Committer: Di Li <di...@apache.org>
Committed: Wed Jun 7 12:26:47 2017 -0400

----------------------------------------------------------------------
 .../ambari/server/stageplanner/RoleGraph.java   |  49 ++++-
 .../server/stageplanner/TestStagePlanner.java   | 197 +++++++++++++++++--
 .../stacks/HDP/2.0.6.1/role_command_order.json  | 101 ++--------
 3 files changed, 241 insertions(+), 106 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/acb2f989/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java b/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
index 024a5e1..d6d26c9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
@@ -19,10 +19,12 @@ package org.apache.ambari.server.stageplanner;
 
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.actionmanager.CommandExecutionType;
@@ -165,10 +167,15 @@ public class RoleGraph {
    * Returns a list of stages that need to be executed one after another
    * to execute the DAG generated in the last {@link #build(Stage)} call.
    */
-  public List<Stage> getStages() {
+  public List<Stage> getStages() throws AmbariException {
     long initialStageId = initialStage.getStageId();
     List<Stage> stageList = new ArrayList<>();
     List<RoleGraphNode> firstStageNodes = new ArrayList<>();
+    if(!graph.isEmpty()){
+      LOG.info("Detecting cycle graphs");
+      LOG.info(stringifyGraph());
+      breakCycleGraph();
+    }
     while (!graph.isEmpty()) {
       if (LOG.isDebugEnabled()) {
         LOG.debug(stringifyGraph());
@@ -313,4 +320,44 @@ public class RoleGraph {
     }
     return builder.toString();
   }
+
+  /**
+   * Cycle graphs indicate circular dependencies such as the following example
+   * that can cause Ambari to enter an infinite loop while building stages.
+   *   (DATANODE, START, 2) --> (NAMENODE, START, 2) --> (SECONDARY_NAMENODE, START, 3)
+   *   (HDFS_CLIENT, INSTALL, 0) --> (DATANODE, START, 2) --> (NAMENODE, START, 2) --> (SECONDARY_NAMENODE, START, 3)
+   *   (NAMENODE, START, 2) --> (DATANODE, START, 2) --> (SECONDARY_NAMENODE, START, 3)
+   *   (SECONDARY_NAMENODE, START, 3)
+   * It is important to safeguard against cycle graphs
+   * when Ambari supports mpacks, custom services and service level role command order.
+   * */
+  public void breakCycleGraph() throws AmbariException{
+    List<String> edges = new ArrayList<String>();
+    for (String role : graph.keySet()){
+      RoleGraphNode fromNode = graph.get(role);
+      String fnRole = fromNode.getRole().name();
+      String fnCommand = fromNode.getCommand().name();
+
+      Iterator<RoleGraphNode> it = fromNode.getEdges().iterator();
+      while(it.hasNext()){
+        RoleGraphNode toNode = it.next();
+        String tnRole = toNode.getRole().name();
+        String tnCommand = toNode.getCommand().name();
+        //Check if the reversed edge already exists in the list
+        //If it does, the two roles are mutually dependent: log an error and abort by throwing
+        String format = "%s:%s --> %s:%s";
+        String edge = String.format(format, fnRole, fnCommand, tnRole, tnCommand);
+        String reversedEdge = String.format(format, tnRole, tnCommand, fnRole, fnCommand);
+        if (edges.contains(reversedEdge)){
+          String msg = String.format(
+              "Circular dependencies detected between %s and %s for %s. "
+              + "%s already exists in the role command order.", fnRole, tnRole, edge, reversedEdge);
+          LOG.error(msg);
+          throw new AmbariException(msg);
+        } else {
+          edges.add(edge);
+        }
+      }
+    }
+  }
 }

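The reversed-edge check in breakCycleGraph needs only the edges it has already seen: as soon as it meets an edge whose reverse was recorded earlier, the two nodes depend on each other and no stage ordering can satisfy both. The standalone sketch below mirrors that idea on plain strings; it is not Ambari code, and like the method above it only reports two-node cycles, not longer ones.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Standalone illustration of the reversed-edge check: a pair of mutually dependent
    // nodes is reported as soon as the second direction of the edge is seen.
    public class ReversedEdgeCheckSketch {

        static void checkForTwoNodeCycles(Map<String, List<String>> adjacency) {
            List<String> seenEdges = new ArrayList<>();
            for (Map.Entry<String, List<String>> entry : adjacency.entrySet()) {
                for (String to : entry.getValue()) {
                    String edge = entry.getKey() + " --> " + to;
                    String reversed = to + " --> " + entry.getKey();
                    if (seenEdges.contains(reversed)) {
                        throw new IllegalStateException(
                            "Circular dependency: " + edge + " conflicts with " + reversed);
                    }
                    seenEdges.add(edge);
                }
            }
        }

        public static void main(String[] args) {
            Map<String, List<String>> graph = new LinkedHashMap<>();
            graph.put("HBASE_MASTER:START", Arrays.asList("ZOOKEEPER_SERVER:START"));
            graph.put("ZOOKEEPER_SERVER:START", Arrays.asList("HBASE_MASTER:START"));
            checkForTwoNodeCycles(graph); // throws IllegalStateException, like the short-chain test
        }
    }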
http://git-wip-us.apache.org/repos/asf/ambari/blob/acb2f989/ambari-server/src/test/java/org/apache/ambari/server/stageplanner/TestStagePlanner.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stageplanner/TestStagePlanner.java b/ambari-server/src/test/java/org/apache/ambari/server/stageplanner/TestStagePlanner.java
index 271ee89..ae3b316 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stageplanner/TestStagePlanner.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stageplanner/TestStagePlanner.java
@@ -44,13 +44,18 @@ import org.apache.ambari.server.utils.StageUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.inject.Guice;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
 
+import junit.framework.Assert;
+
 public class TestStagePlanner {
+  private static final Logger log = LoggerFactory.getLogger(TestStagePlanner.class);
 
   private Injector injector;
 
@@ -88,7 +93,7 @@ public class TestStagePlanner {
     rg.build(stage);
     List<Stage> outStages = rg.getStages();
     for (Stage s: outStages) {
-      System.out.println(s.toString());
+      log.info(s.toString());
     }
     assertEquals(1, outStages.size());
     assertEquals(stage.getExecutionCommands(hostname), outStages.get(0)
@@ -96,7 +101,165 @@ public class TestStagePlanner {
   }
 
   @Test
-  public void testMultiStagePlan() {
+  public void testSCCInGraphDetectedShort() {
+    ClusterImpl cluster = mock(ClusterImpl.class);
+    when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP-2.0.6.1"));
+
+    Service hbaseService = mock(Service.class);
+    when(hbaseService.getDesiredStackId()).thenReturn(new StackId("HDP-2.0.6.1"));
+    Service zkService = mock(Service.class);
+    when(zkService.getDesiredStackId()).thenReturn(new StackId("HDP-2.0.6.1"));
+
+    when(cluster.getServices()).thenReturn(ImmutableMap.<String, Service>builder()
+        .put("HBASE", hbaseService)
+        .put("ZOOKEEPER", zkService)
+        .build());
+
+    RoleCommandOrder rco = roleCommandOrderProvider.getRoleCommandOrder(cluster);
+    RoleGraph rg = roleGraphFactory.createNew(rco);
+    long now = System.currentTimeMillis();
+    Stage stage = StageUtils.getATestStage(1, 1, "host1", "", "");
+    stage.addHostRoleExecutionCommand("host2", Role.HBASE_MASTER,
+        RoleCommand.START, new ServiceComponentHostStartEvent("HBASE_MASTER",
+            "host2", now), "cluster1", "HBASE", false, false);
+    stage.addHostRoleExecutionCommand("host3", Role.ZOOKEEPER_SERVER,
+        RoleCommand.START, new ServiceComponentHostStartEvent("ZOOKEEPER_SERVER",
+            "host3", now), "cluster1", "ZOOKEEPER", false, false);
+    log.info("Build and ready to detect circular dependencies - short chain");
+    rg.build(stage);
+    boolean exceptionThrown = false;
+    try {
+      List<Stage> outStages = rg.getStages();
+    } catch (AmbariException e) {
+      exceptionThrown = true;
+    }
+    Assert.assertTrue(exceptionThrown);
+  }
+
+  @Test
+  public void testSCCInGraphDetectedLong() {
+    ClusterImpl cluster = mock(ClusterImpl.class);
+    when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP-2.0.6.1"));
+
+    Service hbaseService = mock(Service.class);
+    when(hbaseService.getDesiredStackId()).thenReturn(new StackId("HDP-2.0.6.1"));
+    Service zkService = mock(Service.class);
+    when(zkService.getDesiredStackId()).thenReturn(new StackId("HDP-2.0.6.1"));
+    Service yarnService = mock(Service.class);
+    when(yarnService.getDesiredStackId()).thenReturn(new StackId("HDP-2.0.6.1"));
+
+    when(cluster.getServices()).thenReturn(ImmutableMap.<String, Service>builder()
+        .put("HBASE", hbaseService)
+        .put("ZOOKEEPER", zkService)
+        .put("YARN", yarnService)
+        .build());
+
+    RoleCommandOrder rco = roleCommandOrderProvider.getRoleCommandOrder(cluster);
+    RoleGraph rg = roleGraphFactory.createNew(rco);
+    long now = System.currentTimeMillis();
+    Stage stage = StageUtils.getATestStage(1, 1, "host1", "", "");
+    stage.addHostRoleExecutionCommand("host2", Role.HBASE_MASTER,
+        RoleCommand.STOP, new ServiceComponentHostStartEvent("HBASE_MASTER",
+            "host2", now), "cluster1", "HBASE", false, false);
+    stage.addHostRoleExecutionCommand("host3", Role.ZOOKEEPER_SERVER,
+        RoleCommand.STOP, new ServiceComponentHostStartEvent("ZOOKEEPER_SERVER",
+            "host3", now), "cluster1", "ZOOKEEPER", false, false);
+    stage.addHostRoleExecutionCommand("host2", Role.RESOURCEMANAGER,
+        RoleCommand.STOP, new ServiceComponentHostStartEvent("RESOURCEMANAGER",
+            "host4", now), "cluster1", "YARN", false, false);
+
+    log.info("Build and ready to detect circular dependencies - long chain");
+    rg.build(stage);
+    boolean exceptionThrown = false;
+    try {
+      List<Stage> outStages = rg.getStages();
+    } catch (AmbariException e) {
+      exceptionThrown = true;
+    }
+    Assert.assertTrue(exceptionThrown);
+  }
+
+  @Test
+  public void testSCCInGraphDetectedLongTwo() {
+    ClusterImpl cluster = mock(ClusterImpl.class);
+    when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP-2.0.6.1"));
+
+    Service hbaseService = mock(Service.class);
+    when(hbaseService.getDesiredStackId()).thenReturn(new StackId("HDP-2.0.6.1"));
+    Service zkService = mock(Service.class);
+    when(zkService.getDesiredStackId()).thenReturn(new StackId("HDP-2.0.6.1"));
+
+    when(cluster.getServices()).thenReturn(ImmutableMap.<String, Service>builder()
+        .put("HBASE", hbaseService)
+        .put("ZOOKEEPER", zkService)
+        .build());
+
+    RoleCommandOrder rco = roleCommandOrderProvider.getRoleCommandOrder(cluster);
+    RoleGraph rg = roleGraphFactory.createNew(rco);
+    long now = System.currentTimeMillis();
+    Stage stage = StageUtils.getATestStage(1, 1, "host1", "", "");
+    stage.addHostRoleExecutionCommand("host2", Role.HBASE_MASTER,
+        RoleCommand.UPGRADE, new ServiceComponentHostStartEvent("HBASE_MASTER",
+            "host2", now), "cluster1", "HBASE", false, false);
+    stage.addHostRoleExecutionCommand("host3", Role.ZOOKEEPER_SERVER,
+        RoleCommand.UPGRADE, new ServiceComponentHostStartEvent("ZOOKEEPER_SERVER",
+            "host3", now), "cluster1", "ZOOKEEPER", false, false);
+    stage.addHostRoleExecutionCommand("host2", Role.HBASE_REGIONSERVER,
+        RoleCommand.UPGRADE, new ServiceComponentHostStartEvent("HBASE_REGIONSERVER",
+            "host4", now), "cluster1", "HBASE", false, false);
+
+    log.info("Build and ready to detect circular dependencies - long chain");
+    rg.build(stage);
+    boolean exceptionThrown = false;
+    try {
+      List<Stage> outStages = rg.getStages();
+    } catch (AmbariException e) {
+      exceptionThrown = true;
+    }
+    Assert.assertTrue(exceptionThrown);
+  }
+
+  @Test
+  public void testNoSCCInGraphDetected() {
+    ClusterImpl cluster = mock(ClusterImpl.class);
+    when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP-2.0.6"));
+
+    Service hbaseService = mock(Service.class);
+    when(hbaseService.getDesiredStackId()).thenReturn(new StackId("HDP-2.0.6"));
+    Service zkService = mock(Service.class);
+    when(zkService.getDesiredStackId()).thenReturn(new StackId("HDP-2.0.6"));
+
+    when(cluster.getServices()).thenReturn(ImmutableMap.<String, Service>builder()
+        .put("HBASE", hbaseService)
+        .put("ZOOKEEPER", zkService)
+        .build());
+
+    RoleCommandOrder rco = roleCommandOrderProvider.getRoleCommandOrder(cluster);
+    RoleGraph rg = roleGraphFactory.createNew(rco);
+    long now = System.currentTimeMillis();
+    Stage stage = StageUtils.getATestStage(1, 1, "host1", "", "");
+    stage.addHostRoleExecutionCommand("host2", Role.HBASE_MASTER,
+        RoleCommand.STOP, new ServiceComponentHostStartEvent("HBASE_MASTER",
+            "host2", now), "cluster1", "HBASE", false, false);
+    stage.addHostRoleExecutionCommand("host3", Role.ZOOKEEPER_SERVER,
+        RoleCommand.STOP, new ServiceComponentHostStartEvent("ZOOKEEPER_SERVER",
+            "host3", now), "cluster1", "ZOOKEEPER", false, false);
+    stage.addHostRoleExecutionCommand("host2", Role.HBASE_REGIONSERVER,
+        RoleCommand.STOP, new ServiceComponentHostStartEvent("HBASE_REGIONSERVER",
+            "host4", now), "cluster1", "HBASE", false, false);
+    log.info("Build and ready to detect circular dependencies");
+    rg.build(stage);
+    boolean exceptionThrown = false;
+    try {
+      List<Stage> outStages = rg.getStages();
+    } catch (AmbariException e) {
+      exceptionThrown = true;
+    }
+    Assert.assertFalse(exceptionThrown);
+  }
+
+  @Test
+  public void testMultiStagePlan() throws Throwable {
     ClusterImpl cluster = mock(ClusterImpl.class);
     when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP-2.0.6"));
 
@@ -121,19 +284,19 @@ public class TestStagePlanner {
     stage.addHostRoleExecutionCommand("host3", Role.ZOOKEEPER_SERVER,
         RoleCommand.START, new ServiceComponentHostStartEvent("ZOOKEEPER_SERVER",
             "host3", now), "cluster1", "ZOOKEEPER", false, false);
-    System.out.println(stage.toString());
+    log.info(stage.toString());
 
     rg.build(stage);
-    System.out.println(rg.stringifyGraph());
+    log.info(rg.stringifyGraph());
     List<Stage> outStages = rg.getStages();
     for (Stage s: outStages) {
-      System.out.println(s.toString());
+      log.info(s.toString());
     }
     assertEquals(3, outStages.size());
   }
 
   @Test
-  public void testRestartStagePlan() {
+  public void testRestartStagePlan() throws Throwable {
     ClusterImpl cluster = mock(ClusterImpl.class);
     when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP-2.0.6"));
 
@@ -158,19 +321,19 @@ public class TestStagePlanner {
       RoleCommand.CUSTOM_COMMAND, "cluster1",
       new ServiceComponentHostServerActionEvent("host2", System.currentTimeMillis()),
       null, "command detail", null, null, false, false);
-    System.out.println(stage.toString());
+    log.info(stage.toString());
 
     rg.build(stage);
-    System.out.println(rg.stringifyGraph());
+    log.info(rg.stringifyGraph());
     List<Stage> outStages = rg.getStages();
     for (Stage s: outStages) {
-      System.out.println(s.toString());
+      log.info(s.toString());
     }
     assertEquals(2, outStages.size());
   }
 
   @Test
-  public void testManyStages() {
+  public void testManyStages() throws Throwable {
     ClusterImpl cluster = mock(ClusterImpl.class);
     when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP-2.0.6"));
 
@@ -244,18 +407,18 @@ public class TestStagePlanner {
       RoleCommand.START, new ServiceComponentHostStartEvent("GANGLIA_SERVER",
         "host9", now), "cluster1", "GANGLIA", false, false);
 
-    System.out.println(stage.toString());
+    log.info(stage.toString());
     rg.build(stage);
-    System.out.println(rg.stringifyGraph());
+    log.info(rg.stringifyGraph());
     List<Stage> outStages = rg.getStages();
     for (Stage s : outStages) {
-      System.out.println(s.toString());
+      log.info(s.toString());
     }
     assertEquals(4, outStages.size());
   }
 
   @Test
-  public void testDependencyOrderedStageCreate() {
+  public void testDependencyOrderedStageCreate() throws Throwable {
     ClusterImpl cluster = mock(ClusterImpl.class);
     when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP-2.0.6"));
     RoleCommandOrder rco = roleCommandOrderProvider.getRoleCommandOrder(cluster);
@@ -296,12 +459,12 @@ public class TestStagePlanner {
     stage.addHostRoleExecutionCommand("host9", Role.GANGLIA_SERVER,
       RoleCommand.START, new ServiceComponentHostStartEvent("GANGLIA_SERVER",
         "host9", now), "cluster1", "GANGLIA", false, false);
-    System.out.println(stage.toString());
+    log.info(stage.toString());
     rg.build(stage);
-    System.out.println(rg.stringifyGraph());
+    log.info(rg.stringifyGraph());
     List<Stage> outStages = rg.getStages();
     for (Stage s : outStages) {
-      System.out.println(s.toString());
+      log.info(s.toString());
     }
     assertEquals(1, outStages.size());
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/acb2f989/ambari-server/src/test/resources/stacks/HDP/2.0.6.1/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.6.1/role_command_order.json b/ambari-server/src/test/resources/stacks/HDP/2.0.6.1/role_command_order.json
index 6154004..4d63fe3 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.6.1/role_command_order.json
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.6.1/role_command_order.json
@@ -1,92 +1,17 @@
 {
-  "_comment" : "Record format:",
-  "_comment" : "blockedRole-blockedCommand: [blockerRole1-blockerCommand1, blockerRole2-blockerCommand2, ...]",
   "general_deps" : {
-    "_comment" : "dependencies for all cases",
+    "_comment" : "circular dependencies, short link",
     "HBASE_MASTER-START": ["ZOOKEEPER_SERVER-START"],
-    "HBASE_REGIONSERVER-START": ["HBASE_MASTER-START"],
-    "OOZIE_SERVER-START": ["JOBTRACKER-START", "TASKTRACKER-START"],
-    "WEBHCAT_SERVER-START": ["TASKTRACKER-START", "HIVE_SERVER-START"],
-    "HIVE_METASTORE-START": ["MYSQL_SERVER-START"],
-    "HIVE_SERVER-START": ["TASKTRACKER-START", "MYSQL_SERVER-START"],
-    "HUE_SERVER-START": ["HIVE_SERVER-START", "HCAT-START", "OOZIE_SERVER-START"],
-    "FLUME_HANDLER-START": ["OOZIE_SERVER-START"],
-    "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
-    "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START", "MAPREDUCE2_SERVICE_CHECK-SERVICE_CHECK"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
-    "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
-    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
-    "PIG_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
-    "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
-    "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],
-    "ZOOKEEPER_QUORUM_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],
-    "ZOOKEEPER_SERVER-STOP" : ["HBASE_MASTER-STOP", "HBASE_REGIONSERVER-STOP"],
-    "HBASE_MASTER-STOP": ["HBASE_REGIONSERVER-STOP"],
-    "TASKTRACKER-UPGRADE": ["JOBTRACKER-UPGRADE"],
-    "MAPREDUCE_CLIENT-UPGRADE": ["TASKTRACKER-UPGRADE", "JOBTRACKER-UPGRADE"],
-    "ZOOKEEPER_SERVER-UPGRADE": ["MAPREDUCE_CLIENT-UPGRADE"],
-    "ZOOKEEPER_CLIENT-UPGRADE": ["ZOOKEEPER_SERVER-UPGRADE"],
-    "HBASE_MASTER-UPGRADE": ["ZOOKEEPER_CLIENT-UPGRADE"],
-    "HBASE_REGIONSERVER-UPGRADE": ["HBASE_MASTER-UPGRADE"],
-    "HBASE_CLIENT-UPGRADE": ["HBASE_REGIONSERVER-UPGRADE"],
-    "HIVE_SERVER-UPGRADE" : ["HBASE_CLIENT-UPGRADE"],
-    "HIVE_METASTORE-UPGRADE" : ["HIVE_SERVER-UPGRADE"],
-    "MYSQL_SERVER-UPGRADE": ["HIVE_METASTORE-UPGRADE"],
-    "HIVE_CLIENT-UPGRADE": ["MYSQL_SERVER-UPGRADE"],
-    "HCAT-UPGRADE": ["HIVE_CLIENT-UPGRADE"],
-    "OOZIE_SERVER-UPGRADE" : ["HCAT-UPGRADE"],
-    "OOZIE_CLIENT-UPGRADE" : ["OOZIE_SERVER-UPGRADE"],
-    "WEBHCAT_SERVER-UPGRADE" : ["OOZIE_CLIENT-UPGRADE"],
-    "PIG-UPGRADE" : ["WEBHCAT_SERVER-UPGRADE"],
-    "SQOOP-UPGRADE" : ["PIG-UPGRADE"],
-    "GANGLIA_SERVER-UPGRADE" : ["SQOOP-UPGRADE"],
-    "GANGLIA_MONITOR-UPGRADE" : ["GANGLIA_SERVER-UPGRADE"]
-  },
-  "_comment" : "GLUSTERFS-specific dependencies",
-  "optional_glusterfs": {
-    "HBASE_MASTER-START": ["PEERSTATUS-START"],
-    "JOBTRACKER-START": ["PEERSTATUS-START"],
-    "TASKTRACKER-START": ["PEERSTATUS-START"],
-    "GLUSTERFS_SERVICE_CHECK-SERVICE_CHECK": ["PEERSTATUS-START"],
-    "JOBTRACKER-UPGRADE": ["GLUSTERFS_CLIENT-UPGRADE"]
-  },
-  "_comment" : "Dependencies that are used when GLUSTERFS is not present in cluster",
-  "optional_no_glusterfs": {
-    "SECONDARY_NAMENODE-START": ["NAMENODE-START"],
-    "RESOURCEMANAGER-START": ["NAMENODE-START", "DATANODE-START"],
-    "NODEMANAGER-START": ["NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START"],
-    "HISTORYSERVER-START": ["NAMENODE-START", "DATANODE-START"],
-    "HBASE_MASTER-START": ["NAMENODE-START", "DATANODE-START"],
-    "JOBTRACKER-START": ["NAMENODE-START", "DATANODE-START"],
-    "TASKTRACKER-START": ["NAMENODE-START", "DATANODE-START"],
-    "HIVE_SERVER-START": ["DATANODE-START"],
-    "WEBHCAT_SERVER-START": ["DATANODE-START"],
-    "HDFS_SERVICE_CHECK-SERVICE_CHECK": ["NAMENODE-START", "DATANODE-START",
-        "SECONDARY_NAMENODE-START"],
-    "MAPREDUCE2_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START",
-        "RESOURCEMANAGER-START", "HISTORYSERVER-START", "YARN_SERVICE_CHECK-SERVICE_CHECK"],
-    "YARN_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
-    "RESOURCEMANAGER_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START"],
-    "PIG_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
-    "NAMENODE-STOP": ["JOBTRACKER-STOP", "TASKTRACKER-STOP", "RESOURCEMANAGER-STOP",
-        "NODEMANAGER-STOP", "HISTORYSERVER-STOP", "HBASE_MASTER-STOP"],
-    "DATANODE-STOP": ["JOBTRACKER-STOP", "TASKTRACKER-STOP", "RESOURCEMANAGER-STOP",
-        "NODEMANAGER-STOP", "HISTORYSERVER-STOP", "HBASE_MASTER-STOP"],
-    "SECONDARY_NAMENODE-UPGRADE": ["NAMENODE-UPGRADE"],
-    "DATANODE-UPGRADE": ["SECONDARY_NAMENODE-UPGRADE"],
-    "HDFS_CLIENT-UPGRADE": ["DATANODE-UPGRADE"],
-    "JOBTRACKER-UPGRADE": ["HDFS_CLIENT-UPGRADE"]
-  },
-  "_comment" : "Dependencies that are used in HA NameNode cluster",
-  "namenode_optional_ha": {
-    "NAMENODE-START": ["JOURNALNODE-START", "ZOOKEEPER_SERVER-START"],
-    "ZKFC-START": ["NAMENODE-START"],
-    "HDFS_SERVICE_CHECK-SERVICE_CHECK": ["ZKFC-START"]
-  },
-  "_comment" : "Dependencies that are used in ResourceManager HA cluster",
-  "resourcemanager_optional_ha" : {
-    "RESOURCEMANAGER-START": ["ZOOKEEPER_SERVER-START"]
-  }
-}
+    "ZOOKEEPER_SERVER-START": ["HBASE_MASTER-START"],
+
+    "_comment" : "circular dependencies, long link",
+    "ZOOKEEPER_SERVER-STOP" : ["HBASE_MASTER-STOP"],
+    "HBASE_MASTER-STOP": ["RESOURCEMANAGER-STOP"],
+    "RESOURCEMANAGER-STOP": ["ZOOKEEPER_SERVER-STOP"],
 
+    "_comment" : "circular dependencies, long link",
+    "ZOOKEEPER_SERVER-UPGRADE" : ["HBASE_MASTER-UPGRADE"],
+    "HBASE_MASTER-UPGRADE": ["HBASE_REGIONSERVER-UPGRADE"],
+    "HBASE_REGIONSERVER-UPGRADE": ["ZOOKEEPER_SERVER-UPGRADE"]
+  }
+}
\ No newline at end of file


[47/50] [abbrv] ambari git commit: AMBARI-21167. mpack install fails when using https links.(vbrodetskyi)

Posted by nc...@apache.org.
AMBARI-21167. mpack install fails when using https links.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c7b77436
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c7b77436
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c7b77436

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: c7b77436477117050103b35d1e579d02a096d341
Parents: 219f223
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Jun 12 14:05:12 2017 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Mon Jun 12 14:05:12 2017 +0300

----------------------------------------------------------------------
 .../main/python/ambari_commons/inet_utils.py    | 32 ++++++++++++++++++++
 .../main/python/ambari_server/setupMpacks.py    |  4 +--
 2 files changed, 34 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c7b77436/ambari-common/src/main/python/ambari_commons/inet_utils.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/inet_utils.py b/ambari-common/src/main/python/ambari_commons/inet_utils.py
index 22eaaf5..5fd3e88 100644
--- a/ambari-common/src/main/python/ambari_commons/inet_utils.py
+++ b/ambari-common/src/main/python/ambari_commons/inet_utils.py
@@ -23,10 +23,18 @@ import time
 import sys
 import urllib2
 import socket
+from ambari_commons import OSCheck
 from functools import wraps
 
 from exceptions import FatalException, NonFatalException, TimeoutError
 
+if OSCheck.is_windows_family():
+  from ambari_commons.os_windows import os_run_os_command
+else:
+  # MacOS not supported
+  from ambari_commons.os_linux import os_run_os_command
+  pass
+
 from logging_utils import *
 from os_check import OSCheck
 
@@ -59,6 +67,30 @@ def download_file(link, destination, chunk_size=16 * 1024, progress_func = None)
   force_download_file(link, destination, chunk_size, progress_func = progress_func)
 
 
+def download_file_anyway(link, destination, chunk_size=16 * 1024, progress_func = None):
+  print_info_msg("Trying to download {0} to {1} with python lib [urllib2].".format(link, destination))
+  if os.path.exists(destination):
+    print_warning_msg("File {0} already exists, assuming it was downloaded before".format(destination))
+    return
+  try:
+    force_download_file(link, destination, chunk_size, progress_func = progress_func)
+  except:
+    print_error_msg("Download {0} with python lib [urllib2] failed with error: {1}".format(link, str(sys.exc_info())))
+
+  if not os.path.exists(destination):
+    print "Trying to download {0} to {1} with [curl] command.".format(link, destination)
+    #print_info_msg("Trying to download {0} to {1} with [curl] command.".format(link, destination))
+    curl_command = "curl --fail -k -o %s %s" % (destination, link)
+    retcode, out, err = os_run_os_command(curl_command)
+    if retcode != 0:
+      print_error_msg("Download file {0} with [curl] command failed with error: {1}".format(link, out + err))
+
+
+  if not os.path.exists(destination):
+    print_error_msg("Unable to download file {0}!".format(link))
+    print "ERROR: unable to donwload file %s!" % (link)
+
+
 def download_progress(file_name, downloaded_size, blockSize, totalSize):
   percent = int(downloaded_size * 100 / totalSize)
   status = "\r" + file_name

http://git-wip-us.apache.org/repos/asf/ambari/blob/c7b77436/ambari-server/src/main/python/ambari_server/setupMpacks.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/setupMpacks.py b/ambari-server/src/main/python/ambari_server/setupMpacks.py
index 625e428..6f232f4 100755
--- a/ambari-server/src/main/python/ambari_server/setupMpacks.py
+++ b/ambari-server/src/main/python/ambari_server/setupMpacks.py
@@ -27,7 +27,7 @@ import logging
 from ambari_server.serverClassPath import ServerClassPath
 
 from ambari_commons.exceptions import FatalException
-from ambari_commons.inet_utils import download_file
+from ambari_commons.inet_utils import download_file, download_file_anyway
 from ambari_commons.logging_utils import print_info_msg, print_error_msg, print_warning_msg
 from ambari_commons.os_utils import copy_file, run_os_command, change_owner, set_file_permissions
 from ambari_server.serverConfiguration import get_ambari_properties, get_ambari_version, get_stack_location, \
@@ -137,7 +137,7 @@ def download_mpack(mpack_path):
     copy_file(mpack_path, tmp_archive_path)
   else:
     # remote path
-    download_file(mpack_path, tmp_archive_path)
+    download_file_anyway(mpack_path, tmp_archive_path)
   return tmp_archive_path
 
 def expand_mpack(archive_path):


[04/50] [abbrv] ambari git commit: AMBARI-21182. Agent Host Disk Usage Alert Hardcodes the Stack Directory (aonishuk)

Posted by nc...@apache.org.
AMBARI-21182. Agent Host Disk Usage Alert Hardcodes the Stack Directory (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/119d2624
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/119d2624
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/119d2624

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 119d2624f96d66c9a4d5d559ca436de73adae444
Parents: 4dba161
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Jun 6 13:57:52 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Jun 6 13:57:52 2017 +0300

----------------------------------------------------------------------
 .../resources/host_scripts/alert_disk_space.py  | 22 +++++++-------------
 1 file changed, 8 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/119d2624/ambari-server/src/main/resources/host_scripts/alert_disk_space.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/host_scripts/alert_disk_space.py b/ambari-server/src/main/resources/host_scripts/alert_disk_space.py
index d2b4f36..4c5834f 100644
--- a/ambari-server/src/main/resources/host_scripts/alert_disk_space.py
+++ b/ambari-server/src/main/resources/host_scripts/alert_disk_space.py
@@ -36,18 +36,14 @@ MIN_FREE_SPACE_DEFAULT = 5000000000L
 PERCENT_USED_WARNING_DEFAULT = 50
 PERCENT_USED_CRITICAL_DEFAULT = 80
 
-# the location where HDP installs components when using HDP 2.2+
-STACK_HOME_DIR = "/usr/hdp"
-
-# the location where HDP installs components when using HDP 2.0 to 2.1
-STACK_HOME_LEGACY_DIR = "/usr/lib"
+STACK_ROOT = '{{cluster-env/stack_root}}'
 
 def get_tokens():
   """
   Returns a tuple of tokens in the format {{site/property}} that will be used
   to build the dictionary passed into execute
   """
-  return None
+  return (STACK_ROOT, )
 
 
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
@@ -65,15 +61,13 @@ def execute(configurations={}, parameters={}, host_name=None):
   host_name (string): the name of this host where the alert is running
   """
 
-  # determine the location of HDP home
-  stack_home = None
-  if os.path.isdir(STACK_HOME_DIR):
-    stack_home = STACK_HOME_DIR
-  elif os.path.isdir(STACK_HOME_LEGACY_DIR):
-    stack_home = STACK_HOME_LEGACY_DIR
+  if configurations is None:
+    return (('UNKNOWN', ['There were no configurations supplied to the script.']))
+
+  if STACK_ROOT not in configurations:
+    return (('UNKNOWN', ['cluster-env/stack_root is not specified']))
 
-  # if stack home was found, use it; otherwise default to None
-  path = stack_home if stack_home is not None else None
+  path = configurations[STACK_ROOT]
 
   try:
     disk_usage = _get_disk_usage(path)


[46/50] [abbrv] ambari git commit: AMBARI-19149. Code cleanup: concatenation in debug messages, unnecessary toString calls

Posted by nc...@apache.org.
AMBARI-19149. Code cleanup: concatenation in debug messages, unnecessary toString calls


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/219f2234
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/219f2234
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/219f2234

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 219f22345e6f7b7ad38cdaac02e9bbe8c8ad7254
Parents: d510522
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Wed May 31 10:25:58 2017 +0200
Committer: Attila Doroszlai <ad...@hortonworks.com>
Committed: Mon Jun 12 11:30:03 2017 +0200

----------------------------------------------------------------------
 ambari-server/pom.xml                           |  4 --
 .../server/KdcServerConnectionVerification.java |  2 +-
 .../ambari/server/StateRecoveryManager.java     |  2 +-
 .../actionmanager/ActionDBAccessorImpl.java     |  6 +-
 .../server/actionmanager/ActionManager.java     |  8 +--
 .../server/actionmanager/ActionScheduler.java   | 18 +++--
 .../ambari/server/actionmanager/Stage.java      |  6 +-
 .../apache/ambari/server/agent/ActionQueue.java |  2 +-
 .../ambari/server/agent/AgentRequests.java      | 10 +--
 .../ambari/server/agent/ExecutionCommand.java   |  6 +-
 .../ambari/server/agent/HeartBeatHandler.java   | 17 ++---
 .../ambari/server/agent/HeartbeatMonitor.java   | 14 ++--
 .../ambari/server/agent/HeartbeatProcessor.java |  4 +-
 .../apache/ambari/server/agent/HostInfo.java    |  2 +-
 .../apache/ambari/server/agent/Register.java    |  2 +-
 .../ambari/server/agent/rest/AgentResource.java | 18 ++---
 .../ambari/server/api/query/QueryImpl.java      |  4 +-
 .../server/api/rest/BootStrapResource.java      |  6 +-
 .../server/api/services/AmbariMetaInfo.java     | 12 ++--
 .../ambari/server/api/services/BaseRequest.java |  2 +-
 .../api/services/PersistKeyValueService.java    | 16 ++---
 .../services/parsers/JsonRequestBodyParser.java |  2 +-
 .../StackAdvisorBlueprintProcessor.java         |  2 +-
 .../commands/StackAdvisorCommand.java           | 10 +--
 .../views/ViewDataMigrationService.java         |  6 +-
 .../server/bootstrap/BSHostStatusCollector.java | 12 ++--
 .../ambari/server/bootstrap/BSRunner.java       | 14 ++--
 .../ambari/server/bootstrap/BootStrapImpl.java  |  6 +-
 .../server/checks/UpgradeCheckRegistry.java     |  4 +-
 .../controller/AmbariActionExecutionHelper.java |  9 +--
 .../AmbariCustomCommandExecutionHelper.java     | 10 ++-
 .../server/controller/AmbariHandlerList.java    |  2 +-
 .../AmbariManagementControllerImpl.java         | 75 +++++++-------------
 .../ambari/server/controller/AmbariServer.java  |  2 +-
 .../server/controller/ControllerModule.java     |  2 +-
 .../internal/AbstractPropertyProvider.java      |  4 +-
 .../internal/AbstractProviderModule.java        | 15 ++--
 .../internal/ActionResourceProvider.java        |  3 +-
 .../controller/internal/AppCookieManager.java   |  8 +--
 .../controller/internal/BaseProvider.java       | 10 +--
 .../BlueprintConfigurationProcessor.java        |  9 ++-
 .../internal/ClientConfigResourceProvider.java  |  2 +-
 .../internal/ClusterControllerImpl.java         |  4 +-
 .../internal/ClusterResourceProvider.java       |  6 +-
 ...atibleRepositoryVersionResourceProvider.java |  2 +-
 .../internal/ComponentResourceProvider.java     |  6 +-
 .../internal/ConfigGroupResourceProvider.java   | 12 ++--
 .../internal/GroupResourceProvider.java         |  4 +-
 .../internal/HostComponentResourceProvider.java |  2 +-
 .../internal/HostResourceProvider.java          | 12 ++--
 .../controller/internal/HostStatusHelper.java   |  2 +-
 .../internal/JobResourceProvider.java           | 12 ++--
 .../internal/MemberResourceProvider.java        |  4 +-
 .../PreUpgradeCheckResourceProvider.java        |  2 +-
 .../internal/RequestResourceProvider.java       |  6 +-
 .../internal/RequestStageContainer.java         |  2 +-
 .../internal/ServiceResourceProvider.java       | 53 ++++----------
 .../internal/TaskAttemptResourceProvider.java   |  6 +-
 .../controller/internal/URLStreamProvider.java  | 10 +--
 .../internal/UserResourceProvider.java          |  4 +-
 .../internal/WidgetLayoutResourceProvider.java  |  4 +-
 .../internal/WorkflowResourceProvider.java      |  6 +-
 .../server/controller/jmx/JMXMetricHolder.java  |  2 +-
 .../logging/LogSearchDataRetrievalService.java  |  4 +-
 .../logging/LoggingRequestHelperImpl.java       | 15 ++--
 .../logging/LoggingSearchPropertyProvider.java  |  4 +-
 .../metrics/MetricsCollectorHAManager.java      |  2 +-
 .../metrics/MetricsDownsamplingMethod.java      |  5 +-
 .../metrics/timeline/AMSPropertyProvider.java   |  2 +-
 .../metrics/timeline/MetricsRequestHelper.java  | 20 +++---
 .../timeline/cache/TimelineMetricCache.java     | 28 +++-----
 .../cache/TimelineMetricCacheEntryFactory.java  | 46 +++++-------
 .../cache/TimelineMetricsCacheSizeOfEngine.java |  6 +-
 .../controller/utilities/KerberosChecker.java   |  2 +-
 .../ServiceCalculatedStateFactory.java          |  2 +-
 .../server/credentialapi/CredentialUtil.java    |  2 +-
 .../customactions/ActionDefinitionManager.java  |  6 +-
 .../alerts/AlertHashInvalidationListener.java   |  2 +-
 .../alerts/AlertLifecycleListener.java          |  2 +-
 .../alerts/AlertMaintenanceModeListener.java    |  2 +-
 .../AlertServiceComponentHostListener.java      |  2 +-
 .../alerts/AlertServiceStateListener.java       |  2 +-
 .../alerts/AlertStateChangedListener.java       |  2 +-
 .../CachedRoleCommandOrderProvider.java         |  2 +-
 .../server/metadata/RoleCommandOrder.java       |  7 +-
 .../system/impl/AmbariPerformanceMonitor.java   |  4 +-
 .../system/impl/DatabaseMetricsSource.java      |  2 +-
 .../metrics/system/impl/JvmMetricsSource.java   |  2 +-
 .../system/impl/MetricsConfiguration.java       |  2 +-
 .../metrics/system/impl/MetricsServiceImpl.java |  2 +-
 .../ambari/server/orm/DBAccessorImpl.java       |  2 +-
 .../server/orm/dao/AlertDefinitionDAO.java      |  2 +-
 .../orm/entities/RepositoryVersionEntity.java   |  2 +-
 .../orm/helpers/dbms/GenericDbmsHelper.java     |  4 +-
 .../server/resources/ResourceManager.java       | 11 ++-
 .../server/resources/api/rest/GetResource.java  |  9 ++-
 .../scheduler/AbstractLinearExecutionJob.java   |  6 +-
 .../scheduler/ExecutionScheduleManager.java     | 12 ++--
 .../scheduler/ExecutionSchedulerImpl.java       |  4 +-
 .../server/security/CertificateManager.java     |  6 +-
 .../ambari/server/security/SecurityFilter.java  | 12 ++--
 .../AmbariLdapBindAuthenticator.java            |  2 +-
 .../AmbariPamAuthenticationProvider.java        |  2 +-
 .../jwt/JwtAuthenticationFilter.java            |  2 +-
 .../encryption/MasterKeyServiceImpl.java        |  8 +--
 .../unsecured/rest/CertificateDownload.java     |  6 +-
 .../unsecured/rest/CertificateSign.java         |  6 +-
 .../security/unsecured/rest/ConnectionInfo.java |  6 +-
 .../kerberos/ADKerberosOperationHandler.java    |  6 +-
 .../kerberos/CreateKeytabFilesServerAction.java |  6 +-
 .../kerberos/DestroyPrincipalsServerAction.java |  2 +-
 .../kerberos/IPAKerberosOperationHandler.java   | 18 ++---
 .../kerberos/MITKerberosOperationHandler.java   |  2 +-
 .../serveraction/upgrades/ConfigureAction.java  |  2 +-
 .../upgrades/FinalizeUpgradeAction.java         |  2 +-
 .../upgrades/UpdateDesiredStackAction.java      |  2 +-
 .../ambari/server/stack/ExtensionDirectory.java |  2 +-
 .../ambari/server/stack/ExtensionModule.java    |  4 +-
 .../ambari/server/stack/MasterHostResolver.java |  2 +-
 .../ambari/server/stack/ServiceDirectory.java   |  2 +-
 .../ambari/server/stack/ServiceModule.java      |  4 +-
 .../ambari/server/stack/StackDirectory.java     |  7 +-
 .../ambari/server/stack/StackManager.java       |  9 +--
 .../apache/ambari/server/stack/StackModule.java | 20 ++----
 .../ambari/server/stageplanner/RoleGraph.java   |  2 +-
 .../apache/ambari/server/state/CheckHelper.java |  2 +-
 .../server/state/ServiceComponentImpl.java      | 21 ++----
 .../apache/ambari/server/state/ServiceImpl.java | 21 ++----
 .../apache/ambari/server/state/StackInfo.java   |  2 +-
 .../ambari/server/state/action/ActionImpl.java  | 23 +++---
 .../server/state/cluster/ClusterImpl.java       | 45 +++++-------
 .../server/state/cluster/ClustersImpl.java      |  6 +-
 .../ambari/server/state/host/HostImpl.java      | 31 +++-----
 .../server/state/stack/ConfigUpgradePack.java   |  2 +-
 .../state/stack/StackRoleCommandOrder.java      |  2 +-
 .../ambari/server/state/stack/UpgradePack.java  |  2 +-
 .../state/stack/upgrade/ColocatedGrouping.java  |  2 +-
 .../upgrade/ConfigUpgradeChangeDefinition.java  |  2 +-
 .../state/stack/upgrade/ConfigureTask.java      |  2 +-
 .../state/stack/upgrade/HostOrderGrouping.java  |  2 +-
 .../state/stack/upgrade/RestartGrouping.java    |  2 +-
 .../stack/upgrade/ServiceCheckGrouping.java     |  2 +-
 .../state/stack/upgrade/StartGrouping.java      |  2 +-
 .../state/stack/upgrade/StopGrouping.java       |  2 +-
 .../state/stack/upgrade/TaskWrapperBuilder.java |  2 +-
 .../svccomphost/ServiceComponentHostImpl.java   | 18 ++---
 .../server/topology/BlueprintValidatorImpl.java |  2 +-
 .../topology/tasks/ConfigureClusterTask.java    |  2 +-
 .../server/topology/tasks/TopologyHostTask.java |  4 +-
 .../validators/ClusterConfigTypeValidator.java  |  2 +-
 .../server/upgrade/AbstractUpgradeCatalog.java  |  2 +-
 .../server/upgrade/SchemaUpgradeHelper.java     |  2 +-
 .../server/upgrade/UpgradeCatalog222.java       |  2 +-
 .../server/upgrade/UpgradeCatalog250.java       | 10 +--
 .../apache/ambari/server/utils/AmbariPath.java  |  2 +-
 .../ambari/server/utils/CommandUtils.java       |  2 +-
 .../ambari/server/utils/SecretReference.java    |  2 +-
 .../ambari/server/utils/ShellCommandUtil.java   |  6 +-
 .../apache/ambari/server/utils/StageUtils.java  |  6 +-
 .../server/view/HttpImpersonatorImpl.java       |  2 +-
 .../server/view/ViewAmbariStreamProvider.java   |  2 +-
 .../ambari/server/view/ViewContextImpl.java     |  6 +-
 .../view/ViewDataMigrationContextImpl.java      |  6 +-
 .../server/view/ViewDataMigrationUtility.java   |  4 +-
 .../server/view/ViewDirectoryWatcher.java       |  6 +-
 .../ambari/server/view/ViewExtractor.java       |  8 +--
 .../apache/ambari/server/view/ViewRegistry.java | 27 ++++---
 .../server/view/ViewURLStreamProvider.java      | 14 ++--
 .../ambari/server/agent/AgentResourceTest.java  |  6 +-
 .../server/agent/LocalAgentSimulator.java       |  6 +-
 .../ambari/server/agent/TestActionQueue.java    |  2 +-
 .../server/agent/TestHeartbeatHandler.java      |  4 +-
 .../server/api/services/AmbariMetaInfoTest.java |  4 +-
 .../server/api/services/PersistServiceTest.java |  6 +-
 .../server/bootstrap/BootStrapResourceTest.java |  6 +-
 .../ambari/server/bootstrap/BootStrapTest.java  |  6 +-
 .../AmbariManagementControllerTest.java         | 17 +++--
 .../AbstractJDBCResourceProviderTest.java       |  2 +-
 .../CredentialResourceProviderTest.java         | 10 +--
 .../metrics/ganglia/GangliaMetricTest.java      |  2 +-
 .../apache/ambari/server/orm/OrmTestHelper.java |  2 +-
 .../orm/dao/RepositoryVersionDAOTest.java       |  2 +-
 .../server/security/CertGenerationTest.java     |  6 +-
 .../server/security/SslExecutionTest.java       |  6 +-
 .../encryption/MasterKeyServiceTest.java        |  6 +-
 .../ambari/server/state/host/HostTest.java      |  6 +-
 .../svccomphost/ServiceComponentHostTest.java   |  5 +-
 .../server/testing/DBInconsistencyTests.java    |  2 +-
 .../ambari/server/upgrade/UpgradeTest.java      |  2 +-
 189 files changed, 555 insertions(+), 765 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 669f90f..d2b65af 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -1348,10 +1348,6 @@
       <version>${jetty.version}</version>
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-    </dependency>
-    <dependency>
       <groupId>commons-codec</groupId>
       <artifactId>commons-codec</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/KdcServerConnectionVerification.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/KdcServerConnectionVerification.java b/ambari-server/src/main/java/org/apache/ambari/server/KdcServerConnectionVerification.java
index 19b13aa..1f0403b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/KdcServerConnectionVerification.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/KdcServerConnectionVerification.java
@@ -57,7 +57,7 @@ import com.google.inject.Singleton;
 @Singleton
 public class KdcServerConnectionVerification {
 
-  private static Logger LOG = LoggerFactory.getLogger(KdcServerConnectionVerification.class);
+  private static final Logger LOG = LoggerFactory.getLogger(KdcServerConnectionVerification.class);
 
   private Configuration config;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/StateRecoveryManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/StateRecoveryManager.java b/ambari-server/src/main/java/org/apache/ambari/server/StateRecoveryManager.java
index c92536e..03990a2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/StateRecoveryManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/StateRecoveryManager.java
@@ -36,7 +36,7 @@ import com.google.inject.Inject;
  */
 public class StateRecoveryManager {
 
-  private static Logger LOG = LoggerFactory.getLogger(StateRecoveryManager.class);
+  private static final Logger LOG = LoggerFactory.getLogger(StateRecoveryManager.class);
 
   @Inject
   private HostVersionDAO hostVersionDAO;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
index 982316d..94bb892 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
@@ -579,9 +579,7 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
                                   long stageId, String role, CommandReport report) {
     boolean checkRequest = false;
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Update HostRoleState: "
-        + "HostName " + hostname + " requestId " + requestId + " stageId "
-        + stageId + " role " + role + " report " + report);
+      LOG.debug("Update HostRoleState: HostName {} requestId {} stageId {} role {} report {}", hostname, requestId, stageId, role, report);
     }
 
     long now = System.currentTimeMillis();
@@ -958,7 +956,7 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
         .withTaskId(String.valueOf(commandEntity.getTaskId()))
         .withHostName(commandEntity.getHostName())
         .withUserName(details.getUserName())
-        .withOperation(commandEntity.getRoleCommand().toString() + " " + commandEntity.getRole().toString())
+        .withOperation(commandEntity.getRoleCommand() + " " + commandEntity.getRole())
         .withDetails(commandEntity.getCommandDetail())
         .withStatus(commandEntity.getStatus().toString())
         .withRequestId(String.valueOf(requestId))

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionManager.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionManager.java
index 3182b50..e809980 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionManager.java
@@ -43,7 +43,7 @@ import com.google.inject.Singleton;
  */
 @Singleton
 public class ActionManager {
-  private static Logger LOG = LoggerFactory.getLogger(ActionManager.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ActionManager.class);
   private final ActionScheduler scheduler;
   private final ActionDBAccessor db;
   private final AtomicLong requestCounter;
@@ -84,10 +84,10 @@ public class ActionManager {
 
   public void sendActions(Request request, ExecuteActionRequest executeActionRequest) throws AmbariException {
     if (LOG.isDebugEnabled()) {
-      LOG.debug(String.format("Persisting Request into DB: %s", request));
+      LOG.debug("Persisting Request into DB: {}", request);
 
       if (executeActionRequest != null) {
-        LOG.debug("In response to request: " + request.toString());
+        LOG.debug("In response to request: {}", request);
       }
     }
     db.persistActions(request);
@@ -147,7 +147,7 @@ public class ActionManager {
     for (CommandReport report : reports) {
       HostRoleCommand command = commands.get(report.getTaskId());
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Processing command report : " + report.toString());
+        LOG.debug("Processing command report : {}", report);
       }
       if (command == null) {
         LOG.warn("The task " + report.getTaskId()

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
index e779f78..9a45d1f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
@@ -101,7 +101,7 @@ import com.google.inject.persist.UnitOfWork;
 @Singleton
 class ActionScheduler implements Runnable {
 
-  private static Logger LOG = LoggerFactory.getLogger(ActionScheduler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ActionScheduler.class);
 
   public static final String FAILED_TASK_ABORT_REASONING =
     "Server considered task failed and automatically aborted it";
@@ -395,7 +395,7 @@ class ActionScheduler implements Runnable {
         // Check if we can process this stage in parallel with another stages
         i_stage++;
         long requestId = stage.getRequestId();
-        LOG.debug("==> STAGE_i = " + i_stage + "(requestId=" + requestId + ",StageId=" + stage.getStageId() + ")");
+        LOG.debug("==> STAGE_i = {}(requestId={},StageId={})", i_stage, requestId, stage.getStageId());
 
         RequestEntity request = db.getRequestEntity(requestId);
 
@@ -735,7 +735,7 @@ class ActionScheduler implements Runnable {
       }
 
       int i_my = 0;
-      LOG.trace("===>host=" + host);
+      LOG.trace("===>host={}", host);
 
       for (ExecutionCommandWrapper wrapper : commandWrappers) {
         ExecutionCommand c = wrapper.getExecutionCommand();
@@ -743,8 +743,7 @@ class ActionScheduler implements Runnable {
         HostRoleStatus status = s.getHostRoleStatus(host, roleStr);
         i_my++;
         if (LOG.isTraceEnabled()) {
-          LOG.trace("Host task " + i_my + ") id = " + c.getTaskId() + " status = " + status.toString() +
-            " (role=" + roleStr + "), roleCommand = " + c.getRoleCommand());
+          LOG.trace("Host task {}) id = {} status = {} (role={}), roleCommand = {}", i_my, c.getTaskId(), status, roleStr, c.getRoleCommand());
         }
         boolean hostDeleted = false;
         if (null != cluster) {
@@ -789,7 +788,7 @@ class ActionScheduler implements Runnable {
             commandTimeout += Long.parseLong(timeoutStr) * 1000; // Converting to milliseconds
           } else {
             LOG.error("Execution command has no timeout parameter" +
-              c.toString());
+              c);
           }
         }
 
@@ -853,7 +852,7 @@ class ActionScheduler implements Runnable {
 
             // reschedule command
             commandsToSchedule.add(c);
-            LOG.trace("===> commandsToSchedule(reschedule)=" + commandsToSchedule.size());
+            LOG.trace("===> commandsToSchedule(reschedule)={}", commandsToSchedule.size());
           }
         } else if (status.equals(HostRoleStatus.PENDING)) {
           // in case of DEPENDENCY_ORDERED stage command can be scheduled only if all of it's dependencies are
@@ -865,7 +864,7 @@ class ActionScheduler implements Runnable {
 
             //Need to schedule first time
             commandsToSchedule.add(c);
-            LOG.trace("===>commandsToSchedule(first_time)=" + commandsToSchedule.size());
+            LOG.trace("===>commandsToSchedule(first_time)={}", commandsToSchedule.size());
           }
         }
 
@@ -962,8 +961,7 @@ class ActionScheduler implements Runnable {
       }
 
     } catch (ServiceComponentNotFoundException scnex) {
-      LOG.debug(componentName + " associated with service " + serviceName +
-        " is not a service component, assuming it's an action.");
+      LOG.debug("{} associated with service {} is not a service component, assuming it's an action.", componentName, serviceName);
     } catch (ServiceComponentHostNotFoundException e) {
       String msg = String.format("Service component host %s not found, " +
               "unable to transition to failed state.", componentName);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
index 5638439..3be66d4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
@@ -67,7 +67,7 @@ public class Stage {
    */
   public static final String INTERNAL_HOSTNAME = "_internal_ambari";
 
-  private static Logger LOG = LoggerFactory.getLogger(Stage.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Stage.class);
   private final long requestId;
   private String clusterName;
   private long clusterId = -1L;
@@ -853,7 +853,7 @@ public class Stage {
             summaryTaskTimeoutForHost += commandTimeout;
           } else {
             LOG.error("Execution command has no timeout parameter" +
-                    command.toString());
+              command);
           }
         }
         if (summaryTaskTimeoutForHost > stageTimeout) {
@@ -936,7 +936,7 @@ public class Stage {
       builder.append("HOST: ").append(hostRoleCommand.getHostName()).append(" :\n");
       builder.append(hostRoleCommand.getExecutionCommandWrapper().getJson());
       builder.append("\n");
-      builder.append(hostRoleCommand.toString());
+      builder.append(hostRoleCommand);
       builder.append("\n");
     }
     builder.append("STAGE DESCRIPTION END\n");

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/agent/ActionQueue.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ActionQueue.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ActionQueue.java
index 02f2dc8..1e13e25 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ActionQueue.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ActionQueue.java
@@ -38,7 +38,7 @@ import com.google.inject.Singleton;
 @Singleton
 public class ActionQueue {
 
-  private static Logger LOG = LoggerFactory.getLogger(ActionQueue.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ActionQueue.class);
 
   private static HashSet<String> EMPTY_HOST_LIST = new HashSet<>();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentRequests.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentRequests.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentRequests.java
index 5ff1564..8457bb8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentRequests.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentRequests.java
@@ -22,8 +22,8 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Singleton;
 
@@ -34,7 +34,7 @@ import com.google.inject.Singleton;
 
 @Singleton
 public class AgentRequests {
-  private static Log LOG = LogFactory.getLog(HeartbeatMonitor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HeartbeatMonitor.class);
   private final Map<String, Map<String, Boolean>> requiresExecCmdDetails = new HashMap<>();
   private final Object _lock = new Object();
 
@@ -60,7 +60,7 @@ public class AgentRequests {
 
     Map<String, Boolean> perHostRequiresExecCmdDetails = getPerHostRequiresExecCmdDetails(host);
     if (perHostRequiresExecCmdDetails != null && perHostRequiresExecCmdDetails.containsKey(component)) {
-      LOG.debug("Sending exec command details for " + component);
+      LOG.debug("Sending exec command details for {}", component);
       return perHostRequiresExecCmdDetails.get(component);
     }
 
@@ -81,6 +81,6 @@ public class AgentRequests {
 
   @Override
   public String toString() {
-    return new StringBuilder().append("requiresExecCmdDetails: ").append(requiresExecCmdDetails.toString()).toString();
+    return new StringBuilder().append("requiresExecCmdDetails: ").append(requiresExecCmdDetails).toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index 17b89b7..3a650ef 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@ -29,8 +29,8 @@ import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.utils.StageUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.gson.annotations.SerializedName;
 
@@ -41,7 +41,7 @@ import com.google.gson.annotations.SerializedName;
  */
 public class ExecutionCommand extends AgentCommand {
 
-  private static Log LOG = LogFactory.getLog(ExecutionCommand.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ExecutionCommand.class);
 
   public ExecutionCommand() {
     super(AgentCommandType.EXECUTION_COMMAND);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
index fc6e7a7..89ec963 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
@@ -155,10 +155,7 @@ public class HeartBeatHandler {
       return createRegisterCommand();
     }
 
-    LOG.debug("Received heartbeat from host"
-        + ", hostname=" + hostname
-        + ", currentResponseId=" + currentResponseId
-        + ", receivedResponseId=" + heartbeat.getResponseId());
+    LOG.debug("Received heartbeat from host, hostname={}, currentResponseId={}, receivedResponseId={}", hostname, currentResponseId, heartbeat.getResponseId());
 
     if (heartbeat.getResponseId() == currentResponseId - 1) {
       HeartBeatResponse heartBeatResponse = hostResponses.get(hostname);
@@ -249,7 +246,7 @@ public class HeartBeatHandler {
         response.setRecoveryConfig(rc);
 
         if (response.getRecoveryConfig() != null) {
-          LOG.info("Recovery configuration set to {}", response.getRecoveryConfig().toString());
+          LOG.info("Recovery configuration set to {}", response.getRecoveryConfig());
         }
       }
     }
@@ -268,7 +265,7 @@ public class HeartBeatHandler {
 
 
   protected void processRecoveryReport(RecoveryReport recoveryReport, String hostname) throws AmbariException {
-    LOG.debug("Received recovery report: " + recoveryReport.toString());
+    LOG.debug("Received recovery report: {}", recoveryReport);
     Host host = clusterFsm.getHost(hostname);
     host.setRecoveryReport(recoveryReport);
   }
@@ -283,7 +280,7 @@ public class HeartBeatHandler {
       for (AgentCommand ac : cmds) {
         try {
           if (LOG.isDebugEnabled()) {
-            LOG.debug("Sending command string = " + StageUtils.jaxbToString(ac));
+            LOG.debug("Sending command string = {}", StageUtils.jaxbToString(ac));
           }
         } catch (Exception e) {
           throw new AmbariException("Could not get jaxb string for command", e);
@@ -446,7 +443,7 @@ public class HeartBeatHandler {
 
     response.setAgentConfig(config.getAgentConfigsMap());
     if(response.getAgentConfig() != null) {
-      LOG.debug("Agent configuration map set to " + response.getAgentConfig());
+      LOG.debug("Agent configuration map set to {}", response.getAgentConfig());
     }
 
     /*
@@ -464,7 +461,7 @@ public class HeartBeatHandler {
       response.setRecoveryConfig(rc);
 
       if(response.getRecoveryConfig() != null) {
-        LOG.info("Recovery configuration set to " + response.getRecoveryConfig().toString());
+        LOG.info("Recovery configuration set to " + response.getRecoveryConfig());
       }
     }
 
@@ -495,7 +492,7 @@ public class HeartBeatHandler {
     }
 
     if(actionQueue.hasPendingTask(hostname)) {
-      LOG.debug("Host " + hostname + " has pending tasks");
+      LOG.debug("Host {} has pending tasks", hostname);
       response.setHasPendingTasks(true);
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
index 6d65bbd..d83a5d1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
@@ -58,8 +58,8 @@ import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
 import org.apache.ambari.server.state.host.HostHeartbeatLostEvent;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Injector;
 
@@ -67,7 +67,7 @@ import com.google.inject.Injector;
  * Monitors the node state and heartbeats.
  */
 public class HeartbeatMonitor implements Runnable {
-  private static Log LOG = LogFactory.getLog(HeartbeatMonitor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HeartbeatMonitor.class);
   private Clusters clusters;
   private ActionQueue actionQueue;
   private ActionManager actionManager;
@@ -120,8 +120,7 @@ public class HeartbeatMonitor implements Runnable {
     while (shouldRun) {
       try {
         doWork();
-        LOG.trace("Putting monitor to sleep for " + threadWakeupInterval + " " +
-          "milliseconds");
+        LOG.trace("Putting monitor to sleep for {} milliseconds", threadWakeupInterval);
         Thread.sleep(threadWakeupInterval);
       } catch (InterruptedException ex) {
         LOG.warn("Scheduler thread is interrupted going to stop", ex);
@@ -193,8 +192,7 @@ public class HeartbeatMonitor implements Runnable {
 
       // Get status of service components
       List<StatusCommand> cmds = generateStatusCommands(hostname);
-      LOG.trace("Generated " + cmds.size() + " status commands for host: " +
-        hostname);
+      LOG.trace("Generated {} status commands for host: {}", cmds.size(), hostname);
       if (cmds.isEmpty()) {
         // Nothing to do
       } else {
@@ -352,7 +350,7 @@ public class HeartbeatMonitor implements Runnable {
     if (statusCmd.getPayloadLevel() == StatusCommand.StatusCommandPayload.EXECUTION_COMMAND) {
       ExecutionCommand ec = ambariManagementController.getExecutionCommand(cluster, sch, RoleCommand.START);
       statusCmd.setExecutionCommand(ec);
-      LOG.debug(componentName + " has more payload for execution command");
+      LOG.debug("{} has more payload for execution command", componentName);
     }
 
     return statusCmd;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
index d6ae260..a08abab 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
@@ -374,7 +374,7 @@ public class HeartbeatProcessor extends AbstractService{
         }
       }
 
-      LOG.debug("Received command report: " + report);
+      LOG.debug("Received command report: {}", report);
 
       // get this locally; don't touch the database
       Host host = clusterFsm.getHost(hostname);
@@ -463,7 +463,7 @@ public class HeartbeatProcessor extends AbstractService{
         throw new AmbariException("Invalid command report, service: " + service);
       }
       if (actionMetadata.getActions(service.toLowerCase()).contains(report.getRole())) {
-        LOG.debug(report.getRole() + " is an action - skip component lookup");
+        LOG.debug("{} is an action - skip component lookup", report.getRole());
       } else {
         try {
           Service svc = cl.getService(service);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/agent/HostInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HostInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HostInfo.java
index ce16f14..6673bc7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HostInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HostInfo.java
@@ -378,7 +378,7 @@ public class HostInfo {
     }
     StringBuilder ret = new StringBuilder();
     for (DiskInfo diskInfo : mounts) {
-      ret.append("(").append(diskInfo.toString()).append(")");
+      ret.append("(").append(diskInfo).append(")");
     }
     return ret.toString();
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/agent/Register.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/Register.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/Register.java
index ec41a7b..83fef03 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/Register.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/Register.java
@@ -115,7 +115,7 @@ public class Register {
              "prefix=" + prefix + "\n";
 
     if (hardwareProfile != null)
-      ret = ret + "hardwareprofile=" + this.hardwareProfile.toString();
+      ret = ret + "hardwareprofile=" + this.hardwareProfile;
     return ret;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/agent/rest/AgentResource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/rest/AgentResource.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/rest/AgentResource.java
index 0690f7b..a332459 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/rest/AgentResource.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/rest/AgentResource.java
@@ -38,8 +38,8 @@ import org.apache.ambari.server.agent.Register;
 import org.apache.ambari.server.agent.RegistrationResponse;
 import org.apache.ambari.server.agent.RegistrationStatus;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
 
@@ -52,7 +52,7 @@ import com.google.inject.Inject;
 @Path("/")
 public class AgentResource {
   private static HeartBeatHandler hh;
-  private static Log LOG = LogFactory.getLog(AgentResource.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AgentResource.class);
 
   @Inject
   public static void init(HeartBeatHandler instance) {
@@ -92,7 +92,7 @@ public class AgentResource {
     RegistrationResponse response = null;
     try {
       response = hh.handleRegistration(message);
-      LOG.debug("Sending registration response " + response);
+      LOG.debug("Sending registration response {}", response);
     } catch (AmbariException ex) {
       response = new RegistrationResponse();
       response.setResponseId(-1);
@@ -122,14 +122,14 @@ public class AgentResource {
   public HeartBeatResponse heartbeat(HeartBeat message)
       throws WebApplicationException {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Received Heartbeat message " + message);
+      LOG.debug("Received Heartbeat message {}", message);
     }
     HeartBeatResponse heartBeatResponse;
     try {
       heartBeatResponse = hh.handleHeartBeat(message);
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Sending heartbeat response with response id " + heartBeatResponse.getResponseId());
-        LOG.debug("Response details " + heartBeatResponse);
+        LOG.debug("Sending heartbeat response with response id {}", heartBeatResponse.getResponseId());
+        LOG.debug("Response details {}", heartBeatResponse);
       }
     } catch (Exception e) {
       LOG.warn("Error in HeartBeat", e);
@@ -155,7 +155,7 @@ public class AgentResource {
   public ComponentsResponse components(
       @PathParam("clusterName") String clusterName) {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Received Components request for cluster " + clusterName);
+      LOG.debug("Received Components request for cluster {}", clusterName);
     }
 
     ComponentsResponse componentsResponse;
@@ -164,7 +164,7 @@ public class AgentResource {
       componentsResponse = hh.handleComponents(clusterName);
       if (LOG.isDebugEnabled()) {
         LOG.debug("Sending components response");
-        LOG.debug("Response details " + componentsResponse);
+        LOG.debug("Response details {}", componentsResponse);
       }
     } catch (Exception e) {
       LOG.warn("Error in Components", e);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java
index 1387ded..25fd143 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java
@@ -523,7 +523,7 @@ public class QueryImpl implements Query, ResourceInstance {
       NoSuchParentResourceException {
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Executing resource query: " + request + " where " + predicate);
+      LOG.debug("Executing resource query: {} where {}", request, predicate);
     }
 
     QueryResponse queryResponse = clusterController.getResources(type, request, predicate);
@@ -534,7 +534,7 @@ public class QueryImpl implements Query, ResourceInstance {
       // NoSuchResourceException (404 response) for an empty query result
       if(!isCollectionResource()) {
         throw new NoSuchResourceException(
-            "The requested resource doesn't exist: " + type.toString() + " not found where " + predicate + ".");
+            "The requested resource doesn't exist: " + type + " not found where " + predicate + ".");
       }
     }
     return queryResponse;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/api/rest/BootStrapResource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/rest/BootStrapResource.java b/ambari-server/src/main/java/org/apache/ambari/server/api/rest/BootStrapResource.java
index e5e1aa6..1bf22b1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/rest/BootStrapResource.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/rest/BootStrapResource.java
@@ -40,8 +40,8 @@ import org.apache.ambari.server.bootstrap.BSResponse;
 import org.apache.ambari.server.bootstrap.BootStrapImpl;
 import org.apache.ambari.server.bootstrap.BootStrapStatus;
 import org.apache.ambari.server.bootstrap.SshHostInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
 
@@ -49,7 +49,7 @@ import com.google.inject.Inject;
 public class BootStrapResource {
 
   private static BootStrapImpl bsImpl;
-  private static Log LOG = LogFactory.getLog(BootStrapResource.class);
+  private static final Logger LOG = LoggerFactory.getLogger(BootStrapResource.class);
 
   @Inject
   public static void init(BootStrapImpl instance) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
index 76694d4..8965683 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
@@ -496,10 +496,7 @@ public class AmbariMetaInfo {
   public String getComponentToService(String stackName, String version,
                                       String componentName) throws AmbariException {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Looking for service for component"
-          + ", stackName=" + stackName
-          + ", stackVersion=" + version
-          + ", componentName=" + componentName);
+      LOG.debug("Looking for service for component, stackName={}, stackVersion={}, componentName={}", stackName, version, componentName);
     }
     Map<String, ServiceInfo> services = getServices(stackName, version);
     String retService = null;
@@ -839,13 +836,12 @@ public class AmbariMetaInfo {
 
   private void getCustomActionDefinitions(File customActionDefinitionRoot) throws JAXBException, AmbariException {
     if (customActionDefinitionRoot != null) {
-      LOG.debug("Loading custom action definitions from "
-          + customActionDefinitionRoot.getAbsolutePath());
+      LOG.debug("Loading custom action definitions from {}", customActionDefinitionRoot.getAbsolutePath());
 
       if (customActionDefinitionRoot.exists() && customActionDefinitionRoot.isDirectory()) {
         adManager.readCustomActionDefinitions(customActionDefinitionRoot);
       } else {
-        LOG.debug("No action definitions found at " + customActionDefinitionRoot.getAbsolutePath());
+        LOG.debug("No action definitions found at {}", customActionDefinitionRoot.getAbsolutePath());
       }
     }
   }
@@ -1002,7 +998,7 @@ public class AmbariMetaInfo {
     ServiceInfo svc = getService(stackName, stackVersion, serviceName);
 
     if (null == svc.getMetricsFile() || !svc.getMetricsFile().exists()) {
-      LOG.debug("Metrics file for " + stackName + "/" + stackVersion + "/" + serviceName + " not found.");
+      LOG.debug("Metrics file for {}/{}/{} not found.", stackName, stackVersion, serviceName);
       return null;
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/api/services/BaseRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/BaseRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/BaseRequest.java
index bf4d930..ebfc62e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/BaseRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/BaseRequest.java
@@ -134,7 +134,7 @@ public abstract class BaseRequest implements Request {
   @Override
   public Result process() {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Handling API Request: '" + getURI() + "'");
+      LOG.debug("Handling API Request: '{}'", getURI());
     }
 
     Result result;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/api/services/PersistKeyValueService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/PersistKeyValueService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/PersistKeyValueService.java
index 6a6b413..59b1d4e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/PersistKeyValueService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/PersistKeyValueService.java
@@ -35,15 +35,15 @@ import javax.xml.bind.JAXBException;
 
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
 import org.apache.ambari.server.utils.StageUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
 
 @Path("/persist/")
 public class PersistKeyValueService {
   private static PersistKeyValueImpl persistKeyVal;
-  private static Log LOG = LogFactory.getLog(PersistKeyValueService.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PersistKeyValueService.class);
 
   @Inject
   public static void init(PersistKeyValueImpl instance) {
@@ -56,7 +56,7 @@ public class PersistKeyValueService {
   public Response update(String keyValues)
       throws WebApplicationException, InvalidStateTransitionException,
       JAXBException, IOException {
-    LOG.debug("Received message from UI " + keyValues);
+    LOG.debug("Received message from UI {}", keyValues);
     Map<String, String> keyValuesMap = StageUtils.fromJson(keyValues, Map.class);
     /* Call into the heartbeat handler */
 
@@ -70,14 +70,14 @@ public class PersistKeyValueService {
   @PUT
   @Produces("text/plain")
   public String store(String values) throws IOException, JAXBException {
-    LOG.debug("Received message from UI " + values);
+    LOG.debug("Received message from UI {}", values);
     Collection<String> valueCollection = StageUtils.fromJson(values, Collection.class);
     Collection<String> keys = new ArrayList<>(valueCollection.size());
     for (String s : valueCollection) {
       keys.add(persistKeyVal.put(s));
     }
     String stringRet = StageUtils.jaxbToString(keys);
-    LOG.debug("Returning " + stringRet);
+    LOG.debug("Returning {}", stringRet);
     return stringRet;
   }
 
@@ -85,7 +85,7 @@ public class PersistKeyValueService {
   @Produces("text/plain")
   @Path("{keyName}")
   public String getKey( @PathParam("keyName") String keyName) {
-    LOG.debug("Looking for keyName " + keyName);
+    LOG.debug("Looking for keyName {}", keyName);
     return persistKeyVal.getValue(keyName);
   }
 
@@ -94,7 +94,7 @@ public class PersistKeyValueService {
   public String getAllKeyValues() throws JAXBException, IOException {
     Map<String, String> ret = persistKeyVal.getAllKeyValues();
     String stringRet = StageUtils.jaxbToString(ret);
-    LOG.debug("Returning " + stringRet);
+    LOG.debug("Returning {}", stringRet);
     return stringRet;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonRequestBodyParser.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonRequestBodyParser.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonRequestBodyParser.java
index b835345..504bf04 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonRequestBodyParser.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonRequestBodyParser.java
@@ -98,7 +98,7 @@ public class JsonRequestBodyParser implements RequestBodyParser {
       } catch (IOException e) {
         if (LOG.isDebugEnabled()) {
           LOG.debug("Caught exception parsing msg body.");
-          LOG.debug("Message Body: " + body, e);
+          LOG.debug("Message Body: {}", body, e);
         }
         throw new BodyParseException(e);
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java
index 7731b22..273c0ff 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java
@@ -53,7 +53,7 @@ import com.google.inject.Singleton;
 @Singleton
 public class StackAdvisorBlueprintProcessor {
 
-  private static Logger LOG = LoggerFactory.getLogger(StackAdvisorBlueprintProcessor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(StackAdvisorBlueprintProcessor.class);
 
   private static StackAdvisorHelper stackAdvisorHelper;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
index ed27697..c2895d9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
@@ -51,14 +51,14 @@ import org.apache.ambari.server.utils.DateUtils;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.map.SerializationConfig;
 import org.codehaus.jackson.node.ArrayNode;
 import org.codehaus.jackson.node.ObjectNode;
 import org.codehaus.jackson.node.TextNode;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Parent for all commands.
@@ -71,7 +71,7 @@ public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extend
    */
   private Class<T> type;
 
-  protected static Log LOG = LogFactory.getLog(StackAdvisorCommand.class);
+  protected static Logger LOG = LoggerFactory.getLogger(StackAdvisorCommand.class);
 
   private static final String GET_HOSTS_INFO_URI = "/api/v1/hosts"
       + "?fields=Hosts/*&Hosts/host_name.in(%s)";
@@ -374,7 +374,7 @@ public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extend
 
     String hostsJSON = (String) response.getEntity();
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Hosts information: " + hostsJSON);
+      LOG.debug("Hosts information: {}", hostsJSON);
     }
 
     Collection<String> unregistered = getUnregisteredHosts(hostsJSON, request.getHosts());
@@ -428,7 +428,7 @@ public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extend
 
     String servicesJSON = (String) response.getEntity();
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Services information: " + servicesJSON);
+      LOG.debug("Services information: {}", servicesJSON);
     }
     return servicesJSON;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/api/services/views/ViewDataMigrationService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/views/ViewDataMigrationService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/views/ViewDataMigrationService.java
index 3b9c58d..df67ecc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/views/ViewDataMigrationService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/views/ViewDataMigrationService.java
@@ -28,8 +28,8 @@ import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
 import org.apache.ambari.server.view.ViewDataMigrationUtility;
 import org.apache.ambari.server.view.ViewRegistry;
 import org.apache.ambari.view.migration.ViewDataMigrationException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiOperation;
@@ -46,7 +46,7 @@ public class ViewDataMigrationService extends BaseService {
   /**
    * Logger.
    */
-  private static final Log LOG = LogFactory.getLog(ViewDataMigrationService.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ViewDataMigrationService.class);
 
   /**
    * The singleton view registry.

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BSHostStatusCollector.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BSHostStatusCollector.java b/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BSHostStatusCollector.java
index fa8a22a..b72ca20 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BSHostStatusCollector.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BSHostStatusCollector.java
@@ -27,8 +27,8 @@ import java.util.List;
 
 import org.apache.ambari.server.utils.Closeables;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Runnable class that gets the hoststatus output by looking at the files
@@ -39,7 +39,7 @@ class BSHostStatusCollector {
   private List<BSHostStatus> hostStatus;
   public static final String logFileFilter = ".log";
   public static final String doneFileFilter = ".done";
-  private static Log LOG = LogFactory.getLog(BSHostStatusCollector.class);
+  private static final Logger LOG = LoggerFactory.getLogger(BSHostStatusCollector.class);
 
   private List<String> hosts;
 
@@ -68,11 +68,7 @@ class BSHostStatusCollector {
       done = new File(requestIdDir, host + doneFileFilter);
       log = new File(requestIdDir, host + logFileFilter);
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Polling bootstrap status for host"
-            + ", requestDir=" + requestIdDir
-            + ", host=" + host
-            + ", doneFileExists=" + done.exists()
-            + ", logFileExists=" + log.exists());
+        LOG.debug("Polling bootstrap status for host, requestDir={}, host={}, doneFileExists={}, logFileExists={}", requestIdDir, host, done.exists(), log.exists());
       }
       if (!done.exists()) {
         status.setStatus("RUNNING");

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BSRunner.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BSRunner.java b/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BSRunner.java
index c2909a6..c7976ee 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BSRunner.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BSRunner.java
@@ -32,15 +32,15 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.ambari.server.bootstrap.BootStrapStatus.BSStat;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * @author ncole
  *
  */
 class BSRunner extends Thread {
-  private static Log LOG = LogFactory.getLog(BSRunner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(BSRunner.class);
 
   private static final String DEFAULT_USER = "root";
   private static final String DEFAULT_SSHPORT = "22";
@@ -212,8 +212,7 @@ class BSRunner extends Thread {
       if (LOG.isDebugEnabled()) {
         // FIXME needs to be removed later
         // security hole
-        LOG.debug("Using ssh key=\""
-            + sshHostInfo.getSshKey() + "\"");
+        LOG.debug("Using ssh key=\"{}\"", sshHostInfo.getSshKey());
       }
 
       String password = sshHostInfo.getPassword();
@@ -338,8 +337,7 @@ class BSRunner extends Thread {
             pendingHosts = true;
           }
           if (LOG.isDebugEnabled()) {
-            LOG.debug("Whether hosts status yet to be updated, pending="
-                + pendingHosts);
+            LOG.debug("Whether hosts status yet to be updated, pending={}", pendingHosts);
           }
           if (!pendingHosts) {
             break;
@@ -441,7 +439,7 @@ class BSRunner extends Thread {
         }
       }
     } catch (FileNotFoundException ex) {
-      LOG.error(ex);
+      LOG.error(ex.toString());
     } finally {
       if (setupAgentDoneWriter != null) {
         setupAgentDoneWriter.close();

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BootStrapImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BootStrapImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BootStrapImpl.java
index c166326..bfe40ea 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BootStrapImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BootStrapImpl.java
@@ -27,8 +27,8 @@ import java.util.List;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.bootstrap.BSResponse.BSRunStat;
 import org.apache.ambari.server.configuration.Configuration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
@@ -44,7 +44,7 @@ public class BootStrapImpl {
   private String masterHostname;
   long timeout;
 
-  private static Log LOG = LogFactory.getLog(BootStrapImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(BootStrapImpl.class);
 
   /* Monotonically increasing requestid for the bootstrap api to query on */
   int requestId = 0;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/checks/UpgradeCheckRegistry.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/UpgradeCheckRegistry.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/UpgradeCheckRegistry.java
index 4fdecc7..ff3aeda 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/UpgradeCheckRegistry.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/UpgradeCheckRegistry.java
@@ -43,7 +43,7 @@ import com.google.inject.Singleton;
  */
 @Singleton
 public class UpgradeCheckRegistry {
-  private static Logger LOG = LoggerFactory.getLogger(UpgradeCheckRegistry.class);
+  private static final Logger LOG = LoggerFactory.getLogger(UpgradeCheckRegistry.class);
 
   /**
    * The list of upgrade checks to run through.
@@ -97,7 +97,7 @@ public class UpgradeCheckRegistry {
         try {
           URL url = jar.toURI().toURL();
           urls.add(url);
-          LOG.debug("Adding service check jar to classpath: {}", url.toString());
+          LOG.debug("Adding service check jar to classpath: {}", url);
         }
         catch (Exception e) {
           LOG.error("Failed to add service check jar to classpath: {}", jar.getAbsolutePath(), e);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
index 2fa965e..1b0e0e0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
@@ -331,12 +331,9 @@ public class AmbariActionExecutionHelper {
       );
 
       if (! ignoredHosts.isEmpty()) {
-        LOG.debug("Hosts to ignore: {}.", StringUtils.join(ignoredHosts, ", "));
-        LOG.debug("Ignoring action for hosts due to maintenance state." +
-            "Ignored hosts =" + ignoredHosts + ", component="
-            + componentName + ", service=" + serviceName
-            + ", cluster=" + cluster.getClusterName() + ", " +
-            "actionName=" + actionContext.getActionName());
+        LOG.debug("Hosts to ignore: {}.", ignoredHosts);
+        LOG.debug("Ignoring action for hosts due to maintenance state.Ignored hosts ={}, component={}, service={}, cluster={}, actionName={}",
+          ignoredHosts, componentName, serviceName, cluster.getClusterName(), actionContext.getActionName());
       }
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index 554ae1d..9083a66 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -851,7 +851,7 @@ public class AmbariCustomCommandExecutionHelper {
     cloneSet.retainAll(includedHosts);
     if (cloneSet.size() > 0) {
       throw new AmbariException("Same host cannot be specified for inclusion " +
-        "as well as exclusion. Hosts: " + cloneSet.toString());
+        "as well as exclusion. Hosts: " + cloneSet);
     }
 
     Service service = cluster.getService(serviceName);
@@ -1111,10 +1111,8 @@ public class AmbariCustomCommandExecutionHelper {
     List<RequestResourceFilter> resourceFilters = actionExecutionContext.getResourceFilters();
 
     for (RequestResourceFilter resourceFilter : resourceFilters) {
-      LOG.debug("Received a command execution request"
-        + ", clusterName=" + actionExecutionContext.getClusterName()
-        + ", serviceName=" + resourceFilter.getServiceName()
-        + ", request=" + actionExecutionContext.toString());
+      LOG.debug("Received a command execution request, clusterName={}, serviceName={}, request={}",
+        actionExecutionContext.getClusterName(), resourceFilter.getServiceName(), actionExecutionContext);
 
       String actionName = actionExecutionContext.getActionName();
       if (actionName.contains(SERVICE_CHECK_COMMAND_NAME)) {
@@ -1554,7 +1552,7 @@ public class AmbariCustomCommandExecutionHelper {
 
       return service.getServiceComponent(resourceFilter.getComponentName());
     } catch (Exception e) {
-      LOG.debug(String.format( "Unknown error appears during getting service component: %s", e.getMessage()));
+      LOG.debug("Unknown error appears during getting service component: {}", e.getMessage());
     }
     return null;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
index 9645a36..e134f6d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
@@ -144,7 +144,7 @@ public class AmbariHandlerList extends HandlerCollection implements ViewInstance
       try {
         ClassLoader viewClassLoader = viewEntity.getClassLoader();
         if (viewClassLoader == null) {
-          LOG.debug("No class loader associated with view " + viewEntity.getName() + ".");
+          LOG.debug("No class loader associated with view {}.", viewEntity.getName());
         } else {
           Thread.currentThread().setContextClassLoader(viewClassLoader);
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 2a9d6c9..eb64030 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -435,9 +435,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     }
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Received a createCluster request"
-          + ", clusterName=" + request.getClusterName()
-          + ", request=" + request);
+      LOG.debug("Received a createCluster request, clusterName={}, request={}", request.getClusterName(), request);
     }
 
     if (request.getStackVersion() == null
@@ -533,12 +531,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       }
 
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Received a createHostComponent request"
-            + ", clusterName=" + request.getClusterName()
-            + ", serviceName=" + request.getServiceName()
-            + ", componentName=" + request.getComponentName()
-            + ", hostname=" + request.getHostname()
-            + ", request=" + request);
+        LOG.debug("Received a createHostComponent request, clusterName={}, serviceName={}, componentName={}, hostname={}, request={}",
+          request.getClusterName(), request.getServiceName(), request.getComponentName(), request.getHostname(), request);
       }
 
       if (!hostComponentNames.containsKey(request.getClusterName())) {
@@ -604,17 +598,12 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
           clusters.getClustersForHost(request.getHostname());
       boolean validCluster = false;
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Looking to match host to cluster"
-            + ", hostnameViaReg=" + host.getHostName()
-            + ", hostname=" + request.getHostname()
-            + ", clusterName=" + request.getClusterName()
-            + ", hostClusterMapCount=" + mappedClusters.size());
+        LOG.debug("Looking to match host to cluster, hostnameViaReg={}, hostname={}, clusterName={}, hostClusterMapCount={}",
+          host.getHostName(), request.getHostname(), request.getClusterName(), mappedClusters.size());
       }
       for (Cluster mappedCluster : mappedClusters) {
         if (LOG.isDebugEnabled()) {
-          LOG.debug("Host belongs to cluster"
-              + ", hostname=" + request.getHostname()
-              + ", clusterName=" + mappedCluster.getClusterName());
+          LOG.debug("Host belongs to cluster, hostname={}, clusterName={}", request.getHostname(), mappedCluster.getClusterName());
         }
         if (mappedCluster.getClusterName().equals(
             request.getClusterName())) {
@@ -660,7 +649,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       } else {
         msg = "Attempted to create host_component's which already exist: ";
       }
-      throw new DuplicateResourceException(msg + names.toString());
+      throw new DuplicateResourceException(msg + names);
     }
 
     // set restartRequired flag for  monitoring services
@@ -1000,7 +989,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       throws AmbariException {
     final Set<MemberResponse> responses = new HashSet<>();
     for (MemberRequest request: requests) {
-      LOG.debug("Received a getMembers request, " + request.toString());
+      LOG.debug("Received a getMembers request, {}", request);
       final Group group = users.getGroup(request.getGroupName());
       if (null == group) {
         if (requests.size() == 1) {
@@ -1065,10 +1054,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     Set<ClusterResponse> response = new HashSet<>();
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Received a getClusters request"
-        + ", clusterName=" + request.getClusterName()
-        + ", clusterId=" + request.getClusterId()
-        + ", stackInfo=" + request.getStackVersion());
+      LOG.debug("Received a getClusters request, clusterName={}, clusterId={}, stackInfo={}",
+        request.getClusterName(), request.getClusterId(), request.getStackVersion());
     }
 
     Cluster singleCluster = null;
@@ -1128,7 +1115,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     StringBuilder builder = new StringBuilder();
     if (LOG.isDebugEnabled()) {
       clusters.debugDump(builder);
-      LOG.debug("Cluster State for cluster " + builder.toString());
+      LOG.debug("Cluster State for cluster {}", builder);
     }
     return response;
   }
@@ -1652,7 +1639,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     ServiceConfigVersionResponse serviceConfigVersionResponse = null;
 
     if (request.getDesiredConfig() != null && request.getServiceConfigVersionRequest() != null) {
-      String msg = "Unable to set desired configs and rollback at same time, request = " + request.toString();
+      String msg = "Unable to set desired configs and rollback at same time, request = " + request;
       LOG.error(msg);
       throw new IllegalArgumentException(msg);
     }
@@ -1660,7 +1647,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     // set the new name of the cluster if change is requested
     if (!cluster.getClusterName().equals(request.getClusterName())) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Received cluster name change request from " + cluster.getClusterName() + " to " + request.getClusterName());
+        LOG.debug("Received cluster name change request from {} to {}", cluster.getClusterName(), request.getClusterName());
       }
 
       if(!AuthorizationHelper.isAuthorized(ResourceType.AMBARI, null, EnumSet.of(RoleAuthorization.AMBARI_RENAME_CLUSTER))) {
@@ -2466,11 +2453,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
 
     String repoInfo = customCommandExecutionHelper.getRepoInfo(cluster, component, host);
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Sending repo information to agent"
-        + ", hostname=" + scHost.getHostName()
-        + ", clusterName=" + clusterName
-        + ", stackInfo=" + stackId.getStackId()
-        + ", repoInfo=" + repoInfo);
+      LOG.debug("Sending repo information to agent, hostname={}, clusterName={}, stackInfo={}, repoInfo={}",
+        scHost.getHostName(), clusterName, stackId.getStackId(), repoInfo);
     }
 
     Map<String, String> hostParams = new TreeMap<>();
@@ -2979,15 +2963,12 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
                     + ", newDesiredState=" + newState);
               default:
                 throw new AmbariException("Unsupported state change operation"
-                    + ", newState=" + newState.toString());
+                    + ", newState=" + newState);
             }
 
             if (LOG.isDebugEnabled()) {
-              LOG.debug("Create a new host action"
-                  + ", requestId=" + requestStages.getId()
-                  + ", componentName=" + scHost.getServiceComponentName()
-                  + ", hostname=" + scHost.getHostName()
-                  + ", roleCommand=" + roleCommand.name());
+              LOG.debug("Create a new host action, requestId={}, componentName={}, hostname={}, roleCommand={}",
+                requestStages.getId(), scHost.getServiceComponentName(), scHost.getHostName(), roleCommand.name());
             }
 
             // any targeted information
@@ -3663,8 +3644,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
 
     for (UserRequest r : requests) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Received a delete user request"
-            + ", username=" + r.getUsername());
+        LOG.debug("Received a delete user request, username={}", r.getUsername());
       }
       User u = users.getAnyUser(r.getUsername());
       if (null != u) {
@@ -3676,7 +3656,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
   @Override
   public void deleteGroups(Set<GroupRequest> requests) throws AmbariException {
     for (GroupRequest request: requests) {
-      LOG.debug("Received a delete group request, groupname=" + request.getGroupName());
+      LOG.debug("Received a delete group request, groupname={}", request.getGroupName());
       final Group group = users.getGroup(request.getGroupName());
       if (group != null) {
         users.removeGroup(group);
@@ -3687,7 +3667,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
   @Override
   public void deleteMembers(java.util.Set<MemberRequest> requests) throws AmbariException {
     for (MemberRequest request : requests) {
-      LOG.debug("Received a delete member request, " + request);
+      LOG.debug("Received a delete member request, {}", request);
       users.removeMemberFromGroup(request.getGroupName(), request.getUserName());
     }
   }
@@ -3839,8 +3819,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     for (UserRequest r : requests) {
 
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Received a getUsers request"
-            + ", userRequest=" + r.toString());
+        LOG.debug("Received a getUsers request, userRequest={}", r);
       }
 
       String requestedUsername = r.getUsername();
@@ -3894,7 +3873,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       throws AmbariException {
     final Set<GroupResponse> responses = new HashSet<>();
     for (GroupRequest request: requests) {
-      LOG.debug("Received a getGroups request, groupRequest=" + request.toString());
+      LOG.debug("Received a getGroups request, groupRequest={}", request);
       // get them all
       if (null == request.getGroupName()) {
         for (Group group: users.getAllGroups()) {
@@ -3993,10 +3972,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
         }
       }
     );
-    LOG.debug("Ignoring hosts when selecting available hosts for action" +
-            " due to maintenance state." +
-            "Ignored hosts =" + ignoredHosts + ", cluster="
-            + cluster.getClusterName() + ", service=" + service.getName());
+    LOG.debug("Ignoring hosts when selecting available hosts for action due to maintenance state.Ignored hosts ={}, cluster={}, service={}",
+      ignoredHosts, cluster.getClusterName(), service.getName());
   }
 
   /**
@@ -4066,7 +4043,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
 
       LOG.info("Received action execution request"
         + ", clusterName=" + actionRequest.getClusterName()
-        + ", request=" + actionRequest.toString());
+        + ", request=" + actionRequest);
     }
 
     ActionExecutionContext actionExecContext = getActionExecutionContext(actionRequest);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index 2f799b7..aeba739 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -170,7 +170,7 @@ import com.sun.jersey.spi.container.servlet.ServletContainer;
 @Singleton
 public class AmbariServer {
   public static final String VIEWS_URL_PATTERN = "/api/v1/views/*";
-  private static Logger LOG = LoggerFactory.getLogger(AmbariServer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AmbariServer.class);
 
   /**
    * The thread name prefix for threads handling agent requests.

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index 4fa2362..f3c2ec8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -181,7 +181,7 @@ import com.mchange.v2.c3p0.ComboPooledDataSource;
  * Used for injection purposes.
  */
 public class ControllerModule extends AbstractModule {
-  private static Logger LOG = LoggerFactory.getLogger(ControllerModule.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ControllerModule.class);
   private static final String AMBARI_PACKAGE = "org.apache.ambari.server";
 
   private final Configuration configuration;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractPropertyProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractPropertyProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractPropertyProvider.java
index f7f20cc..322596e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractPropertyProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractPropertyProvider.java
@@ -174,7 +174,7 @@ public abstract class AbstractPropertyProvider extends BaseProvider implements P
       }
     }
     if(LOG.isDebugEnabled()) {
-      LOG.debug("Retrieved Cluster Ids = " + clusterResId.toString());
+      LOG.debug("Retrieved Cluster Ids = {}", clusterResId);
     }
     return clusterResId;
   }
@@ -204,7 +204,7 @@ public abstract class AbstractPropertyProvider extends BaseProvider implements P
     }
 
     if(LOG.isDebugEnabled()) {
-      LOG.debug("Retrieved cluster's Resource Id = " + clusterResIds + ", Resource Type = " + resType);
+      LOG.debug("Retrieved cluster's Resource Id = {}, Resource Type = {}", clusterResIds, resType);
     }
     Iterator<Long> clusResIdsItr = clusterResIds.iterator();
     while (clusResIdsItr.hasNext()) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
index 8975837..0e4f3f4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
@@ -409,7 +409,7 @@ public abstract class AbstractProviderModule implements ProviderModule,
     if (!vipHostConfigPresent) {
       currentCollectorHost = metricsCollectorHAManager.getCollectorHost(clusterName);
       }
-    LOG.debug("Cluster Metrics Vip Host : " + clusterMetricserverVipHost);
+    LOG.debug("Cluster Metrics Vip Host : {}", clusterMetricserverVipHost);
 
     return (clusterMetricserverVipHost != null) ? clusterMetricserverVipHost : currentCollectorHost;
   }
@@ -560,7 +560,7 @@ public abstract class AbstractProviderModule implements ProviderModule,
       }
     }
 
-    LOG.debug("jmxPortMap -> " + jmxPortMap);
+    LOG.debug("jmxPortMap -> {}", jmxPortMap);
 
     ConcurrentMap<String, String> hostJmxPorts = clusterJmxPorts.get(hostName);
     if (hostJmxPorts == null) {
@@ -1018,7 +1018,7 @@ public abstract class AbstractProviderModule implements ProviderModule,
           }
         }
         value = postProcessPropertyValue(propName, value, evaluatedProperties, null);
-        LOG.debug("PROPERTY -> key: " + propName + ", " + "value: " + value);
+        LOG.debug("PROPERTY -> key: {}, value: {}", propName, value);
 
         mConfigs.put(entry.getKey(), value);
 
@@ -1152,15 +1152,12 @@ public abstract class AbstractProviderModule implements ProviderModule,
       jmxProtocolString = "http";
     }
     if (jmxProtocolString == null) {
-      LOG.debug("Detected JMX protocol is null for clusterName = " + clusterName +
-          ", componentName = " + componentName);
-      LOG.debug("Defaulting JMX to HTTP protocol for  for clusterName = " + clusterName +
-          ", componentName = " + componentName);
+      LOG.debug("Detected JMX protocol is null for clusterName = {}, componentName = {}", clusterName, componentName);
+      LOG.debug("Defaulting JMX to HTTP protocol for  for clusterName = {}, componentName = {}", clusterName, componentName);
       jmxProtocolString = "http";
     }
     if (LOG.isDebugEnabled()) {
-      LOG.debug("JMXProtocol = " + jmxProtocolString + ", for clusterName=" + clusterName +
-          ", componentName = " + componentName);
+      LOG.debug("JMXProtocol = {}, for clusterName={}, componentName = {}", jmxProtocolString, clusterName, componentName);
     }
     clusterJmxProtocolMap.put(mapKey, jmxProtocolString);
     return jmxProtocolString;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ActionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ActionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ActionResourceProvider.java
index db9842a..4e618e7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ActionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ActionResourceProvider.java
@@ -100,8 +100,7 @@ public class ActionResourceProvider extends AbstractControllerResourceProvider {
     if (predicate != null) {
       for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) {
         ActionRequest actionReq = getRequest(propertyMap);
-        LOG.debug("Received a get request for Action with"
-            + ", actionName = " + actionReq.getActionName());
+        LOG.debug("Received a get request for Action with, actionName = {}", actionReq.getActionName());
         requests.add(actionReq);
       }
     } else {


[43/50] [abbrv] ambari git commit: AMBARI-19149. Code cleanup: concatenation in debug messages, unnecessary toString calls

Posted by nc...@apache.org.
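
All of the hunks in this part apply the same two mechanical changes: debug messages built by
string concatenation become SLF4J parameterized calls, and redundant toString() calls on objects
handed to a logger (or to string concatenation) are dropped. A minimal before/after sketch of the
pattern, using hypothetical class and method names rather than code taken from the commit, looks
like this:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ParameterizedLoggingExample {

      // Loggers are declared static final, matching the convention enforced in these hunks.
      private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingExample.class);

      public void register(String clusterName, Object component) {
        // Before: LOG.debug("Registering " + component.toString() + " for cluster = " + clusterName);
        // After: the {} placeholders are substituted only when DEBUG is enabled, and SLF4J
        // calls toString() on the arguments itself, so the explicit call is unnecessary.
        LOG.debug("Registering {} for cluster = {}", component, clusterName);
      }
    }

The parameterized form also avoids building the message string when DEBUG is disabled, which is
why many surrounding LOG.isDebugEnabled() guards become optional (they remain useful only when
computing an argument is itself expensive).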
http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/StackRoleCommandOrder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/StackRoleCommandOrder.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/StackRoleCommandOrder.java
index cda9ab0..93ae810 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/StackRoleCommandOrder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/StackRoleCommandOrder.java
@@ -161,7 +161,7 @@ public class StackRoleCommandOrder {
 			}
 			depValue = buffer.toString();
 		  }
-		  LOG.debug(depKey + " => " + depValue);
+      LOG.debug("{} => {}", depKey, depValue);
 		}
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
index b8e57c7..8662958 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
@@ -54,7 +54,7 @@ public class UpgradePack {
 
   private static final String ALL_VERSIONS = "*";
 
-  private static Logger LOG = LoggerFactory.getLogger(UpgradePack.class);
+  private static final Logger LOG = LoggerFactory.getLogger(UpgradePack.class);
 
   /**
    * Name of the file without the extension, such as upgrade-2.2

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
index b19d72d..18f8cce 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
@@ -54,7 +54,7 @@ import com.google.gson.JsonPrimitive;
 @XmlType(name="colocated")
 public class ColocatedGrouping extends Grouping {
 
-  private static Logger LOG = LoggerFactory.getLogger(ColocatedGrouping.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ColocatedGrouping.class);
 
   @XmlElement(name="batch")
   public Batch batch;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigUpgradeChangeDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigUpgradeChangeDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigUpgradeChangeDefinition.java
index 452b66a..89b6567 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigUpgradeChangeDefinition.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigUpgradeChangeDefinition.java
@@ -82,7 +82,7 @@ import com.google.common.base.Objects;
 @XmlAccessorType(XmlAccessType.FIELD)
 public class ConfigUpgradeChangeDefinition {
 
-  private static Logger LOG = LoggerFactory.getLogger(ConfigUpgradeChangeDefinition.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ConfigUpgradeChangeDefinition.class);
 
   /**
    * The key that represents the configuration type to change (ie hdfs-site).

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
index 58d63b3..68dc63f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
@@ -60,7 +60,7 @@ import com.google.gson.Gson;
 @XmlType(name="configure")
 public class ConfigureTask extends ServerSideActionTask {
 
-  private static Logger LOG = LoggerFactory.getLogger(ConfigureTask.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ConfigureTask.class);
 
   /**
    * The key that represents the configuration type to change (ie hdfs-site).

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/HostOrderGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/HostOrderGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/HostOrderGrouping.java
index 4d5003f..dd2dd02 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/HostOrderGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/HostOrderGrouping.java
@@ -56,7 +56,7 @@ import com.google.gson.JsonObject;
 public class HostOrderGrouping extends Grouping {
   private static final String TYPE = "type";
   private static final String HOST = "host";
-  private static Logger LOG = LoggerFactory.getLogger(HostOrderGrouping.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HostOrderGrouping.class);
 
   /**
    * Contains the ordered actions to schedule for this grouping.

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RestartGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RestartGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RestartGrouping.java
index 0e3b4af..6ad7531 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RestartGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RestartGrouping.java
@@ -28,7 +28,7 @@ import org.slf4j.LoggerFactory;
 @XmlType(name="restart")
 public class RestartGrouping extends Grouping implements UpgradeFunction {
 
-  private static Logger LOG = LoggerFactory.getLogger(RestartGrouping.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RestartGrouping.class);
 
   @Override
   public Task.Type getFunction() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ServiceCheckGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ServiceCheckGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ServiceCheckGrouping.java
index ef1d0c3..61a387f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ServiceCheckGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ServiceCheckGrouping.java
@@ -50,7 +50,7 @@ import org.slf4j.LoggerFactory;
 @XmlType(name="service-check")
 public class ServiceCheckGrouping extends Grouping {
 
-  private static Logger LOG = LoggerFactory.getLogger(ServiceCheckGrouping.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ServiceCheckGrouping.class);
 
   /**
    * During a Rolling Upgrade, the priority services are ran first, then the remaining services in the cluster.

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StartGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StartGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StartGrouping.java
index d27e6d8..9a22789 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StartGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StartGrouping.java
@@ -28,7 +28,7 @@ import org.slf4j.LoggerFactory;
 @XmlType(name="start")
 public class StartGrouping extends Grouping implements UpgradeFunction {
 
-  private static Logger LOG = LoggerFactory.getLogger(StartGrouping.class);
+  private static final Logger LOG = LoggerFactory.getLogger(StartGrouping.class);
 
   @Override
   public Task.Type  getFunction() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StopGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StopGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StopGrouping.java
index 44996c8..9a0d956 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StopGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StopGrouping.java
@@ -28,7 +28,7 @@ import org.slf4j.LoggerFactory;
 @XmlType(name="stop")
 public class StopGrouping extends Grouping implements UpgradeFunction {
 
-  private static Logger LOG = LoggerFactory.getLogger(StopGrouping.class);
+  private static final Logger LOG = LoggerFactory.getLogger(StopGrouping.class);
 
   @Override
   public Task.Type getFunction() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
index 98e8736..23f2557 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
@@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory;
  */
 public class TaskWrapperBuilder {
 
-  private static Logger LOG = LoggerFactory.getLogger(TaskWrapperBuilder.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TaskWrapperBuilder.class);
 
   /**
    * Creates a collection of task wrappers based on the set of hosts they are allowed to run on

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
index 9704dc5..f9d3cfc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
@@ -690,8 +690,7 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
         ServiceComponentHostInstallEvent e =
             (ServiceComponentHostInstallEvent) event;
         if (LOG.isDebugEnabled()) {
-          LOG.debug("Updating live stack version during INSTALL event"
-              + ", new stack version=" + e.getStackId());
+          LOG.debug("Updating live stack version during INSTALL event, new stack version={}", e.getStackId());
         }
       }
     }
@@ -1011,9 +1010,7 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
   public void handleEvent(ServiceComponentHostEvent event)
       throws InvalidStateTransitionException {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Handling ServiceComponentHostEvent event,"
-          + " eventType=" + event.getType().name()
-          + ", event=" + event.toString());
+      LOG.debug("Handling ServiceComponentHostEvent event, eventType={}, event={}", event.getType().name(), event);
     }
     State oldState = getState();
     try {
@@ -1045,13 +1042,8 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
                + ", oldState=" + oldState
                + ", currentState=" + getState());
       if (LOG.isDebugEnabled()) {
-        LOG.debug("ServiceComponentHost transitioned to a new state"
-            + ", serviceComponentName=" + getServiceComponentName()
-            + ", hostName=" + getHostName()
-            + ", oldState=" + oldState
-            + ", currentState=" + getState()
-            + ", eventType=" + event.getType().name()
-            + ", event=" + event);
+        LOG.debug("ServiceComponentHost transitioned to a new state, serviceComponentName={}, hostName={}, oldState={}, currentState={}, eventType={}, event={}",
+          getServiceComponentName(), getHostName(), oldState, getState(), event.getType().name(), event);
       }
     }
   }
@@ -1393,7 +1385,7 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
           Long groupId = Long.parseLong(overrideEntry.getKey());
           hc.getConfigGroupOverrides().put(groupId, overrideEntry.getValue());
           if (!configGroupMap.containsKey(groupId)) {
-            LOG.debug("Config group does not exist, id = " + groupId);
+            LOG.debug("Config group does not exist, id = {}", groupId);
           }
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
index 4030e3a..1a43b85 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
@@ -110,7 +110,7 @@ public class BlueprintValidatorImpl implements BlueprintValidator {
       }
       if (containsSecretReferences) {
         throw new InvalidTopologyException("Secret references are not allowed in blueprints, " +
-          "replace following properties with real passwords:\n" + errorMessage.toString());
+          "replace following properties with real passwords:\n" + errorMessage);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
index 19d99ad..0ce5982 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
@@ -35,7 +35,7 @@ import com.google.inject.assistedinject.AssistedInject;
 
 public class ConfigureClusterTask implements Callable<Boolean> {
 
-  private static Logger LOG = LoggerFactory.getLogger(ConfigureClusterTask.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ConfigureClusterTask.class);
 
   private ClusterConfigurationRequest configRequest;
   private ClusterTopology topology;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/TopologyHostTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/TopologyHostTask.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/TopologyHostTask.java
index 82a2f6e..e016ec8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/TopologyHostTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/TopologyHostTask.java
@@ -28,7 +28,7 @@ import org.springframework.security.core.context.SecurityContextHolder;
 
 public abstract class TopologyHostTask implements TopologyTask {
 
-  private static Logger LOG = LoggerFactory.getLogger(TopologyHostTask.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TopologyHostTask.class);
 
   ClusterTopology clusterTopology;
   HostRequest hostRequest;
@@ -56,4 +56,4 @@ public abstract class TopologyHostTask implements TopologyTask {
 
   public abstract void runTask();
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/ClusterConfigTypeValidator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/ClusterConfigTypeValidator.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/ClusterConfigTypeValidator.java
index dce38b4..0170186 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/ClusterConfigTypeValidator.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/ClusterConfigTypeValidator.java
@@ -59,7 +59,7 @@ public class ClusterConfigTypeValidator implements TopologyValidator {
       invalidConfigTypes.removeAll(configTypeIntersection);
 
       LOGGER.error("The following config typess are wrong: {}", invalidConfigTypes);
-      throw new InvalidTopologyException("The following configuration types are invalid: " + invalidConfigTypes.toString());
+      throw new InvalidTopologyException("The following configuration types are invalid: " + invalidConfigTypes);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
index 3f15400..1d277a7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
@@ -1120,7 +1120,7 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
                 }
               }
               if (widgetDescriptor != null) {
-                LOG.debug("Loaded widget descriptor: " + widgetDescriptor);
+                LOG.debug("Loaded widget descriptor: {}", widgetDescriptor);
                 for (Object artifact : widgetDescriptor.values()) {
                   List<WidgetLayout> widgetLayouts = (List<WidgetLayout>) artifact;
                   for (WidgetLayout widgetLayout : widgetLayouts) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
index 48cf5f6..dee05c3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
@@ -403,7 +403,7 @@ public class SchemaUpgradeHelper {
       UpgradeCatalog targetUpgradeCatalog = AbstractUpgradeCatalog
         .getUpgradeCatalog(targetVersion);
 
-      LOG.debug("Target upgrade catalog. " + targetUpgradeCatalog);
+      LOG.debug("Target upgrade catalog. {}", targetUpgradeCatalog);
 
       // Read source version from DB
       String sourceVersion = schemaUpgradeHelper.readSourceVersion();

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
index 9632cd1..cc7dcb8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
@@ -647,7 +647,7 @@ public class UpgradeCatalog222 extends AbstractUpgradeCatalog {
                   }
                 }
                 if (widgetDescriptor != null) {
-                  LOG.debug("Loaded widget descriptor: " + widgetDescriptor);
+                  LOG.debug("Loaded widget descriptor: {}", widgetDescriptor);
                   for (Object artifact : widgetDescriptor.values()) {
                     List<WidgetLayout> widgetLayouts = (List<WidgetLayout>) artifact;
                     for (WidgetLayout widgetLayout : widgetLayouts) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
index 1f3a99d..aed4adf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
@@ -328,7 +328,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
               clusterID, "storm_webui");
 
       if (stormServerProcessDefinitionEntity != null) {
-        LOG.info("Removing alert definition : " + stormServerProcessDefinitionEntity.toString());
+        LOG.info("Removing alert definition : " + stormServerProcessDefinitionEntity);
         alertDefinitionDAO.remove(stormServerProcessDefinitionEntity);
       }
 
@@ -336,7 +336,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
         LOG.info("Updating alert definition : " + stormWebAlert.getDefinitionName());
         String source = stormWebAlert.getSource();
         JsonObject sourceJson = new JsonParser().parse(source).getAsJsonObject();
-        LOG.debug("Source before update : " + sourceJson);
+        LOG.debug("Source before update : {}", sourceJson);
 
         JsonObject uriJson = sourceJson.get("uri").getAsJsonObject();
         uriJson.remove("https");
@@ -346,7 +346,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
         uriJson.addProperty("https_property", "{{storm-site/ui.https.keystore.type}}");
         uriJson.addProperty("https_property_value", "jks");
 
-        LOG.debug("Source after update : " + sourceJson);
+        LOG.debug("Source after update : {}", sourceJson);
         stormWebAlert.setSource(sourceJson.toString());
         alertDefinitionDAO.merge(stormWebAlert);
       }
@@ -370,7 +370,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
         LOG.info("Updating alert definition : " + logSearchWebAlert.getDefinitionName());
         String source = logSearchWebAlert.getSource();
         JsonObject sourceJson = new JsonParser().parse(source).getAsJsonObject();
-        LOG.debug("Source before update : " + sourceJson);
+        LOG.debug("Source before update : {}", sourceJson);
 
         JsonObject uriJson = sourceJson.get("uri").getAsJsonObject();
         uriJson.remove("https_property");
@@ -378,7 +378,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
         uriJson.addProperty("https_property", "{{logsearch-env/logsearch_ui_protocol}}");
         uriJson.addProperty("https_property_value", "https");
 
-        LOG.debug("Source after update : " + sourceJson);
+        LOG.debug("Source after update : {}", sourceJson);
         logSearchWebAlert.setSource(sourceJson.toString());
         alertDefinitionDAO.merge(logSearchWebAlert);
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/utils/AmbariPath.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/AmbariPath.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/AmbariPath.java
index 790e0be..4e1d904 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/AmbariPath.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/AmbariPath.java
@@ -22,7 +22,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class AmbariPath {
-  private static Logger LOG = LoggerFactory.getLogger(AmbariPath.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AmbariPath.class);
   
   public static final String AMBARI_SERVER_ROOT_ENV_VARIABLE = "ROOT";
   public static final String rootDirectory = System.getenv(AMBARI_SERVER_ROOT_ENV_VARIABLE);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/utils/CommandUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/CommandUtils.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/CommandUtils.java
index 296e4d0..af3171d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/CommandUtils.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/CommandUtils.java
@@ -28,7 +28,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class CommandUtils {
-  private static Logger LOG = LoggerFactory.getLogger(CommandUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CommandUtils.class);
 
   /**
    * Converts a collection of commands to {@code}Map{@code} from command.taskId to command.

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/utils/SecretReference.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/SecretReference.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/SecretReference.java
index 2ab9ac5..dfd925d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/SecretReference.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/SecretReference.java
@@ -85,7 +85,7 @@ public class SecretReference {
   }
 
   public static String generateStub(String configType, Long configVersion, String propertyName) {
-    return secretPrefix + ":" + configType + ":" + configVersion.toString() + ":" + propertyName;
+    return secretPrefix + ":" + configType + ":" + configVersion + ":" + propertyName;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
index 4fcad3d..f6967a0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
@@ -28,15 +28,15 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
  * Logs OpenSsl command exit code with description
  */
 public class ShellCommandUtil {
-  private static final Log LOG = LogFactory.getLog(ShellCommandUtil.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ShellCommandUtil.class);
   private static final Object WindowsProcessLaunchLock = new Object();
   private static final String PASS_TOKEN = "pass:";
   private static final String KEY_TOKEN = "-key ";

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
index 1d21ccd..9930148 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
@@ -56,12 +56,12 @@ import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent;
 import org.apache.ambari.server.topology.TopologyManager;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.codehaus.jackson.JsonGenerationException;
 import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.map.SerializationConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Joiner;
 import com.google.common.collect.Sets;
@@ -74,7 +74,7 @@ public class StageUtils {
   public static final String DEFAULT_RACK = "/default-rack";
   public static final String DEFAULT_IPV4_ADDRESS = "127.0.0.1";
 
-  private static final Log LOG = LogFactory.getLog(StageUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(StageUtils.class);
   protected static final String AMBARI_SERVER_HOST = "ambari_server_host";
   protected static final String AMBARI_SERVER_PORT = "ambari_server_port";
   protected static final String AMBARI_SERVER_USE_SSL = "ambari_server_use_ssl";

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/view/HttpImpersonatorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/HttpImpersonatorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/view/HttpImpersonatorImpl.java
index fc3f1d0..7e67891 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/HttpImpersonatorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/HttpImpersonatorImpl.java
@@ -45,7 +45,7 @@ public class HttpImpersonatorImpl implements HttpImpersonator {
   private ViewContext context;
   private final URLStreamProvider urlStreamProvider;
 
-  private static Logger LOG = LoggerFactory.getLogger(HttpImpersonatorImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HttpImpersonatorImpl.class);
 
   public HttpImpersonatorImpl(ViewContext c) {
     this.context = c;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/view/ViewAmbariStreamProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewAmbariStreamProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewAmbariStreamProvider.java
index dfa937e..836796a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewAmbariStreamProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewAmbariStreamProvider.java
@@ -55,7 +55,7 @@ public class ViewAmbariStreamProvider implements AmbariStreamProvider {
    */
   private final AmbariManagementController controller;
 
-  private static Logger LOG = LoggerFactory.getLogger(ViewAmbariStreamProvider.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ViewAmbariStreamProvider.class);
 
 
   // ----- Constructor -----------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/view/ViewContextImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewContextImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewContextImpl.java
index b62f433..f6a8789 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewContextImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewContextImpl.java
@@ -54,14 +54,14 @@ import org.apache.ambari.view.ViewInstanceDefinition;
 import org.apache.ambari.view.cluster.Cluster;
 import org.apache.ambari.view.events.Event;
 import org.apache.ambari.view.events.Listener;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.directory.api.util.Strings;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.apache.velocity.VelocityContext;
 import org.apache.velocity.app.Velocity;
 import org.apache.velocity.exception.ParseErrorException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Guice;
 import com.google.inject.Injector;
@@ -75,7 +75,7 @@ public class ViewContextImpl implements ViewContext, ViewController {
   /**
    * Logger.
    */
-  private static final Log LOG = LogFactory.getLog(ViewContextImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ViewContextImpl.class);
 
   public static final String HADOOP_SECURITY_AUTH_TO_LOCAL = "hadoop.security.auth_to_local";
   public static final String CORE_SITE = "core-site";

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationContextImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationContextImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationContextImpl.java
index 0432265..a13b01e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationContextImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationContextImpl.java
@@ -34,8 +34,8 @@ import org.apache.ambari.view.PersistenceException;
 import org.apache.ambari.view.migration.EntityConverter;
 import org.apache.ambari.view.migration.ViewDataMigrationContext;
 import org.apache.ambari.view.migration.ViewDataMigrationException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.BeanUtils;
 
 import com.google.inject.Guice;
@@ -50,7 +50,7 @@ public class ViewDataMigrationContextImpl implements ViewDataMigrationContext {
   /**
    * Logger.
    */
-  private static final Log LOG = LogFactory.getLog(ViewDataMigrationContextImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ViewDataMigrationContextImpl.class);
 
   /**
    * The data store of origin(source) view instance with source data.

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationUtility.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationUtility.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationUtility.java
index 0c827fe..55f85a7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationUtility.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationUtility.java
@@ -85,12 +85,12 @@ public class ViewDataMigrationUtility {
     Map<String, Class> originClasses = migrationContext.getOriginEntityClasses();
     Map<String, Class> currentClasses = migrationContext.getCurrentEntityClasses();
     for (Map.Entry<String, Class> originEntity : originClasses.entrySet()) {
-      LOG.debug("Migrating persistence entity " + originEntity.getKey());
+      LOG.debug("Migrating persistence entity {}", originEntity.getKey());
       if (currentClasses.containsKey(originEntity.getKey())) {
         Class entity = currentClasses.get(originEntity.getKey());
         dataMigrator.migrateEntity(originEntity.getValue(), entity);
       } else {
-        LOG.debug("Entity " + originEntity.getKey() + " not found in target view");
+        LOG.debug("Entity {} not found in target view", originEntity.getKey());
         dataMigrator.migrateEntity(originEntity.getValue(), null);
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDirectoryWatcher.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDirectoryWatcher.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDirectoryWatcher.java
index 717bc65..a1b984b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDirectoryWatcher.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDirectoryWatcher.java
@@ -40,8 +40,8 @@ import javax.annotation.Nullable;
 
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.utils.Closeables;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Function;
 import com.google.common.collect.Lists;
@@ -68,7 +68,7 @@ public class ViewDirectoryWatcher implements DirectoryWatcher {
 
   private Future<?> watchTask;
 
-  private static Log LOG = LogFactory.getLog(ViewDirectoryWatcher.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ViewDirectoryWatcher.class);
 
   // Callbacks to hook into file processing
   private List<Function<Path, Boolean>> hooks = Lists.newArrayList(Collections.singleton(loggingHook()));

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
index 825a6c5..ad1cc52 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
@@ -108,13 +108,13 @@ public class ViewExtractor {
               try {
                 String   entryPath = archivePath + File.separator + jarEntry.getName();
 
-                LOG.debug("Extracting " + entryPath);
+                LOG.debug("Extracting {}", entryPath);
 
                 File entryFile = archiveUtility.getFile(entryPath);
 
                 if (jarEntry.isDirectory()) {
 
-                  LOG.debug("Making directory " + entryPath);
+                  LOG.debug("Making directory {}", entryPath);
 
                   if (!entryFile.mkdir()) {
                     msg = "Could not create archive entry directory " + entryPath + ".";
@@ -127,14 +127,14 @@ public class ViewExtractor {
 
                   FileOutputStream fos = archiveUtility.getFileOutputStream(entryFile);
                   try {
-                    LOG.debug("Begin copying from " + jarEntry.getName() + " to "+ entryPath);
+                    LOG.debug("Begin copying from {} to {}", jarEntry.getName(), entryPath);
 
                     byte[] buffer = new byte[BUFFER_SIZE];
                     int n;
                     while((n = jarInputStream.read(buffer)) > -1) {
                       fos.write(buffer, 0, n);
                     }
-                    LOG.debug("Finish copying from " + jarEntry.getName() + " to "+ entryPath);
+                    LOG.debug("Finish copying from {} to {}", jarEntry.getName(), entryPath);
 
                   } finally {
                     fos.flush();

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
index 89b784f..c50276e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
@@ -578,8 +578,7 @@ public class ViewRegistry {
 
       if (getInstanceDefinition(viewName, version, instanceName) == null) {
         if (LOG.isDebugEnabled()) {
-          LOG.debug("Creating view instance " + viewName + "/" +
-              version + "/" + instanceName);
+          LOG.debug("Creating view instance {}/{}/{}", viewName, version, instanceName);
         }
 
         instanceEntity.validate(viewEntity, Validator.ValidationContext.PRE_CREATE);
@@ -706,8 +705,7 @@ public class ViewRegistry {
   @Transactional
   public void copyPrivileges(ViewInstanceEntity sourceInstanceEntity,
                              ViewInstanceEntity targetInstanceEntity) {
-    LOG.debug("Copy all privileges from " + sourceInstanceEntity.getName() + " to " +
-        targetInstanceEntity.getName());
+    LOG.debug("Copy all privileges from {} to {}", sourceInstanceEntity.getName(), targetInstanceEntity.getName());
     List<PrivilegeEntity> targetInstancePrivileges = privilegeDAO.findByResourceId(targetInstanceEntity.getResource().getId());
     if (targetInstancePrivileges.size() > 0) {
       LOG.warn("Target instance {} already has privileges assigned, these will not be deleted. Manual clean up may be needed", targetInstanceEntity.getName());
@@ -1496,13 +1494,13 @@ public class ViewRegistry {
     ViewEntity persistedView = viewDAO.findByName(viewName);
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Syncing view " + viewName + ".");
+      LOG.debug("Syncing view {}.", viewName);
     }
 
     // if the view is not yet persisted ...
     if (persistedView == null) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Creating view " + viewName + ".");
+        LOG.debug("Creating view {}.", viewName);
       }
 
       // create an admin resource type to represent this view
@@ -1543,7 +1541,7 @@ public class ViewRegistry {
       }
     }
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Syncing view " + viewName + " complete.");
+      LOG.debug("Syncing view {} complete.", viewName);
     }
   }
 
@@ -1852,14 +1850,13 @@ public class ViewRegistry {
 
   private void migrateDataFromPreviousVersion(ViewEntity viewDefinition, String serverVersion) {
     if (!viewDefinitions.containsKey(viewDefinition.getName())) { // migrate only registered views to avoid recursive calls
-      LOG.debug("Cancel auto migration of not loaded view: " + viewDefinition.getName() + ".");
+      LOG.debug("Cancel auto migration of not loaded view: {}.", viewDefinition.getName());
       return;
     }
     try {
 
       for (ViewInstanceEntity instance : viewDefinition.getInstances()) {
-        LOG.debug("Try to migrate the data from previous version of: " + viewDefinition.getName() + "/" +
-            instance.getInstanceName() + ".");
+        LOG.debug("Try to migrate the data from previous version of: {}/{}.", viewDefinition.getName(), instance.getInstanceName());
         ViewInstanceEntity latestUnregisteredView = getLatestUnregisteredInstance(serverVersion, instance);
 
         if (latestUnregisteredView != null) {
@@ -2246,12 +2243,12 @@ public class ViewRegistry {
               continue;
             }
 
-            LOG.debug("Unregistered extracted view found: " + archiveDir.getPath());
+            LOG.debug("Unregistered extracted view found: {}", archiveDir.getPath());
 
             ViewEntity uViewDefinition = new ViewEntity(uViewConfig, configuration, archiveDir.getPath());
             readViewArchive(uViewDefinition, archiveDir, archiveDir, serverVersion);
             for (ViewInstanceEntity instanceEntity : uViewDefinition.getInstances()) {
-              LOG.debug(uViewDefinition.getName() + " instance found: " + instanceEntity.getInstanceName());
+              LOG.debug("{} instance found: {}", uViewDefinition.getName(), instanceEntity.getInstanceName());
               unregInstancesTimestamps.put(instanceEntity, archiveDir.lastModified());
             }
           }
@@ -2272,10 +2269,10 @@ public class ViewRegistry {
       }
     }
     if (latestPrevInstance != null) {
-      LOG.debug("Previous version of " + instance.getViewEntity().getName() + "/" + instance.getName() + " found: " +
-          latestPrevInstance.getViewEntity().getName() + "/" + latestPrevInstance.getName());
+      LOG.debug("Previous version of {}/{} found: {}/{}",
+        instance.getViewEntity().getName(), instance.getName(), latestPrevInstance.getViewEntity().getName(), latestPrevInstance.getName());
     } else {
-      LOG.debug("Previous version of " + instance.getViewEntity().getName() + "/" + instance.getName() + " not found");
+      LOG.debug("Previous version of {}/{} not found", instance.getViewEntity().getName(), instance.getName());
     }
     return latestPrevInstance;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/view/ViewURLStreamProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewURLStreamProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewURLStreamProvider.java
index 9c44597..349b647 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewURLStreamProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewURLStreamProvider.java
@@ -37,16 +37,16 @@ import org.apache.ambari.view.URLConnectionProvider;
 import org.apache.ambari.view.ViewContext;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.http.client.utils.URIBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Wrapper around an internal URL stream provider.
  */
 public class ViewURLStreamProvider implements org.apache.ambari.view.URLStreamProvider, URLConnectionProvider {
 
-  private static final Log LOG = LogFactory.getLog(ViewContextImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ViewContextImpl.class);
 
   /**
    * The key for the "doAs" header.
@@ -270,7 +270,7 @@ public class ViewURLStreamProvider implements org.apache.ambari.view.URLStreamPr
 
     public HostPortRestrictionHandler(String allowedHostPortsValue) {
       this.allowedHostPortsValue = allowedHostPortsValue;
-      LOG.debug("Proxy restriction will be derived from " + allowedHostPortsValue);
+      LOG.debug("Proxy restriction will be derived from {}", allowedHostPortsValue);
     }
 
     /**
@@ -282,7 +282,7 @@ public class ViewURLStreamProvider implements org.apache.ambari.view.URLStreamPr
      * @return if the host and port combination is allowed
      */
     public boolean allowProxy(String host, String port) {
-      LOG.debug("Checking host " + host + " port " + port + " against allowed list.");
+      LOG.debug("Checking host {} port {} against allowed list.", host, port);
       if (StringUtils.isNotBlank(host)) {
         String hostToCompare = host.trim().toLowerCase();
         if (allowedHostPorts == null) {
@@ -327,13 +327,13 @@ public class ViewURLStreamProvider implements org.apache.ambari.view.URLStreamPr
                 allowed.put(hostAndPort[0], new HashSet<String>());
               }
               allowed.get(hostAndPort[0]).add("*");
-              LOG.debug("Allow proxy to host " + hostAndPort[0] + " and all ports.");
+              LOG.debug("Allow proxy to host {} and all ports.", hostAndPort[0]);
             } else {
               if (!allowed.containsKey(hostAndPort[0])) {
                 allowed.put(hostAndPort[0], new HashSet<String>());
               }
               allowed.get(hostAndPort[0]).add(hostAndPort[1]);
-              LOG.debug("Allow proxy to host " + hostAndPort[0] + " and port " + hostAndPort[1]);
+              LOG.debug("Allow proxy to host {} and port {}", hostAndPort[0], hostAndPort[1]);
             }
           }
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
index c2c548d..38b77da 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
@@ -75,13 +75,13 @@ import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostImpl;
 import org.apache.ambari.server.topology.PersistedState;
 import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import org.easymock.EasyMock;
 import org.eclipse.jetty.server.SessionManager;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -103,7 +103,7 @@ import junit.framework.Assert;
 
 public class AgentResourceTest extends RandomPortJerseyTest {
   static String PACKAGE_NAME = "org.apache.ambari.server.agent.rest";
-  private static Log LOG = LogFactory.getLog(AgentResourceTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AgentResourceTest.class);
   protected Client client;
   HeartBeatHandler handler;
   ActionManager actionManager;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/agent/LocalAgentSimulator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/LocalAgentSimulator.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/LocalAgentSimulator.java
index 9de1509..366f7b3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/LocalAgentSimulator.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/LocalAgentSimulator.java
@@ -20,15 +20,15 @@ package org.apache.ambari.server.agent;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The purpose of this class is to simulate the agent.
  */
 public class LocalAgentSimulator implements Runnable {
 
-  private static Log LOG = LogFactory.getLog(HeartBeatHandler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HeartBeatHandler.class);
 
   private Thread agentThread = null;
   private volatile boolean shouldRun = true;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/agent/TestActionQueue.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestActionQueue.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestActionQueue.java
index 0faa311..fb9268e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestActionQueue.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestActionQueue.java
@@ -34,7 +34,7 @@ import org.slf4j.LoggerFactory;
 
 public class TestActionQueue {
 
-  private static Logger LOG = LoggerFactory.getLogger(TestActionQueue.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestActionQueue.class);
 
   private static int threadCount = 100;
   static class ActionQueueOperation implements Runnable {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
index 7d95ed2..baa9bae 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
@@ -559,7 +559,7 @@ public class TestHeartbeatHandler {
       handler.handleRegistration(reg);
       fail ("Expected failure for non compatible agent version");
     } catch (AmbariException e) {
-      log.debug("Error:" + e.getMessage());
+      log.debug("Error:{}", e.getMessage());
       Assert.assertTrue(e.getMessage().contains(
           "Cannot register host with non compatible agent version"));
     }
@@ -569,7 +569,7 @@ public class TestHeartbeatHandler {
       handler.handleRegistration(reg);
       fail ("Expected failure for non compatible agent version");
     } catch (AmbariException e) {
-      log.debug("Error:" + e.getMessage());
+      log.debug("Error:{}", e.getMessage());
       Assert.assertTrue(e.getMessage().contains(
           "Cannot register host with non compatible agent version"));
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
index 06f8918..349cb54 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
@@ -587,7 +587,7 @@ public class AmbariMetaInfoTest {
 
     for (List<RepositoryInfo> vals : repos.values()) {
       for (RepositoryInfo repo : vals) {
-        LOG.debug("Dumping repo info : " + repo.toString());
+        LOG.debug("Dumping repo info : {}", repo);
         if (repo.getOsType().equals("centos5")) {
           centos5Cnt.add(repo.getRepoId());
         } else if (repo.getOsType().equals("centos6")) {
@@ -1070,7 +1070,7 @@ public class AmbariMetaInfoTest {
 
               }
             }
-            Assert.assertEquals(failedMetrics.toString() +
+            Assert.assertEquals(failedMetrics +
                 " metrics defined with pointInTime=true for both jmx and ganglia types.",
               0, failedMetrics.size());
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/api/services/PersistServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/PersistServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/PersistServiceTest.java
index b68c666..2950872 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/PersistServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/PersistServiceTest.java
@@ -26,12 +26,12 @@ import org.apache.ambari.server.RandomPortJerseyTest;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.utils.StageUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.codehaus.jettison.json.JSONException;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
@@ -49,7 +49,7 @@ import junit.framework.Assert;
 
 public class PersistServiceTest extends RandomPortJerseyTest {
   static String PACKAGE_NAME = "org.apache.ambari.server.api.services";
-  private static Log LOG = LogFactory.getLog(PersistServiceTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PersistServiceTest.class);
   Injector injector;
   protected Client client;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/bootstrap/BootStrapResourceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/bootstrap/BootStrapResourceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/bootstrap/BootStrapResourceTest.java
index b99b172..9034162 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/bootstrap/BootStrapResourceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/bootstrap/BootStrapResourceTest.java
@@ -31,11 +31,11 @@ import javax.ws.rs.core.MediaType;
 import org.apache.ambari.server.api.rest.BootStrapResource;
 import org.apache.ambari.server.bootstrap.BSResponse.BSRunStat;
 import org.apache.ambari.server.bootstrap.BootStrapStatus.BSStat;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
@@ -54,7 +54,7 @@ import junit.framework.Assert;
 public class BootStrapResourceTest extends JerseyTest {
 
   static String PACKAGE_NAME = "org.apache.ambari.server.api.rest";
-  private static Log LOG = LogFactory.getLog(BootStrapResourceTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(BootStrapResourceTest.class);
   Injector injector;
   BootStrapImpl bsImpl;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/bootstrap/BootStrapTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/bootstrap/BootStrapTest.java b/ambari-server/src/test/java/org/apache/ambari/server/bootstrap/BootStrapTest.java
index 91ca644..b9da013 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/bootstrap/BootStrapTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/bootstrap/BootStrapTest.java
@@ -28,12 +28,12 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.bootstrap.BootStrapStatus.BSStat;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import junit.framework.Assert;
 import junit.framework.TestCase;
@@ -42,7 +42,7 @@ import junit.framework.TestCase;
  * Test BootStrap Implementation.
  */
 public class BootStrapTest extends TestCase {
-  private static Log LOG = LogFactory.getLog(BootStrapTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(BootStrapTest.class);
   public TemporaryFolder temp = new TemporaryFolder();
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index e10e4cd..3215e72 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -2054,8 +2054,7 @@ public class AmbariManagementControllerTest {
     boolean foundH2CLT = false;
 
     for (ShortTaskStatus taskStatus : taskStatuses) {
-      LOG.debug("Task dump :"
-          + taskStatus.toString());
+      LOG.debug("Task dump :{}", taskStatus);
       Assert.assertEquals(RoleCommand.INSTALL.toString(),
           taskStatus.getCommand());
       Assert.assertEquals(HostRoleStatus.PENDING.toString(),
@@ -2166,7 +2165,7 @@ public class AmbariManagementControllerTest {
 
     StringBuilder sb = new StringBuilder();
     clusters.debugDump(sb);
-    LOG.info("Cluster Dump: " + sb.toString());
+    LOG.info("Cluster Dump: " + sb);
 
     for (ServiceComponent sc :
       clusters.getCluster(cluster1).getService(serviceName)
@@ -3457,7 +3456,7 @@ public class AmbariManagementControllerTest {
     List<Stage> stages = actionDB.getAllStages(requestId);
 
     for (Stage stage : stages) {
-      LOG.debug("Stage dump: " + stage.toString());
+      LOG.debug("Stage dump: {}", stage);
     }
 
     Assert.assertTrue(!stages.isEmpty());
@@ -3694,7 +3693,7 @@ public class AmbariManagementControllerTest {
     // FIXME check stage count
 
     for (Stage stage : stages) {
-      LOG.debug("Stage dump: " + stage.toString());
+      LOG.debug("Stage dump: {}", stage);
     }
 
     // FIXME verify stages content - execution commands, etc
@@ -3864,7 +3863,7 @@ public class AmbariManagementControllerTest {
     // FIXME check stage count
 
     for (Stage stage : stages) {
-      LOG.debug("Stage dump: " + stage.toString());
+      LOG.debug("Stage dump: {}", stage);
     }
 
     // FIXME verify stages content - execution commands, etc
@@ -5255,7 +5254,7 @@ public class AmbariManagementControllerTest {
       List<HostRoleCommand> hrcs = stage.getOrderedHostRoleCommands();
 
       for (HostRoleCommand hrc : hrcs) {
-        LOG.debug("role: " + hrc.getRole());
+        LOG.debug("role: {}", hrc.getRole());
         if (hrc.getRole().toString().equals("HDFS_CLIENT")) {
           if (hrc.getHostName().equals(host3)) {
             hdfsCmdHost3 = hrc;
@@ -5494,7 +5493,7 @@ public class AmbariManagementControllerTest {
 
     stages = actionDB.getAllStages(trackAction.getRequestId());
     for (Stage s : stages) {
-      LOG.info("Stage dump : " + s.toString());
+      LOG.info("Stage dump : " + s);
     }
     Assert.assertEquals(1, stages.size());
 
@@ -10398,7 +10397,7 @@ public class AmbariManagementControllerTest {
       add(configRequest);
     }});
     for(ConfigurationResponse resp : requestedConfigs) {
-      String secretName = "SECRET:hdfs-site:"+resp.getVersion().toString()+":test.password";
+      String secretName = "SECRET:hdfs-site:"+ resp.getVersion() +":test.password";
       if(resp.getConfigs().containsKey("test.password")) {
         assertEquals(resp.getConfigs().get("test.password"), secretName);
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AbstractJDBCResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AbstractJDBCResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AbstractJDBCResourceProviderTest.java
index 07540bf..6c47ae1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AbstractJDBCResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AbstractJDBCResourceProviderTest.java
@@ -59,7 +59,7 @@ public class AbstractJDBCResourceProviderTest {
     AbstractJDBCResourceProvider<TestFields> provider = new TestAbstractJDBCResourceProviderImpl(
         requestedIds, null);
     Assert.assertEquals(
-        TestFields.field1.toString() + "," + TestFields.field2.toString(),
+        TestFields.field1 + "," + TestFields.field2,
         provider.getDBFieldString(requestedIds));
     Assert.assertEquals(TestFields.field1.toString(),
         provider.getDBFieldString(Collections.singleton(property1)));

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/CredentialResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/CredentialResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/CredentialResourceProviderTest.java
index 7cc6702..89ed022 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/CredentialResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/CredentialResourceProviderTest.java
@@ -385,7 +385,7 @@ public class CredentialResourceProviderTest {
       } else if ("alias3".equals(alias)) {
         Assert.assertEquals(CredentialStoreType.TEMPORARY.name().toLowerCase(), type);
       } else {
-        Assert.fail("Unexpected alias in list: " + alias.toString());
+        Assert.fail("Unexpected alias in list: " + alias);
       }
     }
 
@@ -462,7 +462,7 @@ public class CredentialResourceProviderTest {
       if ("alias1".equals(alias)) {
         Assert.assertEquals(CredentialStoreType.TEMPORARY.name().toLowerCase(), type);
       } else {
-        Assert.fail("Unexpected alias in list: " + alias.toString());
+        Assert.fail("Unexpected alias in list: " + alias);
       }
     }
 
@@ -658,7 +658,7 @@ public class CredentialResourceProviderTest {
       if ("alias1".equals(alias)) {
         Assert.assertEquals(CredentialStoreType.TEMPORARY.name().toLowerCase(), type);
       } else {
-        Assert.fail("Unexpected alias in list: " + alias.toString());
+        Assert.fail("Unexpected alias in list: " + alias);
       }
     }
 
@@ -674,7 +674,7 @@ public class CredentialResourceProviderTest {
       if ("alias1".equals(alias)) {
         Assert.assertEquals(CredentialStoreType.PERSISTED.name().toLowerCase(), type);
       } else {
-        Assert.fail("Unexpected alias in list: " + alias.toString());
+        Assert.fail("Unexpected alias in list: " + alias);
       }
     }
 
@@ -821,7 +821,7 @@ public class CredentialResourceProviderTest {
       if ("alias1".equals(alias)) {
         Assert.assertEquals(CredentialStoreType.TEMPORARY.name().toLowerCase(), type);
       } else {
-        Assert.fail("Unexpected alias in list: " + alias.toString());
+        Assert.fail("Unexpected alias in list: " + alias);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaMetricTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaMetricTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaMetricTest.java
index 2df730b..7289674 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaMetricTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaMetricTest.java
@@ -88,7 +88,7 @@ public class GangliaMetricTest {
     listTemporalMetrics.add(new GangliaMetric.TemporalMetric("0.0", new Long(17)));
     listTemporalMetrics.add(new GangliaMetric.TemporalMetric("0.0", new Long(18)));
     instance.setDatapointsFromList(listTemporalMetrics);
-    System.out.println(instance.toString());
+    System.out.println(instance);
     assertTrue(instance.getDatapoints().length == 11);
   }  
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
index 550cc9f..271d536 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
@@ -106,7 +106,7 @@ import com.google.inject.persist.Transactional;
 @Singleton
 public class OrmTestHelper {
 
-  private static Logger LOG = LoggerFactory.getLogger(OrmTestHelper.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OrmTestHelper.class);
 
   private AtomicInteger uniqueCounter = new AtomicInteger();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAOTest.java
index 92d8d60..d596084 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAOTest.java
@@ -91,7 +91,7 @@ public class RepositoryVersionDAOTest {
 
     // Assert the version must be unique
     RepositoryVersionEntity dupVersion = new RepositoryVersionEntity();
-    dupVersion.setDisplayName("display name " + uuid.toString());
+    dupVersion.setDisplayName("display name " + uuid);
     dupVersion.setOperatingSystems("repositories");
     dupVersion.setStack(stackEntity);
     dupVersion.setVersion(first.getVersion());

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/security/CertGenerationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/CertGenerationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/CertGenerationTest.java
index 784fd86..0ae3001 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/CertGenerationTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/CertGenerationTest.java
@@ -35,14 +35,14 @@ import org.apache.ambari.server.utils.ShellCommandUtil;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
@@ -56,7 +56,7 @@ public class CertGenerationTest {
   private static final int PASS_FILE_NAME_LEN = 20;
   private static final float MAX_PASS_LEN = 100;
 
-  private static Log LOG = LogFactory.getLog(CertGenerationTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CertGenerationTest.class);
   public static TemporaryFolder temp = new TemporaryFolder();
 
   private static Injector injector;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/security/SslExecutionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/SslExecutionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/SslExecutionTest.java
index f91f5b7..3614be7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/SslExecutionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/SslExecutionTest.java
@@ -26,12 +26,12 @@ import java.util.Properties;
 
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.state.stack.OsFamily;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
@@ -40,7 +40,7 @@ import com.google.inject.Injector;
 
 public class SslExecutionTest {
 
-  private static Log LOG = LogFactory.getLog(SslExecutionTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SslExecutionTest.class);
   public TemporaryFolder temp = new TemporaryFolder();
 
   Injector injector;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/security/encryption/MasterKeyServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/encryption/MasterKeyServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/encryption/MasterKeyServiceTest.java
index f289c11..5f76072 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/encryption/MasterKeyServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/encryption/MasterKeyServiceTest.java
@@ -31,8 +31,6 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.ambari.server.configuration.Configuration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
@@ -40,6 +38,8 @@ import org.junit.runner.RunWith;
 import org.powermock.core.classloader.annotations.PowerMockIgnore;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import junit.framework.Assert;
 import junit.framework.TestCase;
@@ -51,7 +51,7 @@ public class MasterKeyServiceTest extends TestCase {
   @Rule
   public TemporaryFolder tmpFolder = new TemporaryFolder();
   private String fileDir;
-  private static final Log LOG = LogFactory.getLog
+  private static final Logger LOG = LoggerFactory.getLogger
       (MasterKeyServiceTest.class);
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
index b85a901..3d8b87d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
@@ -56,12 +56,12 @@ import org.apache.ambari.server.state.HostHealthStatus.HealthStatus;
 import org.apache.ambari.server.state.HostState;
 import org.apache.ambari.server.state.MaintenanceState;
 import org.apache.ambari.server.state.StackId;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Guice;
 import com.google.inject.Injector;
@@ -72,7 +72,7 @@ public class HostTest {
   private Clusters clusters;
   private HostDAO hostDAO;
   private OrmTestHelper helper;
-  private static Log LOG = LogFactory.getLog(HostTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HostTest.class);
 
   @Before
    public void setup() throws AmbariException{

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
index 4dedc2f..dbf0c8f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
@@ -78,7 +78,7 @@ import com.google.inject.Inject;
 import com.google.inject.Injector;
 
 public class ServiceComponentHostTest {
-  private static Logger LOG = LoggerFactory.getLogger(ServiceComponentHostTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ServiceComponentHostTest.class);
   @Inject
   private Injector injector;
   @Inject
@@ -177,8 +177,7 @@ public class ServiceComponentHostTest {
     try {
       s = c.getService(svc);
     } catch (ServiceNotFoundException e) {
-      LOG.debug("Calling service create"
-          + ", serviceName=" + svc);
+      LOG.debug("Calling service create, serviceName={}", svc);
 
       s = serviceFactory.createNew(c, svc, repositoryVersion);
       c.addService(s);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/testing/DBInconsistencyTests.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/testing/DBInconsistencyTests.java b/ambari-server/src/test/java/org/apache/ambari/server/testing/DBInconsistencyTests.java
index 506846b..d70adad 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/testing/DBInconsistencyTests.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/testing/DBInconsistencyTests.java
@@ -55,7 +55,7 @@ import com.google.inject.persist.PersistService;
 
 public class DBInconsistencyTests {
 
-  private static Logger LOG = LoggerFactory.getLogger(DBInconsistencyTests.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DBInconsistencyTests.class);
 
   @Inject
   private Injector injector;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeTest.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeTest.java
index e311fea..207e958 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeTest.java
@@ -193,7 +193,7 @@ public class UpgradeTest {
     UpgradeCatalog targetUpgradeCatalog = AbstractUpgradeCatalog
         .getUpgradeCatalog(targetVersion);
 
-    LOG.debug("Target upgrade catalog. " + targetUpgradeCatalog);
+    LOG.debug("Target upgrade catalog. {}", targetUpgradeCatalog);
 
     // Read source version from DB
     String sourceVersion = schemaUpgradeHelper.readSourceVersion();
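
The hunks in this commit all apply one pattern: commons-logging Log/LogFactory fields become SLF4J Logger/LoggerFactory fields (and existing slf4j loggers are made static final), while string concatenation inside log calls becomes a parameterized message with {} placeholders, so the text is only formatted when the level is enabled and explicit toString() calls can be dropped. A minimal before/after sketch of the pattern (the class name and message are illustrative, not taken from the patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingPatternExample {

      // Before: private static Log LOG = LogFactory.getLog(LoggingPatternExample.class);
      private static final Logger LOG = LoggerFactory.getLogger(LoggingPatternExample.class);

      void dump(Object stage) {
        // Before: LOG.debug("Stage dump: " + stage.toString());
        // The parameterized form defers formatting until DEBUG is actually enabled,
        // and SLF4J calls toString() on the argument itself.
        LOG.debug("Stage dump: {}", stage);
      }
    }

Calls that stay on concatenation after the patch (for example the INFO-level cluster dump) only lose the redundant toString(), since string concatenation already invokes it implicitly.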


[26/50] [abbrv] ambari git commit: AMBARI-20122 - Stack advisor needs to recommend dependency for slaves and masters

Posted by nc...@apache.org.
AMBARI-20122 - Stack advisor needs to recommend dependency for slaves and masters


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0b6d0dce
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0b6d0dce
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0b6d0dce

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 0b6d0dcef10ec60c83ad055567b487104573ae62
Parents: 7dc91c5
Author: Tim Thorpe <tt...@apache.org>
Authored: Thu Jun 8 08:51:30 2017 -0700
Committer: Tim Thorpe <tt...@apache.org>
Committed: Thu Jun 8 08:51:30 2017 -0700

----------------------------------------------------------------------
 .../src/main/resources/stacks/stack_advisor.py  | 115 +++++++++++++-
 .../stacks/2.0.6/common/test_stack_advisor.py   | 153 +++++++++++++++++++
 2 files changed, 263 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d0dce/ambari-server/src/main/resources/stacks/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/stack_advisor.py b/ambari-server/src/main/resources/stacks/stack_advisor.py
index 4a81dc6..6bc8fe4 100644
--- a/ambari-server/src/main/resources/stacks/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/stack_advisor.py
@@ -756,15 +756,19 @@ class DefaultStackAdvisor(StackAdvisor):
       if hostName not in hostsComponentsMap:
         hostsComponentsMap[hostName] = []
 
+    #Sort the services so that the services others depend on are processed before the services that depend on them.
+    sortedServices = self.getServicesSortedByDependencies(services)
     #extend hostsComponentsMap' with MASTER components
-    for service in services["services"]:
+    for service in sortedServices:
       masterComponents = [component for component in service["components"] if self.isMasterComponent(component)]
       serviceName = service["StackServices"]["service_name"]
       serviceAdvisor = self.getServiceAdvisor(serviceName)
       for component in masterComponents:
         componentName = component["StackServiceComponents"]["component_name"]
         advisor = serviceAdvisor if serviceAdvisor is not None else self
-        hostsForComponent = advisor.getHostsForMasterComponent(services, hosts, component, hostsList)
+        #Filter the hosts such that only hosts that meet the dependencies are included (if possible)
+        filteredHosts = self.getFilteredHostsBasedOnDependencies(services, component, hostsList, hostsComponentsMap)
+        hostsForComponent = advisor.getHostsForMasterComponent(services, hosts, component, filteredHosts)
 
         #extend 'hostsComponentsMap' with 'hostsForComponent'
         for hostName in hostsForComponent:
@@ -778,7 +782,7 @@ class DefaultStackAdvisor(StackAdvisor):
     utilizedHosts = [item for sublist in usedHostsListList for item in sublist]
     freeHosts = [hostName for hostName in hostsList if hostName not in utilizedHosts]
 
-    for service in services["services"]:
+    for service in sortedServices:
       slaveClientComponents = [component for component in service["components"]
                                if self.isSlaveComponent(component) or self.isClientComponent(component)]
       serviceName = service["StackServices"]["service_name"]
@@ -786,7 +790,10 @@ class DefaultStackAdvisor(StackAdvisor):
       for component in slaveClientComponents:
         componentName = component["StackServiceComponents"]["component_name"]
         advisor = serviceAdvisor if serviceAdvisor is not None else self
-        hostsForComponent = advisor.getHostsForSlaveComponent(services, hosts, component, hostsList, freeHosts)
+        #Filter the hosts and free hosts such that only hosts that meet the dependencies are included (if possible)
+        filteredHosts = self.getFilteredHostsBasedOnDependencies(services, component, hostsList, hostsComponentsMap)
+        filteredFreeHosts = self.filterList(freeHosts, filteredHosts)
+        hostsForComponent = advisor.getHostsForSlaveComponent(services, hosts, component, filteredHosts, filteredFreeHosts)
 
         #extend 'hostsComponentsMap' with 'hostsForComponent'
         for hostName in hostsForComponent:
@@ -796,7 +803,7 @@ class DefaultStackAdvisor(StackAdvisor):
             hostsComponentsMap[hostName].append( { "name": componentName } )
 
     #colocate custom services
-    for service in services["services"]:
+    for service in sortedServices:
       serviceName = service["StackServices"]["service_name"]
       serviceAdvisor = self.getServiceAdvisor(serviceName)
       if serviceAdvisor is not None:
@@ -866,6 +873,104 @@ class DefaultStackAdvisor(StackAdvisor):
 
     return hostsForComponent
 
+  def getServicesSortedByDependencies(self, services):
+    """
+    Sorts the services based on their dependencies.  This is limited to non-conditional host scope dependencies.
+    Services with no dependencies will go first.  Services with dependencies will go after the services they are dependent on.
+    If there are circular dependencies, the services will go in the order in which they were processed.
+    """
+    processedServices = []
+    sortedServices = []
+
+    for service in services["services"]:
+      self.sortServicesByDependencies(services, service, processedServices, sortedServices)
+
+    return sortedServices
+
+  def sortServicesByDependencies(self, services, service, processedServices, sortedServices):
+    """
+    Sorts the services based on their dependencies.  This is limited to non-conditional host scope dependencies.
+    Services with no dependencies will go first.  Services with dependencies will go after the services they are dependent on.
+    If there are circular dependencies, the services will go in the order in which they were processed.
+    """
+    if service in processedServices:
+      return
+
+    processedServices.append(service)
+
+    for component in service["components"]:
+      dependencies = [] if "dependencies" not in component else component['dependencies']
+      for dependency in dependencies:
+        # accounts only for dependencies that are not conditional
+        conditionsPresent =  "conditions" in dependency["Dependencies"] and dependency["Dependencies"]["conditions"]
+        scope = "cluster" if "scope" not in dependency["Dependencies"] else dependency["Dependencies"]["scope"]
+        if not conditionsPresent and scope == "host":
+          componentName = component["StackServiceComponents"]["component_name"]
+          requiredComponentName = dependency["Dependencies"]["component_name"]
+          requiredService = self.getServiceForComponentName(services, requiredComponentName)
+          self.sortServicesByDependencies(services, requiredService, processedServices, sortedServices)
+
+    sortedServices.append(service)
+
+  def getFilteredHostsBasedOnDependencies(self, services, component, hostsList, hostsComponentsMap):
+    """
+    Returns a list of hosts that only includes the ones which have all host scope dependencies already assigned to them.
+    If an empty list would be returned, the full list of hosts is returned instead.
+    In that case, we can't possibly return a valid recommended layout so we will at least return a fully filled layout.
+    """
+    removeHosts = []
+    dependencies = [] if "dependencies" not in component else component['dependencies']
+    for dependency in dependencies:
+      # accounts only for dependencies that are not conditional
+      conditionsPresent =  "conditions" in dependency["Dependencies"] and dependency["Dependencies"]["conditions"]
+      if not conditionsPresent:
+        componentName = component["StackServiceComponents"]["component_name"]
+        requiredComponentName = dependency["Dependencies"]["component_name"]
+        requiredComponent = self.getRequiredComponent(services, requiredComponentName)
+
+        # We only deal with "host" scope.
+        if (requiredComponent is not None) and (requiredComponent["component_category"] != "CLIENT"):
+          scope = "cluster" if "scope" not in dependency["Dependencies"] else dependency["Dependencies"]["scope"]
+          if scope == "host":
+            for host, hostComponents in hostsComponentsMap.iteritems():
+              isRequiredIncluded = False
+              for component in hostComponents:
+                currentComponentName = None if "name" not in component else component["name"]
+                if requiredComponentName == currentComponentName:
+                  isRequiredIncluded = True
+              if not isRequiredIncluded:
+                removeHosts.append(host)
+
+    filteredHostsList = []
+    for host in hostsList:
+      if host not in removeHosts:
+        filteredHostsList.append(host)
+    return filteredHostsList
+
+  def filterList(self, list, filter):
+    """
+    Returns the intersection of the two lists passed in (list and filter params).
+    """
+    filteredList = []
+    for item in list:
+      if item in filter:
+        filteredList.append(item)
+    return filteredList
+
+  def getServiceForComponentName(self, services, componentName):
+    """
+    Return service for component name
+
+    :type services dict
+    :type componentName str
+    """
+    for service in services["services"]:
+      for component in service["components"]:
+        if self.getComponentName(component) == componentName:
+          return service
+
+    return None
+
   def isComponentUsingCardinalityForLayout(self, componentName):
     return False
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d0dce/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 41c57f6..b6f1965 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -191,6 +191,159 @@ class TestHDP206StackAdvisor(TestCase):
     ]
     self.assertValidationResult(expectedItems, result)
 
+
+  def test_handleComponentDependencies(self):
+    services = {
+      "Versions":
+        {
+          "stack_name":"HDP",
+          "stack_version":"2.0.6"
+        },
+      "services" : [
+        {
+          "StackServices" : {
+            "service_name" : "HDFS",
+            "service_version" : "2.0.6",
+          },
+          "components": [
+            {
+              "StackServiceComponents": {
+                "stack_version": "2.0.6",
+                "stack_name": "HDP",
+                "component_category": "MASTER",
+                "is_client": False,
+                "is_master": True,
+                "service_name": "HDFS",
+                "cardinality": "1-2",
+                "hostnames": [],
+                "component_name": "NAMENODE",
+                "display_name": "NameNode"
+              },
+              "dependencies": [
+                {
+                  "Dependencies": {
+                    "stack_name": "HDP",
+                    "stack_version": "2.0.6",
+                    "scope": "cluster",
+                    "conditions": [
+                      {
+                        "configType": "hdfs-site",
+                        "property": "dfs.nameservices",
+                        "type": "PropertyExists",
+                      }
+                    ],
+                    "dependent_service_name": "HDFS",
+                    "dependent_component_name": "NAMENODE",
+                    "component_name": "ZOOKEEPER_SERVER"
+                  }
+                }
+              ]
+            }
+          ]
+        },
+        {
+          "StackServices" : {
+            "service_name" : "ZOOKEEPER",
+            "service_version" : "2.0.6",
+          },
+          "components": [
+            {
+              "StackServiceComponents": {
+                "stack_version": "2.0.6",
+                "stack_name": "HDP",
+                "component_category": "MASTER",
+                "is_client": False,
+                "is_master": True,
+                "service_name": "HDFS",
+                "cardinality": "1-2",
+                "hostnames": [],
+                "component_name": "ZOOKEEPER_SERVER",
+                "display_name": "ZooKeeper Server"
+              },
+              "dependencies": []
+            }
+          ]
+        }
+      ]
+    }
+
+    nameNodeDependencies = services["services"][0]["components"][0]["dependencies"][0]["Dependencies"]
+
+    # Tests for master component with dependencies
+
+    hosts = self.prepareHosts(["c6401.ambari.apache.org", "c6402.ambari.apache.org", "c6403.ambari.apache.org", "c6404.ambari.apache.org"])
+    services["services"][1]["components"][0]["StackServiceComponents"]["hostnames"] = ["c6402.ambari.apache.org", "c6403.ambari.apache.org"]
+    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    # Assert that dependencies are ignored when there are conditions and cluster scope
+    self.assertEquals(recommendations['blueprint']['host_groups'][3]['components'][0]['name'], 'NAMENODE')
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][3]['components']), 1)
+
+    nameNodeDependencies["scope"] = "host"
+    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    # Assert that dependencies are ignored when there are conditions (even for host scope)
+    self.assertEquals(recommendations['blueprint']['host_groups'][3]['components'][0]['name'], 'NAMENODE')
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][3]['components']), 1)
+
+    nameNodeDependencies["scope"] = "cluster"
+    originalConditions = nameNodeDependencies["conditions"]
+    nameNodeDependencies["conditions"] = []
+    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    # Assert that dependencies are ignored when scope is cluster
+    self.assertEquals(recommendations['blueprint']['host_groups'][3]['components'][0]['name'], 'NAMENODE')
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][3]['components']), 1)
+
+    nameNodeDependencies["scope"] = "host"
+    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    # Assert that dependencies are enforced for host scope without conditions
+    #self.assertEquals(recommendations, "")
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][0]['components']), 2)
+
+    services["services"][1]["components"][0]["StackServiceComponents"]["is_master"] = False
+    services["services"][1]["components"][0]["StackServiceComponents"]["component_category"] = "CLIENT"
+    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    # Assert that dependencies are ignored when depending on client components
+    self.assertEquals(recommendations['blueprint']['host_groups'][3]['components'][0]['name'], 'NAMENODE')
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][3]['components']), 1)
+
+    # Tests for slave component with dependencies
+    services["services"][0]["components"][0]["StackServiceComponents"]["component_category"] = "SLAVE"
+    services["services"][0]["components"][0]["StackServiceComponents"]["is_master"] = False
+    services["services"][1]["components"][0]["StackServiceComponents"]["component_category"] = "MASTER"
+    services["services"][1]["components"][0]["StackServiceComponents"]["is_master"] = True
+
+    nameNodeDependencies["scope"] = "cluster"
+    nameNodeDependencies["conditions"] = originalConditions
+    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    # Assert that dependencies are ignored when there are conditions and cluster scope
+    self.assertEquals(recommendations['blueprint']['host_groups'][2]['components'][0]['name'], 'NAMENODE')
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][2]['components']), 1)
+    self.assertEquals(recommendations['blueprint']['host_groups'][3]['components'][0]['name'], 'NAMENODE')
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][3]['components']), 1)
+
+    nameNodeDependencies["scope"] = "host"
+    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    # Assert that dependencies are ignored when there are conditions (even for host scope)
+    self.assertEquals(recommendations['blueprint']['host_groups'][2]['components'][0]['name'], 'NAMENODE')
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][2]['components']), 1)
+    self.assertEquals(recommendations['blueprint']['host_groups'][3]['components'][0]['name'], 'NAMENODE')
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][3]['components']), 1)
+
+    nameNodeDependencies["scope"] = "cluster"
+    nameNodeDependencies["conditions"] = []
+    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    # Assert that dependencies are ignored when scope is cluster
+    self.assertEquals(recommendations['blueprint']['host_groups'][2]['components'][0]['name'], 'NAMENODE')
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][2]['components']), 1)
+    self.assertEquals(recommendations['blueprint']['host_groups'][3]['components'][0]['name'], 'NAMENODE')
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][3]['components']), 1)
+
+    nameNodeDependencies["scope"] = "host"
+    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    # Assert that dependencies are enforced when host scope and no conditions
+    self.assertEquals(recommendations['blueprint']['host_groups'][1]['components'][1]['name'], 'NAMENODE')
+    self.assertEquals(len(recommendations['blueprint']['host_groups'][1]['components']), 2)
+
+
   def test_validateRequiredComponentsPresent(self):
     services = {
       "Versions":


[49/50] [abbrv] ambari git commit: AMBARI-21204 Yarn stopped by itself after start. HA run (dsen)

Posted by nc...@apache.org.
AMBARI-21204 Yarn stopped by itself after start. HA run (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8d129d41
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8d129d41
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8d129d41

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 8d129d414a9e33975620d5b0c430a7fba6d914b3
Parents: d0eaaae
Author: Dmytro Sen <ds...@apache.org>
Authored: Mon Jun 12 18:04:27 2017 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Mon Jun 12 18:04:27 2017 +0300

----------------------------------------------------------------------
 .../org/apache/ambari/tools/zk/ZkMigrator.java  | 44 +++++++++++++-
 .../apache/ambari/tools/zk/ZkMigratorTest.java  | 60 +++++++++++++++++++-
 .../core/resources/zkmigrator.py                | 17 +++++-
 .../package/scripts/resourcemanager.py          |  2 +-
 .../package/scripts/resourcemanager.py          |  2 +-
 5 files changed, 117 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8d129d41/ambari-agent/src/main/java/org/apache/ambari/tools/zk/ZkMigrator.java
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/java/org/apache/ambari/tools/zk/ZkMigrator.java b/ambari-agent/src/main/java/org/apache/ambari/tools/zk/ZkMigrator.java
index c100b85..e4b33eb 100644
--- a/ambari-agent/src/main/java/org/apache/ambari/tools/zk/ZkMigrator.java
+++ b/ambari-agent/src/main/java/org/apache/ambari/tools/zk/ZkMigrator.java
@@ -27,9 +27,12 @@ import org.apache.commons.cli.Options;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.ZooKeeper;
 
+import static org.apache.ambari.tools.zk.ZkAcl.append;
+
 /**
  * I'm a command line utility that provides functionality that the official zookeeper-client does not support.
  * E.g. I can set ACLs recursively on a znode.
+ * I can also remove a znode recursively.
  */
 public class ZkMigrator {
   private static final int SESSION_TIMEOUT_MILLIS = 5000;
@@ -37,13 +40,42 @@ public class ZkMigrator {
 
   public static void main(String[] args) throws Exception {
     CommandLine cli = new DefaultParser().parse(options(), args);
-    if (cli.hasOption("connection-string") && cli.hasOption("acl") && cli.hasOption("znode")) {
-      setAcls(cli.getOptionValue("connection-string"), cli.getOptionValue("znode"), ZkAcl.parse(cli.getOptionValue("acl")));
+    if (cli.hasOption("connection-string") && cli.hasOption("znode")) {
+      if (cli.hasOption("acl") && !cli.hasOption("delete")) {
+        setAcls(cli.getOptionValue("connection-string"), cli.getOptionValue("znode"), ZkAcl.parse(cli.getOptionValue("acl")));
+      } else if (cli.hasOption("delete") && !cli.hasOption("acl")) {
+        deleteZnodeRecursively(cli.getOptionValue("connection-string"), cli.getOptionValue("znode"));
+      } else {
+        printHelp();
+      }
     } else {
       printHelp();
     }
   }
 
+  private static void deleteZnodeRecursively(String connectionString, String znode) throws IOException, InterruptedException, KeeperException {
+    ZooKeeper client = ZkConnection.open(connectionString, SESSION_TIMEOUT_MILLIS, CONNECTION_TIMEOUT_MILLIS);
+    try {
+      ZkPathPattern paths = ZkPathPattern.fromString(znode);
+      for (String path : paths.findMatchingPaths(client, "/")) {
+        System.out.println("Recursively deleting znodes with matching path " + path);
+        deleteZnodeRecursively(client, path);
+      }
+    } catch (KeeperException.NoNodeException e) {
+      System.out.println("Could not delete " + znode + ". Reason: " + e.getMessage());
+    } finally {
+      client.close();
+    }
+  }
+
+  private static void deleteZnodeRecursively(ZooKeeper zkClient, String baseNode) throws KeeperException, InterruptedException {
+    for (String child : zkClient.getChildren(baseNode, null)) {
+      deleteZnodeRecursively(zkClient, append(baseNode, child));
+    }
+    System.out.println("Deleting znode " + baseNode);
+    zkClient.delete(baseNode, -1);
+  }
+
   private static Options options() {
     return new Options()
       .addOption(Option.builder("h")
@@ -67,6 +99,11 @@ public class ZkMigrator {
         .desc("znode path")
         .hasArg()
         .argName("znode")
+        .build())
+      .addOption(Option.builder("d")
+        .longOpt("delete")
+        .desc("delete specified znode and all it's children recursively")
+        .argName("delete")
         .build());
   }
 
@@ -82,7 +119,8 @@ public class ZkMigrator {
   }
 
   private static void printHelp() {
-    System.out.println("Usage zkmigrator -connection-string <host:port> -acl <scheme:id:permission> -znode /path/to/znode");
+    System.out.println("Usage zkmigrator -connection-string <host:port> -acl <scheme:id:permission> -znode /path/to/znode\n" +
+                       "              OR -connection-string <host:port> -znode /path/to/znode -delete");
     System.exit(1);
   }
 }
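
With the delete mode wired into the option parser above, the migrator is driven with the same flag style as the existing ACL mode; the test class below and the zkmigrator.py wrapper later in this commit build the same flags. A minimal sketch of a programmatic invocation (the connection string and znode path are placeholders, not values from the patch):

    // Illustrative only: delete a znode subtree through the new -delete mode.
    public class ZkMigratorDeleteExample {
      public static void main(String[] args) throws Exception {
        org.apache.ambari.tools.zk.ZkMigrator.main(new String[] {
            "-connection-string", "localhost:2181",
            "-znode", "/some/znode",   // a trailing /* would delete only the children
            "-delete"
        });
      }
    }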

http://git-wip-us.apache.org/repos/asf/ambari/blob/8d129d41/ambari-agent/src/test/java/org/apache/ambari/tools/zk/ZkMigratorTest.java
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/java/org/apache/ambari/tools/zk/ZkMigratorTest.java b/ambari-agent/src/test/java/org/apache/ambari/tools/zk/ZkMigratorTest.java
index b7e9c0c..be9727f 100644
--- a/ambari-agent/src/test/java/org/apache/ambari/tools/zk/ZkMigratorTest.java
+++ b/ambari-agent/src/test/java/org/apache/ambari/tools/zk/ZkMigratorTest.java
@@ -23,6 +23,7 @@ import static org.apache.zookeeper.ZooDefs.Perms.READ;
 import static org.apache.zookeeper.ZooDefs.Perms.WRITE;
 import static org.apache.zookeeper.ZooDefs.Perms.ALL;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.net.ServerSocket;
@@ -32,6 +33,7 @@ import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.retry.RetryOneTime;
 import org.apache.curator.test.TestingServer;
+import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.data.Id;
 import org.junit.After;
@@ -69,6 +71,38 @@ public class ZkMigratorTest {
   }
 
   @Test
+  public void testDeleteRecursive() throws Exception {
+    // Given
+    path("/parent");
+    path("/parent/a");
+    path("/parent/b");
+    path("/parent/b/q");
+    // When
+    deleteZnode("/parent");
+    // Then
+    assertRemoved("/parent");
+    assertRemoved("/parent/a");
+    assertRemoved("/parent/b");
+    assertRemoved("/parent/b/q");
+  }
+
+  @Test
+  public void testDeleteRecursiveWildcard() throws Exception {
+    // Given
+    path("/parent");
+    path("/parent/a");
+    path("/parent/b");
+    path("/parent/b/q");
+    // When
+    deleteZnode("/parent/*");
+    // Then
+    assertHasNode("/parent");
+    assertRemoved("/parent/a");
+    assertRemoved("/parent/b");
+    assertRemoved("/parent/b/q");
+  }
+
+  @Test
   public void testSetAclsRecursively() throws Exception {
     // Given
     path("/parent");
@@ -179,6 +213,14 @@ public class ZkMigratorTest {
     });
   }
 
+  private void deleteZnode(String path) throws Exception {
+    ZkMigrator.main(new String[] {
+      "-connection-string", zkTestServer.getConnectString(),
+      "-znode", path,
+      "-delete"
+    });
+  }
+
   private void assertHasAcl(String path, String scheme, String id, int permission) throws Exception {
     List<ACL> acls = cli.getACL().forPath(path);
     assertEquals("expected 1 acl on " + path, 1, acls.size());
@@ -186,6 +228,22 @@ public class ZkMigratorTest {
     assertEquals(permission, acls.get(0).getPerms());
   }
 
+  private void assertRemoved(String path) throws Exception {
+    try {
+      cli.getACL().forPath(path);
+      assertTrue(false);
+    } catch (KeeperException.NoNodeException e) {
+      //expected
+    }
+  }
+  private void assertHasNode(String path) throws Exception {
+    try {
+      cli.getACL().forPath(path);
+    } catch (KeeperException.NoNodeException e) {
+      assertTrue(false);
+    }
+  }
+
   static class Port {
     public static int free() throws IOException {
       ServerSocket socket = null;
@@ -199,4 +257,4 @@ public class ZkMigratorTest {
       }
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8d129d41/ambari-common/src/main/python/resource_management/core/resources/zkmigrator.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/resources/zkmigrator.py b/ambari-common/src/main/python/resource_management/core/resources/zkmigrator.py
index 344b9a4..49fb5e8 100644
--- a/ambari-common/src/main/python/resource_management/core/resources/zkmigrator.py
+++ b/ambari-common/src/main/python/resource_management/core/resources/zkmigrator.py
@@ -36,12 +36,25 @@ class ZkMigrator:
   def set_acls(self, znode, acl, tries=3):
     Logger.info(format("Setting ACL on znode {znode} to {acl}"))
     Execute(
-      self._command(znode, acl), \
+      self._acl_command(znode, acl), \
       user=self.user, \
       environment={ 'JAVA_HOME': self.java_home }, \
       logoutput=True, \
       tries=tries)
 
-  def _command(self, znode, acl):
+  def delete_node(self, znode, tries=3):
+    Logger.info(format("Removing znode {znode}"))
+    Execute(
+      self._delete_command(znode), \
+      user=self.user, \
+      environment={ 'JAVA_HOME': self.java_home }, \
+      logoutput=True, \
+      tries=tries)
+
+  def _acl_command(self, znode, acl):
     return "{0} -Djava.security.auth.login.config={1} -jar {2} -connection-string {3} -znode {4} -acl {5}".format( \
       self.java_exec, self.jaas_file, self.zkmigrator_jar, self.zk_host, znode, acl)
+
+  def _delete_command(self, znode):
+    return "{0} -Djava.security.auth.login.config={1} -jar {2} -connection-string {3} -znode {4} -delete".format( \
+      self.java_exec, self.jaas_file, self.zkmigrator_jar, self.zk_host, znode)

http://git-wip-us.apache.org/repos/asf/ambari/blob/8d129d41/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
index 81b99e6..7d024b1 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
@@ -181,8 +181,8 @@ class ResourcemanagerDefault(Resourcemanager):
       params.yarn_jaas_file, \
       params.yarn_user)
     zkmigrator.set_acls(params.rm_zk_znode, 'world:anyone:crdwa')
-    zkmigrator.set_acls(params.rm_zk_failover_znode, 'world:anyone:crdwa')
     zkmigrator.set_acls(params.hadoop_registry_zk_root, 'world:anyone:crdwa')
+    zkmigrator.delete_node(params.rm_zk_failover_znode)
 
   def wait_for_dfs_directories_created(self, *dirs):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/8d129d41/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/resourcemanager.py
index 78675bf..f6d6315 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/resourcemanager.py
@@ -122,8 +122,8 @@ class ResourcemanagerDefault(Resourcemanager):
       params.yarn_jaas_file, \
       params.yarn_user)
     zkmigrator.set_acls(params.rm_zk_znode, 'world:anyone:crdwa')
-    zkmigrator.set_acls(params.rm_zk_failover_znode, 'world:anyone:crdwa')
     zkmigrator.set_acls(params.hadoop_registry_zk_root, 'world:anyone:crdwa')
+    zkmigrator.delete_node(params.rm_zk_failover_znode)
 
   def start(self, env, upgrade_type=None):
     import params


[36/50] [abbrv] ambari git commit: AMBARI-21128 Add AMS HA support to local metrics aggregator application (dsen)

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/controller.py
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/controller.py b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/controller.py
index e5da9ba..d161269 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/controller.py
+++ b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/controller.py
@@ -138,7 +138,7 @@ class Controller(threading.Thread):
     if self.aggregator:
       self.aggregator.stop()
     if self.aggregator_watchdog:
-      self.aggregator.stop()
+      self.aggregator_watchdog.stop()
     self.aggregator = Aggregator(self.config, self._stop_handler)
     self.aggregator_watchdog = AggregatorWatchdog(self.config, self._stop_handler)
     self.aggregator.start()
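
The one-line fix above matters because the aggregator and its watchdog are separate threads, each stopped through its own flag; calling stop() on the aggregator twice left the old watchdog unstopped across restarts. A small, self-contained sketch of that stop-flag pattern (illustrative classes, not Ambari's actual Aggregator/AggregatorWatchdog):

  import threading
  import time

  class StoppableWorker(threading.Thread):
      def __init__(self, name):
          threading.Thread.__init__(self, name=name)
          self._stop_event = threading.Event()

      def run(self):
          while not self._stop_event.is_set():
              time.sleep(0.1)            # periodic work would go here

      def stop(self):
          self._stop_event.set()         # stops only *this* worker

  aggregator = StoppableWorker('aggregator')
  watchdog = StoppableWorker('watchdog')
  aggregator.start(); watchdog.start()

  # Restart sequence mirroring the corrected controller logic:
  aggregator.stop()
  watchdog.stop()                        # previously aggregator.stop() was called again here
  aggregator, watchdog = StoppableWorker('aggregator'), StoppableWorker('watchdog')
  aggregator.start(); watchdog.start()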


[35/50] [abbrv] ambari git commit: AMBARI-21209. Incorrect ambari DDL script for Oracle DB (dlysnichenko)

Posted by nc...@apache.org.
AMBARI-21209. Incorrect ambari DDL script for Oracle DB (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/190ecad0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/190ecad0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/190ecad0

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 190ecad0063340d163d220baddbb6b40b6d7ab83
Parents: 215bd7a
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Fri Jun 9 12:54:05 2017 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Fri Jun 9 12:54:05 2017 +0300

----------------------------------------------------------------------
 .../src/main/resources/Ambari-DDL-Oracle-CREATE.sql     | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/190ecad0/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 903fc9e..8f9406d 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -76,7 +76,7 @@ CREATE TABLE clusterconfig (
   config_data CLOB NOT NULL,
   config_attributes CLOB,
   create_timestamp NUMBER(19) NOT NULL,
-  service_deleted SMALLINT NOT NULL DEFAULT 0,
+  service_deleted SMALLINT DEFAULT 0 NOT NULL,
   selected_timestamp NUMBER(19) DEFAULT 0 NOT NULL,
   CONSTRAINT PK_clusterconfig PRIMARY KEY (config_id),
   CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id),
@@ -324,8 +324,8 @@ CREATE TABLE request (
   request_context VARCHAR(255),
   request_type VARCHAR(255),
   start_time NUMBER(19) NOT NULL,
-  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
-  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
+  display_status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
   cluster_host_info BLOB NOT NULL,
   CONSTRAINT PK_request PRIMARY KEY (request_id),
   CONSTRAINT FK_request_schedule_id FOREIGN KEY (request_schedule_id) REFERENCES requestschedule (schedule_id));
@@ -341,8 +341,8 @@ CREATE TABLE stage (
   command_params BLOB,
   host_params BLOB,
   command_execution_type VARCHAR2(32) DEFAULT 'STAGE' NOT NULL,
-  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
-  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
+  display_status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
   CONSTRAINT PK_stage PRIMARY KEY (stage_id, request_id),
   CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES request (request_id));
 
@@ -361,7 +361,7 @@ CREATE TABLE host_role_command (
   start_time NUMBER(19) NOT NULL,
   original_start_time NUMBER(19) NOT NULL,
   end_time NUMBER(19),
-  status VARCHAR2(255) NOT NULL DEFAULT 'PENDING',
+  status VARCHAR2(255) DEFAULT 'PENDING' NOT NULL,
   auto_skip_on_failure NUMBER(1) DEFAULT 0 NOT NULL,
   std_error BLOB NULL,
   std_out BLOB NULL,


[09/50] [abbrv] ambari git commit: AMBARI-21122 - Part One: Specify the script directly in alert target for script-based alert dispatchers (Yao Lei via jonathanhurley)

Posted by nc...@apache.org.
AMBARI-21122 - Part One: Specify the script directly in alert target for script-based alert dispatchers (Yao Lei via jonathanhurley)



Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4247f691
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4247f691
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4247f691

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 4247f6919c329fc3da9e4ea8a0aa62aacd4793e3
Parents: e61fea5
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Jun 6 10:22:45 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Jun 6 10:22:45 2017 -0400

----------------------------------------------------------------------
 .../server/configuration/Configuration.java     | 17 +++++
 .../dispatchers/AlertScriptDispatcher.java      | 45 ++++++++++++-
 .../dispatchers/AlertScriptDispatcherTest.java  | 67 ++++++++++++++++++++
 3 files changed, 127 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4247f691/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 965b57b..fb06e6d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -2721,6 +2721,14 @@ public class Configuration {
   public static final ConfigurationProperty<Integer> TLS_EPHEMERAL_DH_KEY_SIZE = new ConfigurationProperty<>(
     "security.server.tls.ephemeral_dh_key_size", 2048);
 
+  /**
+   * The directory for scripts which are used by the alert notification dispatcher.
+   */
+  @Markdown(description = "The directory for scripts which are used by the alert notification dispatcher.")
+  public static final ConfigurationProperty<String> DISPATCH_PROPERTY_SCRIPT_DIRECTORY = new ConfigurationProperty<>(
+          "notification.dispatch.alert.script.directory",AmbariPath.getPath("/var/lib/ambari-server/resources/scripts"));
+
+
   private static final Logger LOG = LoggerFactory.getLogger(
     Configuration.class);
 
@@ -5587,6 +5595,15 @@ public class Configuration {
   }
 
   /**
+   * Gets the dispatch script directory.
+   *
+   * @return the dispatch script directory
+   */
+  public String getDispatchScriptDirectory() {
+    return getProperty(DISPATCH_PROPERTY_SCRIPT_DIRECTORY);
+  }
+
+  /**
    * Generates a markdown table which includes:
    * <ul>
    * <li>Property key name</li>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4247f691/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AlertScriptDispatcher.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AlertScriptDispatcher.java b/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AlertScriptDispatcher.java
index 84bfe52..60fe4f4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AlertScriptDispatcher.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AlertScriptDispatcher.java
@@ -17,6 +17,7 @@
  */
 package org.apache.ambari.server.notifications.dispatchers;
 
+import java.io.File;
 import java.util.Map;
 import java.util.concurrent.Executor;
 import java.util.concurrent.LinkedBlockingQueue;
@@ -74,6 +75,13 @@ public class AlertScriptDispatcher implements NotificationDispatcher {
   public static final String DISPATCH_PROPERTY_SCRIPT_CONFIG_KEY = "ambari.dispatch-property.script";
 
   /**
+   * A dispatch property that instructs this dispatcher to lookup script by filename
+   * from {@link org.apache.ambari.server.state.alert.AlertTarget}.
+   */
+  public static final String DISPATCH_PROPERTY_SCRIPT_FILENAME_KEY  = "ambari.dispatch-property.script.filename";
+
+
+  /**
    * Logger.
    */
   private static final Logger LOG = LoggerFactory.getLogger(AlertScriptDispatcher.class);
@@ -166,8 +174,13 @@ public class AlertScriptDispatcher implements NotificationDispatcher {
    */
   @Override
   public void dispatch(Notification notification) {
-    String scriptKey = getScriptConfigurationKey(notification);
-    String script = m_configuration.getProperty(scriptKey);
+    String scriptKey = null;
+    String script = getScriptLocation(notification);
+
+    if( null == script){ // Script filename is null.
+        scriptKey = getScriptConfigurationKey(notification);
+        script = m_configuration.getProperty(scriptKey);
+    }
 
     // this dispatcher requires a script to run
     if (null == script) {
@@ -208,6 +221,34 @@ public class AlertScriptDispatcher implements NotificationDispatcher {
   }
 
   /**
+   * Gets the dispatch script location from ambari.properties and notification.
+   *
+   * @param notification
+   * @return the dispatch script location.If script filename is {@code null},
+   *         {@code null} will be returned.
+   */
+
+  String getScriptLocation(Notification notification){
+    String scriptName = null;
+    String scriptDir = null;
+
+    if( null == notification || null == notification.DispatchProperties )
+        return null;
+
+    scriptName = notification.DispatchProperties.get(DISPATCH_PROPERTY_SCRIPT_FILENAME_KEY);
+    if( null == scriptName) {
+        LOG.warn("the {} configuration property was not found for dispatching notification",
+                DISPATCH_PROPERTY_SCRIPT_FILENAME_KEY);
+        return null;
+    }
+
+    scriptDir = m_configuration.getDispatchScriptDirectory();
+
+    return scriptDir + File.separator + scriptName;
+  }
+
+
+  /**
    * {@inheritDoc}
    * <p/>
    * Returns {@code false} always.
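
Putting the two halves of this commit together: the dispatcher now prefers a script filename carried on the alert target, joined with the server-side script directory, and only falls back to the older per-target ambari.properties key. A compact sketch of that resolution order (Python used for illustration; the property names mirror the Java above and the fallback default comes from the Configuration.java change in this same commit):

  import os

  SCRIPT_DIR_DEFAULT = '/var/lib/ambari-server/resources/scripts'

  def resolve_alert_script(dispatch_properties, ambari_properties):
      # 1) ambari.dispatch-property.script.filename on the alert target wins
      filename = (dispatch_properties or {}).get('ambari.dispatch-property.script.filename')
      if filename:
          script_dir = ambari_properties.get('notification.dispatch.alert.script.directory',
                                             SCRIPT_DIR_DEFAULT)
          return os.path.join(script_dir, filename)

      # 2) otherwise fall back to the key named by ambari.dispatch-property.script,
      #    whose value in ambari.properties is a full script path
      script_key = (dispatch_properties or {}).get('ambari.dispatch-property.script')
      return ambari_properties.get(script_key) if script_key else None

  # e.g. resolve_alert_script({'ambari.dispatch-property.script.filename': 'foo.py'}, {})
  # returns '/var/lib/ambari-server/resources/scripts/foo.py', matching the new unit test below.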

http://git-wip-us.apache.org/repos/asf/ambari/blob/4247f691/ambari-server/src/test/java/org/apache/ambari/server/notifications/dispatchers/AlertScriptDispatcherTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/notifications/dispatchers/AlertScriptDispatcherTest.java b/ambari-server/src/test/java/org/apache/ambari/server/notifications/dispatchers/AlertScriptDispatcherTest.java
index f1f320d..4b1480c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/notifications/dispatchers/AlertScriptDispatcherTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/notifications/dispatchers/AlertScriptDispatcherTest.java
@@ -60,6 +60,8 @@ public class AlertScriptDispatcherTest {
 
   private static final String SCRIPT_CONFIG_VALUE = "/foo/script.py";
 
+  private static final String DISPATCH_PROPERTY_SCRIPT_DIRECTORY_KEY = "notification.dispatch.alert.script.directory";
+
   private Injector m_injector;
 
   @Inject
@@ -190,6 +192,71 @@ public class AlertScriptDispatcherTest {
   }
 
   /**
+   * Tests the invocation of method getScriptLocation().
+   */
+  @Test
+  public void testGetScriptLocation() throws Exception {
+    AlertScriptDispatcher dispatcher = (AlertScriptDispatcher) m_dispatchFactory.getDispatcher(TargetType.ALERT_SCRIPT.name());
+    m_injector.injectMembers(dispatcher);
+
+    DispatchCallback callback = EasyMock.createNiceMock(DispatchCallback.class);
+    AlertNotification notification = new AlertNotification();
+    notification.Callback = callback;
+    notification.CallbackIds = Collections.singletonList(UUID.randomUUID().toString());
+    notification.DispatchProperties = new HashMap();
+
+    //1.ambari.dispatch-property.script.filename is not set in notification
+    Assert.assertEquals(dispatcher.getScriptLocation(notification),null);
+
+    //2.ambari.dispatch-property.script.filename is set in notification,but notification.dispatch.alert.script.directory not in ambari.properties
+    final String filename = "foo.py";
+    notification.DispatchProperties.put(AlertScriptDispatcher.DISPATCH_PROPERTY_SCRIPT_FILENAME_KEY,filename);
+    Assert.assertEquals(dispatcher.getScriptLocation(notification),"/var/lib/ambari-server/resources/scripts/foo.py");
+
+    //3.both properties are set
+    final String scriptDirectory = "/var/lib/ambari-server/resources/scripts/foo";
+    m_configuration.setProperty(DISPATCH_PROPERTY_SCRIPT_DIRECTORY_KEY,scriptDirectory);
+    Assert.assertEquals(dispatcher.getScriptLocation(notification),"/var/lib/ambari-server/resources/scripts/foo/foo.py");
+  }
+
+
+  /**
+   * Tests that we will pickup the correct script when script filename is specified on the notification
+   */
+  @Test
+  public void testCustomScriptConfigurationByScriptFilename() throws Exception {
+    final String filename = "foo.py";
+    final String scriptDirectory = "/var/lib/ambari-server/resources/scripts/foo";
+    m_configuration.setProperty(DISPATCH_PROPERTY_SCRIPT_DIRECTORY_KEY,scriptDirectory);
+
+    DispatchCallback callback = EasyMock.createNiceMock(DispatchCallback.class);
+    AlertNotification notification = new AlertNotification();
+    notification.Callback = callback;
+    notification.CallbackIds = Collections.singletonList(UUID.randomUUID().toString());
+
+    notification.DispatchProperties = new HashMap();
+    notification.DispatchProperties.put(AlertScriptDispatcher.DISPATCH_PROPERTY_SCRIPT_FILENAME_KEY,filename);
+
+    callback.onSuccess(notification.CallbackIds);
+    EasyMock.expectLastCall().once();
+
+    AlertScriptDispatcher dispatcher = (AlertScriptDispatcher) m_dispatchFactory.getDispatcher(TargetType.ALERT_SCRIPT.name());
+    m_injector.injectMembers(dispatcher);
+
+    ProcessBuilder powerMockProcessBuilder = m_injector.getInstance(ProcessBuilder.class);
+    EasyMock.expect(dispatcher.getProcessBuilder(dispatcher.getScriptLocation(notification), notification)).andReturn(
+            powerMockProcessBuilder).once();
+
+    EasyMock.replay(callback, dispatcher);
+
+    dispatcher.dispatch(notification);
+
+    EasyMock.verify(callback, dispatcher);
+    PowerMock.verifyAll();
+  }
+
+
+  /**
    * Tests that a process with an error code of 255 causes the failure callback
    * to be invoked.
    *


[21/50] [abbrv] ambari git commit: AMBARI-21182. Agent Host Disk Usage Alert Hardcodes the Stack Directory (aonishuk)

Posted by nc...@apache.org.
AMBARI-21182. Agent Host Disk Usage Alert Hardcodes the Stack Directory (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d21d434a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d21d434a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d21d434a

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d21d434a16ec3078f9c2098327b559476bab8bb1
Parents: 3146a19
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Jun 8 15:14:17 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Jun 8 15:14:17 2017 +0300

----------------------------------------------------------------------
 .../src/test/python/host_scripts/TestAlertDiskSpace.py  | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d21d434a/ambari-server/src/test/python/host_scripts/TestAlertDiskSpace.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/host_scripts/TestAlertDiskSpace.py b/ambari-server/src/test/python/host_scripts/TestAlertDiskSpace.py
index 90a5a1e..0d47061 100644
--- a/ambari-server/src/test/python/host_scripts/TestAlertDiskSpace.py
+++ b/ambari-server/src/test/python/host_scripts/TestAlertDiskSpace.py
@@ -41,7 +41,7 @@ class TestAlertDiskSpace(RMFTestCase):
       total = 21673930752L, used = 5695861760L,
       free = 15978068992L, path="/")
 
-    res = alert_disk_space.execute()
+    res = alert_disk_space.execute(configurations={'{{cluster-env/stack_root}}': '/usr/hdp'})
     self.assertEqual(res,
       ('OK', ['Capacity Used: [26.28%, 5.7 GB], Capacity Total: [21.7 GB], path=/']))
 
@@ -50,7 +50,7 @@ class TestAlertDiskSpace(RMFTestCase):
       total = 21673930752L, used = 14521533603L,
       free = 7152397149L, path="/")
 
-    res = alert_disk_space.execute()
+    res = alert_disk_space.execute(configurations={'{{cluster-env/stack_root}}': '/usr/hdp'})
     self.assertEqual(res, (
       'WARNING',
       ['Capacity Used: [67.00%, 14.5 GB], Capacity Total: [21.7 GB], path=/']))
@@ -60,7 +60,7 @@ class TestAlertDiskSpace(RMFTestCase):
       total = 21673930752L, used = 20590234214L,
       free = 1083696538, path="/")
 
-    res = alert_disk_space.execute()
+    res = alert_disk_space.execute(configurations={'{{cluster-env/stack_root}}': '/usr/hdp'})
     self.assertEqual(res, ('CRITICAL',
     ['Capacity Used: [95.00%, 20.6 GB], Capacity Total: [21.7 GB], path=/']))
 
@@ -69,7 +69,7 @@ class TestAlertDiskSpace(RMFTestCase):
       total = 5418482688L, used = 1625544806L,
       free = 3792937882L, path="/")
 
-    res = alert_disk_space.execute()
+    res = alert_disk_space.execute(configurations={'{{cluster-env/stack_root}}': '/usr/hdp'})
     self.assertEqual(res, ('WARNING', [
       'Capacity Used: [30.00%, 1.6 GB], Capacity Total: [5.4 GB], path=/. Total free space is less than 5.0 GB']))
 
@@ -81,7 +81,7 @@ class TestAlertDiskSpace(RMFTestCase):
       total = 21673930752L, used = 5695861760L,
       free = 15978068992L, path="/usr/hdp")
 
-    res = alert_disk_space.execute()
+    res = alert_disk_space.execute(configurations={'{{cluster-env/stack_root}}': '/usr/hdp'})
     self.assertEqual(res,
       ('OK', ['Capacity Used: [26.28%, 5.7 GB], Capacity Total: [21.7 GB], path=/usr/hdp']))
 
@@ -90,6 +90,6 @@ class TestAlertDiskSpace(RMFTestCase):
       total = 5418482688L, used = 1625544806L,
       free = 3792937882L, path="/usr/hdp")
 
-    res = alert_disk_space.execute()
+    res = alert_disk_space.execute(configurations={'{{cluster-env/stack_root}}': '/usr/hdp'})
     self.assertEqual(res, (
       'WARNING', ["Capacity Used: [30.00%, 1.6 GB], Capacity Total: [5.4 GB], path=/usr/hdp. Total free space is less than 5.0 GB"]))
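
Each test now hands the alert its stack root through the standard configurations dictionary rather than assuming a hardcoded /usr/hdp. A hedged sketch of how a script alert typically consumes that value (illustrative helper only, not the actual alert_disk_space.py code):

  STACK_ROOT_KEY = '{{cluster-env/stack_root}}'
  DEFAULT_STACK_ROOT = '/usr/hdp'   # assumed fallback when the key is absent

  def get_stack_root(configurations):
      # Ambari passes a dict of resolved '{{config-type/property}}' keys to execute().
      if configurations and STACK_ROOT_KEY in configurations:
          return configurations[STACK_ROOT_KEY]
      return DEFAULT_STACK_ROOT

  # get_stack_root({'{{cluster-env/stack_root}}': '/usr/hdp'}) -> '/usr/hdp'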


[14/50] [abbrv] ambari git commit: AMBARI-21187. Get rid of deprecated jdk install link in the Dockerfile of Log Search (oleewere)

Posted by nc...@apache.org.
AMBARI-21164. Remove unused import to fix build error


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1586a1a9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1586a1a9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1586a1a9

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 1586a1a9253e5c456161331a5766ed52e250ad99
Parents: 84c4e0d
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Wed Jun 7 15:34:35 2017 +0200
Committer: Attila Doroszlai <ad...@hortonworks.com>
Committed: Wed Jun 7 15:34:35 2017 +0200

----------------------------------------------------------------------
 .../ambari/server/checks/ServiceCheckValidityCheckTest.java       | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1586a1a9/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
index 91fd72a..75970cc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
@@ -24,7 +24,6 @@ import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
-import java.util.Arrays;
 import java.util.Collections;
 
 import org.apache.ambari.server.AmbariException;
@@ -231,4 +230,4 @@ public class ServiceCheckValidityCheckTest {
     serviceCheckValidityCheck.perform(check, new PrereqCheckRequest(CLUSTER_NAME));
     Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
   }
-}
\ No newline at end of file
+}


[18/50] [abbrv] ambari git commit: AMBARI-21146. Knox JAAS configuration file should not allow the Kerberos ticket cache to be used when establishing its identity on startup (Attila Magyar via adoroszlai)

Posted by nc...@apache.org.
AMBARI-21146. Knox JAAS configuration file should not allow the Kerberos ticket cache to be used when establishing its identity on startup (Attila Magyar via adoroszlai)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e71f49e4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e71f49e4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e71f49e4

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: e71f49e4ef30ff720ad4f8b7fb3823d68acd48cc
Parents: 9b44b62
Author: Attila Magyar <am...@hortonworks.com>
Authored: Thu Jun 8 11:23:29 2017 +0200
Committer: Attila Doroszlai <ad...@hortonworks.com>
Committed: Thu Jun 8 11:23:29 2017 +0200

----------------------------------------------------------------------
 .../KNOX/0.5.0.2.2/package/templates/krb5JAASLogin.conf.j2  | 9 +++------
 .../KNOX/0.5.0.3.0/package/templates/krb5JAASLogin.conf.j2  | 9 +++------
 2 files changed, 6 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e71f49e4/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/templates/krb5JAASLogin.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/templates/krb5JAASLogin.conf.j2 b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/templates/krb5JAASLogin.conf.j2
index fa3237b..29b2179 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/templates/krb5JAASLogin.conf.j2
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/templates/krb5JAASLogin.conf.j2
@@ -17,14 +17,11 @@
 #}
 com.sun.security.jgss.initiate {
 com.sun.security.auth.module.Krb5LoginModule required
-renewTGT=true
+renewTGT=false
 doNotPrompt=true
 useKeyTab=true
 keyTab="{{knox_keytab_path}}"
 principal="{{knox_principal_name}}"
-isInitiator=true
 storeKey=true
-useTicketCache=true
-client=true;
-};
-
+useTicketCache=false;
+};
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/e71f49e4/ambari-server/src/main/resources/common-services/KNOX/0.5.0.3.0/package/templates/krb5JAASLogin.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.3.0/package/templates/krb5JAASLogin.conf.j2 b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.3.0/package/templates/krb5JAASLogin.conf.j2
index fa3237b..29b2179 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.3.0/package/templates/krb5JAASLogin.conf.j2
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.3.0/package/templates/krb5JAASLogin.conf.j2
@@ -17,14 +17,11 @@
 #}
 com.sun.security.jgss.initiate {
 com.sun.security.auth.module.Krb5LoginModule required
-renewTGT=true
+renewTGT=false
 doNotPrompt=true
 useKeyTab=true
 keyTab="{{knox_keytab_path}}"
 principal="{{knox_principal_name}}"
-isInitiator=true
 storeKey=true
-useTicketCache=true
-client=true;
-};
-
+useTicketCache=false;
+};
\ No newline at end of file


[50/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-feature-AMBARI-12556

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-12556


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/382da979
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/382da979
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/382da979

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 382da97990dfe673bb6d4d69590444ddbbb455eb
Parents: 871f85b 8d129d4
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Jun 12 12:44:40 2017 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Jun 12 12:44:40 2017 -0400

----------------------------------------------------------------------
 .../org/apache/ambari/tools/zk/ZkMigrator.java  |  44 +-
 .../apache/ambari/tools/zk/ZkMigratorTest.java  |  60 +-
 .../main/python/ambari_commons/inet_utils.py    |  32 ++
 .../src/main/python/ambari_commons/os_check.py  |   6 +
 .../ambari_commons/resources/os_family.json     |  10 +
 .../core/providers/__init__.py                  |  19 +-
 .../core/resources/zkmigrator.py                |  17 +-
 .../libraries/providers/__init__.py             |   3 -
 .../InfraKerberosHostValidator.java             |  54 --
 .../InfraRuleBasedAuthorizationPlugin.java      | 542 -------------------
 .../InfraUserRolesLookupStrategy.java           |  49 --
 .../security/InfraKerberosHostValidator.java    |  54 ++
 .../InfraRuleBasedAuthorizationPlugin.java      | 542 +++++++++++++++++++
 .../security/InfraUserRolesLookupStrategy.java  |  49 ++
 ambari-logsearch/docker/Dockerfile              |   6 +-
 ambari-metrics/ambari-metrics-common/pom.xml    |   5 +
 .../timeline/AbstractTimelineMetricsSink.java   |  64 ++-
 .../sink/timeline/AppCookieManager.java         | 219 ++++++++
 .../sink/timeline/AppCookieManagerTest.java     |  52 ++
 .../ambari-metrics-host-aggregator/pom.xml      |  30 +-
 .../AbstractMetricPublisherThread.java          | 134 -----
 .../aggregator/AggregatedMetricsPublisher.java  | 101 ----
 .../host/aggregator/AggregatorApplication.java  |  98 ++--
 .../host/aggregator/AggregatorWebService.java   |   2 +-
 .../host/aggregator/RawMetricsPublisher.java    |  60 --
 .../host/aggregator/TimelineMetricsHolder.java  |  26 +-
 .../sink/timeline/AbstractMetricPublisher.java  | 169 ++++++
 .../timeline/AggregatedMetricsPublisher.java    | 103 ++++
 .../sink/timeline/RawMetricsPublisher.java      |  65 +++
 .../aggregator/AggregatorApplicationTest.java   |  55 ++
 .../aggregator/AggregatorWebServiceTest.java    | 135 +++++
 .../aggregator/TimelineMetricsHolderTest.java   | 107 ++++
 .../timeline/AbstractMetricPublisherTest.java   |  82 +++
 .../AggregatedMetricsPublisherTest.java         | 154 ++++++
 .../sink/timeline/RawMetricsPublisherTest.java  | 151 ++++++
 .../src/main/python/core/aggregator.py          |   6 +-
 .../src/main/python/core/controller.py          |   2 +-
 ambari-project/pom.xml                          |   5 +
 .../security/kerberos/kerberos_descriptor.md    |   5 +-
 ambari-server/pom.xml                           |  24 +-
 .../server/KdcServerConnectionVerification.java |   2 +-
 .../ambari/server/StateRecoveryManager.java     |   2 +-
 .../actionmanager/ActionDBAccessorImpl.java     |   6 +-
 .../server/actionmanager/ActionManager.java     |   8 +-
 .../server/actionmanager/ActionScheduler.java   |  18 +-
 .../ambari/server/actionmanager/Stage.java      |   6 +-
 .../apache/ambari/server/agent/ActionQueue.java |   2 +-
 .../ambari/server/agent/AgentRequests.java      |  10 +-
 .../ambari/server/agent/ExecutionCommand.java   |   6 +-
 .../ambari/server/agent/HeartBeatHandler.java   |  17 +-
 .../ambari/server/agent/HeartbeatMonitor.java   |  14 +-
 .../ambari/server/agent/HeartbeatProcessor.java |   4 +-
 .../apache/ambari/server/agent/HostInfo.java    |   2 +-
 .../apache/ambari/server/agent/Register.java    |   2 +-
 .../ambari/server/agent/rest/AgentResource.java |  18 +-
 .../ambari/server/api/query/QueryImpl.java      |   4 +-
 .../server/api/rest/BootStrapResource.java      |   6 +-
 .../server/api/services/AmbariMetaInfo.java     |  12 +-
 .../ambari/server/api/services/BaseRequest.java |   2 +-
 .../api/services/PersistKeyValueService.java    |  16 +-
 .../services/parsers/JsonRequestBodyParser.java |   2 +-
 .../StackAdvisorBlueprintProcessor.java         |   2 +-
 .../commands/StackAdvisorCommand.java           |  10 +-
 .../views/ViewDataMigrationService.java         |   6 +-
 .../server/bootstrap/BSHostStatusCollector.java |  12 +-
 .../ambari/server/bootstrap/BSRunner.java       |  14 +-
 .../ambari/server/bootstrap/BootStrapImpl.java  |   6 +-
 .../server/checks/AbstractCheckDescriptor.java  |   7 -
 .../checks/ServiceCheckValidityCheck.java       |   9 +-
 .../server/checks/UpgradeCheckRegistry.java     |   4 +-
 .../server/configuration/Configuration.java     |  17 +
 .../controller/AmbariActionExecutionHelper.java |   9 +-
 .../AmbariCustomCommandExecutionHelper.java     |  17 +-
 .../server/controller/AmbariHandlerList.java    |   2 +-
 .../AmbariManagementControllerImpl.java         |  96 ++--
 .../ambari/server/controller/AmbariServer.java  |   2 +-
 .../server/controller/ControllerModule.java     |   2 +-
 .../server/controller/KerberosHelperImpl.java   |  21 +-
 .../internal/AbstractPropertyProvider.java      |   4 +-
 .../internal/AbstractProviderModule.java        |  15 +-
 .../internal/ActionResourceProvider.java        |   3 +-
 .../controller/internal/AppCookieManager.java   |   8 +-
 .../controller/internal/BaseProvider.java       |  10 +-
 .../BlueprintConfigurationProcessor.java        |  28 +-
 .../internal/ClientConfigResourceProvider.java  |   2 +-
 .../internal/ClusterControllerImpl.java         |   4 +-
 .../internal/ClusterResourceProvider.java       |   6 +-
 ...atibleRepositoryVersionResourceProvider.java |   2 +-
 .../internal/ComponentResourceProvider.java     |   6 +-
 .../internal/ConfigGroupResourceProvider.java   |  15 +-
 .../internal/GroupResourceProvider.java         |   4 +-
 .../internal/HostComponentResourceProvider.java |   2 +-
 .../internal/HostResourceProvider.java          |  12 +-
 .../controller/internal/HostStatusHelper.java   |   2 +-
 .../internal/JobResourceProvider.java           |  12 +-
 .../internal/MemberResourceProvider.java        |   4 +-
 .../PreUpgradeCheckResourceProvider.java        |   8 +-
 .../internal/RequestResourceProvider.java       |   6 +-
 .../internal/RequestStageContainer.java         |   2 +-
 .../internal/ServiceResourceProvider.java       |  53 +-
 .../internal/TaskAttemptResourceProvider.java   |   6 +-
 .../controller/internal/URLStreamProvider.java  |  10 +-
 .../internal/UpgradeResourceProvider.java       |   5 -
 .../internal/UserResourceProvider.java          |   4 +-
 .../internal/WidgetLayoutResourceProvider.java  |   4 +-
 .../internal/WorkflowResourceProvider.java      |   6 +-
 .../server/controller/jmx/JMXMetricHolder.java  |   2 +-
 .../logging/LogSearchDataRetrievalService.java  |   4 +-
 .../logging/LoggingRequestHelperImpl.java       |  15 +-
 .../logging/LoggingSearchPropertyProvider.java  |   4 +-
 .../metrics/MetricsCollectorHAManager.java      |   2 +-
 .../metrics/MetricsDownsamplingMethod.java      |   5 +-
 .../metrics/timeline/AMSPropertyProvider.java   |   2 +-
 .../metrics/timeline/MetricsRequestHelper.java  |  20 +-
 .../timeline/cache/TimelineMetricCache.java     |  28 +-
 .../cache/TimelineMetricCacheEntryFactory.java  |  46 +-
 .../cache/TimelineMetricsCacheSizeOfEngine.java |   6 +-
 .../controller/utilities/KerberosChecker.java   |   2 +-
 .../ServiceCalculatedStateFactory.java          |   2 +-
 .../server/credentialapi/CredentialUtil.java    |   2 +-
 .../customactions/ActionDefinitionManager.java  |   6 +-
 .../alerts/AlertHashInvalidationListener.java   |   2 +-
 .../alerts/AlertLifecycleListener.java          |   2 +-
 .../alerts/AlertMaintenanceModeListener.java    |   2 +-
 .../AlertServiceComponentHostListener.java      |   2 +-
 .../alerts/AlertServiceStateListener.java       |   2 +-
 .../alerts/AlertStateChangedListener.java       |   2 +-
 .../upgrade/HostVersionOutOfSyncListener.java   |   2 -
 .../CachedRoleCommandOrderProvider.java         |   2 +-
 .../server/metadata/RoleCommandOrder.java       |   7 +-
 .../system/impl/AmbariPerformanceMonitor.java   |   4 +-
 .../system/impl/DatabaseMetricsSource.java      |   2 +-
 .../metrics/system/impl/JvmMetricsSource.java   |   2 +-
 .../system/impl/MetricsConfiguration.java       |   2 +-
 .../metrics/system/impl/MetricsServiceImpl.java |   2 +-
 .../dispatchers/AlertScriptDispatcher.java      |  45 +-
 .../ambari/server/orm/DBAccessorImpl.java       |   2 +-
 .../server/orm/dao/AlertDefinitionDAO.java      |   2 +-
 .../ambari/server/orm/dao/HostVersionDAO.java   |   6 +-
 .../ambari/server/orm/dao/ServiceConfigDAO.java |   1 -
 .../orm/entities/RepositoryVersionEntity.java   |   2 +-
 .../orm/helpers/dbms/GenericDbmsHelper.java     |   4 +-
 .../server/resources/ResourceManager.java       |  11 +-
 .../server/resources/api/rest/GetResource.java  |   9 +-
 .../scheduler/AbstractLinearExecutionJob.java   |   6 +-
 .../scheduler/ExecutionScheduleManager.java     |  12 +-
 .../scheduler/ExecutionSchedulerImpl.java       |   4 +-
 .../server/security/CertificateManager.java     |   6 +-
 .../ambari/server/security/SecurityFilter.java  |  12 +-
 .../AmbariLdapBindAuthenticator.java            |   2 +-
 .../AmbariPamAuthenticationProvider.java        |   2 +-
 .../jwt/JwtAuthenticationFilter.java            |   2 +-
 .../encryption/MasterKeyServiceImpl.java        |   8 +-
 .../unsecured/rest/CertificateDownload.java     |   6 +-
 .../unsecured/rest/CertificateSign.java         |   6 +-
 .../security/unsecured/rest/ConnectionInfo.java |   6 +-
 .../kerberos/ADKerberosOperationHandler.java    |   6 +-
 .../kerberos/CreateKeytabFilesServerAction.java |   6 +-
 .../kerberos/DestroyPrincipalsServerAction.java |   2 +-
 .../kerberos/IPAKerberosOperationHandler.java   |  18 +-
 .../kerberos/MITKerberosOperationHandler.java   |   2 +-
 .../serveraction/upgrades/ConfigureAction.java  |   2 +-
 .../upgrades/FinalizeUpgradeAction.java         |   4 +-
 .../upgrades/UpdateDesiredStackAction.java      |   2 +-
 .../ambari/server/stack/ExtensionDirectory.java |   2 +-
 .../ambari/server/stack/ExtensionModule.java    |   4 +-
 .../ambari/server/stack/MasterHostResolver.java |   2 +-
 .../ambari/server/stack/ServiceDirectory.java   |   2 +-
 .../ambari/server/stack/ServiceModule.java      |   4 +-
 .../ambari/server/stack/StackDirectory.java     |   7 +-
 .../ambari/server/stack/StackManager.java       |   9 +-
 .../apache/ambari/server/stack/StackModule.java |  20 +-
 .../ambari/server/stageplanner/RoleGraph.java   |  51 +-
 .../apache/ambari/server/state/CheckHelper.java |  39 +-
 .../org/apache/ambari/server/state/Cluster.java |   6 +-
 .../ambari/server/state/ConfigFactory.java      |   8 -
 .../server/state/ServiceComponentImpl.java      |  21 +-
 .../apache/ambari/server/state/ServiceImpl.java |  21 +-
 .../apache/ambari/server/state/StackInfo.java   |   2 +-
 .../ambari/server/state/UpgradeContext.java     |   2 -
 .../ambari/server/state/action/ActionImpl.java  |  23 +-
 .../ambari/server/state/alert/AlertUri.java     |   2 +-
 .../server/state/cluster/ClusterImpl.java       |  45 +-
 .../server/state/cluster/ClustersImpl.java      |   6 +-
 .../ambari/server/state/host/HostImpl.java      |  32 +-
 .../state/kerberos/KerberosDescriptor.java      |  51 ++
 .../kerberos/VariableReplacementHelper.java     |  23 +-
 .../server/state/stack/ConfigUpgradePack.java   |   2 +-
 .../ambari/server/state/stack/OsFamily.java     |  12 +
 .../state/stack/StackRoleCommandOrder.java      |   2 +-
 .../ambari/server/state/stack/UpgradePack.java  |   2 +-
 .../state/stack/upgrade/ColocatedGrouping.java  |   2 +-
 .../upgrade/ConfigUpgradeChangeDefinition.java  |   2 +-
 .../state/stack/upgrade/ConfigureTask.java      |   2 +-
 .../state/stack/upgrade/HostOrderGrouping.java  |   2 +-
 .../stack/upgrade/RepositoryVersionHelper.java  |   4 +-
 .../state/stack/upgrade/RestartGrouping.java    |   2 +-
 .../stack/upgrade/ServiceCheckGrouping.java     |   2 +-
 .../state/stack/upgrade/StartGrouping.java      |   2 +-
 .../state/stack/upgrade/StopGrouping.java       |   2 +-
 .../state/stack/upgrade/TaskWrapperBuilder.java |   2 +-
 .../svccomphost/ServiceComponentHostImpl.java   |  23 +-
 .../server/topology/BlueprintValidatorImpl.java |   2 +-
 .../topology/tasks/ConfigureClusterTask.java    |   2 +-
 .../server/topology/tasks/TopologyHostTask.java |   4 +-
 .../validators/ClusterConfigTypeValidator.java  |   2 +-
 .../server/upgrade/AbstractUpgradeCatalog.java  |   2 +-
 .../server/upgrade/SchemaUpgradeHelper.java     |   2 +-
 .../server/upgrade/UpgradeCatalog222.java       |   2 +-
 .../server/upgrade/UpgradeCatalog250.java       |  39 +-
 .../apache/ambari/server/utils/AmbariPath.java  |   2 +-
 .../ambari/server/utils/CommandUtils.java       |   2 +-
 .../ambari/server/utils/SecretReference.java    |   2 +-
 .../ambari/server/utils/ShellCommandUtil.java   |   6 +-
 .../apache/ambari/server/utils/StageUtils.java  |   6 +-
 .../server/view/HttpImpersonatorImpl.java       |   2 +-
 .../server/view/ViewAmbariStreamProvider.java   |   2 +-
 .../ambari/server/view/ViewContextImpl.java     |   6 +-
 .../view/ViewDataMigrationContextImpl.java      |   6 +-
 .../server/view/ViewDataMigrationUtility.java   |   4 +-
 .../server/view/ViewDirectoryWatcher.java       |   6 +-
 .../ambari/server/view/ViewExtractor.java       |   8 +-
 .../apache/ambari/server/view/ViewRegistry.java |  27 +-
 .../server/view/ViewURLStreamProvider.java      |  14 +-
 .../main/python/ambari_server/setupMpacks.py    |   4 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |  12 +-
 .../0.1.0/configuration/ams-hbase-env.xml       |   4 +-
 .../package/templates/hbase_master_jaas.conf.j2 |  10 +
 .../templates/hbase_regionserver_jaas.conf.j2   |  10 +
 .../DRUID/0.9.2/configuration/druid-env.xml     |   2 +-
 .../DRUID/0.9.2/package/scripts/druid.py        |   7 +
 .../DRUID/0.9.2/package/scripts/params.py       |   1 +
 .../package/templates/hbase_master_jaas.conf.j2 |  10 +
 .../templates/hbase_regionserver_jaas.conf.j2   |  10 +
 .../HBASE/2.0.0.3.0/configuration/hbase-env.xml |   4 +-
 .../package/templates/hbase_master_jaas.conf.j2 |  10 +
 .../templates/hbase_regionserver_jaas.conf.j2   |  10 +
 .../HDFS/2.1.0.2.0/package/scripts/hdfs.py      |  17 +
 .../package/templates/hdfs_dn_jaas.conf.j2      |  27 +
 .../package/templates/hdfs_jn_jaas.conf.j2      |  27 +
 .../package/templates/hdfs_nn_jaas.conf.j2      |  27 +
 .../HDFS/3.0.0.3.0/package/scripts/hdfs.py      |  17 +
 .../package/templates/hdfs_dn_jaas.conf.j2      |  27 +
 .../package/templates/hdfs_jn_jaas.conf.j2      |  27 +
 .../package/templates/hdfs_nn_jaas.conf.j2      |  27 +
 .../HIVE/0.12.0.2.0/configuration/hive-site.xml |   2 +-
 .../KAFKA/0.8.1/configuration/kafka-env.xml     |   4 +
 .../0.8.1/configuration/kafka_jaas_conf.xml     |  11 +
 .../0.8.1/package/templates/kafka_jaas.conf.j2  |  11 +
 .../package/templates/krb5JAASLogin.conf.j2     |   9 +-
 .../package/templates/krb5JAASLogin.conf.j2     |   9 +-
 .../0.9.1/package/scripts/storm_yaml_utils.py   |   5 +-
 .../0.9.1/package/templates/storm_jaas.conf.j2  |  10 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |  32 +-
 .../package/scripts/resourcemanager.py          |   2 +-
 .../YARN/2.1.0.2.0/package/scripts/yarn.py      |  17 +
 .../package/templates/mapred_jaas.conf.j2       |  28 +
 .../package/templates/yarn_ats_jaas.conf.j2     |  27 +
 .../package/templates/yarn_jaas.conf.j2         |  12 +-
 .../package/templates/yarn_nm_jaas.conf.j2      |  27 +
 .../configuration-mapred/mapred-env.xml         |   4 +-
 .../YARN/3.0.0.3.0/configuration/yarn-env.xml   |  15 +-
 .../YARN/3.0.0.3.0/kerberos.json                |   4 +-
 .../3.0.0.3.0/package/scripts/params_linux.py   |  32 +-
 .../package/scripts/resourcemanager.py          |   2 +-
 .../YARN/3.0.0.3.0/package/scripts/yarn.py      |  19 +-
 .../package/templates/mapred_jaas.conf.j2       |  28 +
 .../package/templates/yarn_ats_jaas.conf.j2     |  27 +
 .../package/templates/yarn_jaas.conf.j2         |  12 +-
 .../package/templates/yarn_nm_jaas.conf.j2      |  27 +
 .../YARN/3.0.0.3.0/service_advisor.py           |   4 +-
 .../0.6.0.2.5/package/scripts/master.py         |   4 +-
 .../resources/host_scripts/alert_disk_space.py  |  22 +-
 .../services/HIVE/configuration/hive-site.xml   |   2 +-
 .../YARN/configuration-mapred/mapred-env.xml    |   4 +-
 .../services/HBASE/configuration/hbase-env.xml  |   4 +-
 .../services/HDFS/configuration/hadoop-env.xml  |   7 +
 .../services/YARN/configuration/yarn-env.xml    |  16 +-
 .../services/HDFS/configuration/hadoop-env.xml  |   7 +
 .../resources/stacks/HDP/2.6/repos/repoinfo.xml |  14 +
 .../services/SPARK2/configuration/livy2-env.xml |   4 +-
 .../stacks/HDP/2.6/services/YARN/kerberos.json  |   4 +-
 .../services/HDFS/configuration/hadoop-env.xml  |   7 +
 .../YARN/configuration-mapred/mapred-env.xml    |   4 +-
 .../src/main/resources/stacks/stack_advisor.py  | 115 +++-
 .../ambari/server/agent/AgentResourceTest.java  |   6 +-
 .../server/agent/LocalAgentSimulator.java       |   6 +-
 .../ambari/server/agent/TestActionQueue.java    |   2 +-
 .../server/agent/TestHeartbeatHandler.java      |   4 +-
 .../server/api/services/AmbariMetaInfoTest.java |   4 +-
 .../server/api/services/PersistServiceTest.java |   6 +-
 .../server/bootstrap/BootStrapResourceTest.java |   6 +-
 .../ambari/server/bootstrap/BootStrapTest.java  |   6 +-
 .../checks/ServiceCheckValidityCheckTest.java   |  42 +-
 .../AmbariManagementControllerImplTest.java     |  13 +-
 .../AmbariManagementControllerTest.java         |  17 +-
 .../server/controller/KerberosHelperTest.java   | 129 ++---
 .../AbstractJDBCResourceProviderTest.java       |   2 +-
 .../BlueprintConfigurationProcessorTest.java    |   3 +
 .../CredentialResourceProviderTest.java         |  10 +-
 .../PreUpgradeCheckResourceProviderTest.java    |  15 +-
 .../StackUpgradeConfigurationMergeTest.java     |   3 +-
 .../internal/UpgradeResourceProviderTest.java   |   2 +-
 .../metrics/ganglia/GangliaMetricTest.java      |   2 +-
 .../dispatchers/AlertScriptDispatcherTest.java  |  67 +++
 .../apache/ambari/server/orm/OrmTestHelper.java |   4 +-
 .../orm/dao/RepositoryVersionDAOTest.java       |   2 +-
 .../sample/checks/SampleServiceCheck.java       |   4 -
 .../server/security/CertGenerationTest.java     |   6 +-
 .../server/security/SslExecutionTest.java       |   6 +-
 .../encryption/MasterKeyServiceTest.java        |   6 +-
 .../upgrades/ConfigureActionTest.java           |   5 -
 .../upgrades/UpgradeActionTest.java             |   5 -
 .../server/stageplanner/TestStagePlanner.java   | 197 ++++++-
 .../ambari/server/state/CheckHelperTest.java    |  36 +-
 .../server/state/cluster/ClusterTest.java       |   4 +-
 .../ambari/server/state/host/HostTest.java      |   6 +-
 .../state/kerberos/KerberosDescriptorTest.java  |   9 +
 .../kerberos/VariableReplacementHelperTest.java | 130 +++--
 .../svccomphost/ServiceComponentHostTest.java   |   5 +-
 .../server/testing/DBInconsistencyTests.java    |   2 +-
 .../server/upgrade/UpgradeCatalogHelper.java    |   7 -
 .../ambari/server/upgrade/UpgradeTest.java      |   2 +-
 .../python/host_scripts/TestAlertDiskSpace.py   |  12 +-
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |  10 +
 .../stacks/2.0.6/HDFS/test_journalnode.py       |  11 +-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |  24 +-
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |  10 +
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  |  12 +-
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  |  17 +-
 .../stacks/2.0.6/YARN/test_historyserver.py     |  10 +
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |  10 +
 .../stacks/2.0.6/YARN/test_nodemanager.py       |  10 +
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |  10 +
 .../stacks/2.0.6/YARN/test_yarn_client.py       |  10 +
 .../stacks/2.0.6/common/test_stack_advisor.py   | 153 ++++++
 .../test/python/stacks/2.6/DRUID/test_druid.py  |  23 +-
 .../test/python/stacks/2.6/configs/default.json |   3 +-
 .../stacks/HDP/2.0.6.1/role_command_order.json  | 101 +---
 ambari-web/app/config.js                        |   9 +-
 .../progress_popup_controller.js                |   2 +-
 .../main/admin/stack_and_upgrade_controller.js  |   2 +-
 .../manage_alert_notifications_controller.js    |  35 +-
 ambari-web/app/messages.js                      |   9 +-
 ambari-web/app/routes/add_service_routes.js     |   2 +-
 ambari-web/app/routes/main.js                   |   2 +-
 ambari-web/app/styles/application.less          |  11 +
 ambari-web/app/styles/bootstrap_overrides.less  |  49 ++
 ambari-web/app/styles/dashboard.less            | 115 ++--
 .../templates/common/export_metrics_menu.hbs    |  16 +-
 ambari-web/app/templates/common/progress.hbs    |   4 +
 ambari-web/app/templates/main.hbs               |   2 +-
 .../app/templates/main/admin/kerberos.hbs       |  34 +-
 .../main/alerts/create_alert_notification.hbs   |  14 +
 ambari-web/app/templates/main/dashboard.hbs     |   2 +-
 .../main/dashboard/widgets/cluster_metrics.hbs  |  12 +-
 .../main/dashboard/widgets/hbase_links.hbs      |  15 +-
 .../main/dashboard/widgets/hdfs_links.hbs       |  17 +-
 .../main/dashboard/widgets/pie_chart.hbs        |  21 +-
 .../main/dashboard/widgets/simple_text.hbs      |  21 +-
 .../templates/main/dashboard/widgets/uptime.hbs |  16 +-
 .../main/dashboard/widgets/yarn_links.hbs       |  18 +-
 .../main/service/all_services_actions.hbs       |   6 +-
 ambari-web/app/utils/ajax/ajax.js               |   2 +-
 ambari-web/app/utils/config.js                  |   2 +-
 ambari-web/app/utils/validator.js               |   8 +
 ambari-web/app/views/common/chart/pie.js        |  10 +-
 .../configs/service_config_container_view.js    |   2 +-
 ambari-web/app/views/loading.js                 |   2 +-
 ambari-web/app/views/main/admin.js              |  14 +-
 .../views/main/admin/kerberos/disable_view.js   |   3 +-
 .../main/admin/stack_upgrade/services_view.js   |   2 +-
 .../main/alerts/definition_details_view.js      |   2 +-
 .../dashboard/widgets/cluster_metrics_widget.js |   3 -
 .../main/dashboard/widgets/pie_chart_widget.js  |  23 +-
 ambari-web/app/views/main/menu.js               |  16 +-
 ambari-web/app/views/main/service/item.js       |   2 +-
 ...anage_alert_notifications_controller_test.js |  38 +-
 ambari-web/test/utils/config_test.js            |   2 +-
 .../admin/stack_upgrade/services_view_test.js   |   1 +
 .../main/alerts/definition_details_view_test.js |   2 +-
 .../src/main/assemblies/odpi-ambari-mpack.xml   |   2 +-
 .../main/resources/ui/app/models/worksheet.js   |   1 +
 .../resources/ui/app/routes/queries/query.js    |  15 +-
 .../ui/app/routes/queries/query/results.js      |   6 +-
 .../templates/components/query-result-table.hbs |  92 ++--
 .../ui/app/templates/queries/query.hbs          |  10 +-
 .../ui/app/templates/queries/query/log.hbs      |  12 +-
 .../ui/app/templates/queries/query/results.hbs  |   9 +-
 .../src/main/resources/ui/ember-cli-build.js    |   1 +
 .../resources/ui/vendor/browser-pollyfills.js   | 213 ++++++++
 docs/pom.xml                                    |   2 +-
 392 files changed, 5474 insertions(+), 2497 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/382da979/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index bd4c9dd,3a650ef..60df8cf
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@@ -26,14 -26,11 +26,14 @@@ import java.util.Set
  
  import org.apache.ambari.annotations.Experimental;
  import org.apache.ambari.annotations.ExperimentalFeature;
 +import org.apache.ambari.server.AmbariException;
  import org.apache.ambari.server.RoleCommand;
 -import org.apache.ambari.server.state.ServiceInfo;
 +import org.apache.ambari.server.state.Cluster;
 +import org.apache.ambari.server.state.Service;
 +import org.apache.ambari.server.state.ServiceComponent;
  import org.apache.ambari.server.utils.StageUtils;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
  
  import com.google.gson.annotations.SerializedName;
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/382da979/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/382da979/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/382da979/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostVersionDAO.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/382da979/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/382da979/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/382da979/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/382da979/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/382da979/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/382da979/ambari-web/app/messages.js
----------------------------------------------------------------------


[14/50] [abbrv] ambari git commit: AMBARI-21187. Get rid deprecated jdk install link in the Dockerfile of Log Search (oleewere)

Posted by nc...@apache.org.
AMBARI-21187. Get rid deprecated jdk install link in the Dockerfile of Log Search (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b772b4d6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b772b4d6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b772b4d6

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b772b4d625b9de6eaa46926ccbe3244811c9865f
Parents: 3b575fb
Author: oleewere <ol...@gmail.com>
Authored: Wed Jun 7 19:35:08 2017 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Wed Jun 7 19:35:08 2017 +0200

----------------------------------------------------------------------
 ambari-logsearch/docker/Dockerfile | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b772b4d6/ambari-logsearch/docker/Dockerfile
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/Dockerfile b/ambari-logsearch/docker/Dockerfile
index d399fc6..2b8fd5d 100644
--- a/ambari-logsearch/docker/Dockerfile
+++ b/ambari-logsearch/docker/Dockerfile
@@ -22,9 +22,9 @@ RUN yum -y install glibc-common
 ENV HOME /root
 
 #Install JAVA
-ENV JAVA_VERSION 8u31
-ENV BUILD_VERSION b13
-RUN wget --no-cookies --no-check-certificate --header "Cookie: oraclelicense=accept-securebackup-cookie" "http://download.oracle.com/otn-pub/java/jdk/$JAVA_VERSION-$BUILD_VERSION/jdk-$JAVA_VERSION-linux-x64.rpm" -O jdk-8-linux-x64.rpm
+ENV JAVA_VERSION 8u131
+ENV BUILD_VERSION b11
+RUN wget --no-check-certificate --no-cookies --header "Cookie:oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/$JAVA_VERSION-$BUILD_VERSION/d54c1d3a095b4ff2b6607d096fa80163/jdk-$JAVA_VERSION-linux-x64.rpm -O jdk-8-linux-x64.rpm
 RUN rpm -ivh jdk-8-linux-x64.rpm
 ENV JAVA_HOME /usr/java/default/
 


[23/50] [abbrv] ambari git commit: AMBARI-20877. Custom RM principal causes zookeeper HA state store to be inaccessible. (Attila Magyar via stoader)

Posted by nc...@apache.org.
AMBARI-20877. Custom RM principal causes zookeeper HA state store to be inaccessible. (Attila Magyar via stoader)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/89797ea2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/89797ea2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/89797ea2

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 89797ea2c56ae0387899890756527e1019ffd3f3
Parents: e2fbd0f
Author: Attila Magyar <am...@hortonworks.com>
Authored: Thu Jun 8 15:46:51 2017 +0200
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Thu Jun 8 15:46:51 2017 +0200

----------------------------------------------------------------------
 .../security/kerberos/kerberos_descriptor.md    |   5 +-
 .../server/controller/KerberosHelperImpl.java   |  19 ++-
 .../state/kerberos/KerberosDescriptor.java      |  51 ++++++++
 .../kerberos/VariableReplacementHelper.java     |  23 +++-
 .../YARN/3.0.0.3.0/kerberos.json                |   4 +-
 .../stacks/HDP/2.6/services/YARN/kerberos.json  |   4 +-
 .../server/controller/KerberosHelperTest.java   | 129 +++++++++---------
 .../state/kerberos/KerberosDescriptorTest.java  |   9 ++
 .../kerberos/VariableReplacementHelperTest.java | 130 +++++++++++--------
 9 files changed, 242 insertions(+), 132 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/89797ea2/ambari-server/docs/security/kerberos/kerberos_descriptor.md
----------------------------------------------------------------------
diff --git a/ambari-server/docs/security/kerberos/kerberos_descriptor.md b/ambari-server/docs/security/kerberos/kerberos_descriptor.md
index 54af50f..a59564c 100644
--- a/ambari-server/docs/security/kerberos/kerberos_descriptor.md
+++ b/ambari-server/docs/security/kerberos/kerberos_descriptor.md
@@ -288,8 +288,9 @@ the configuration type and containing values for each relevant property.
 
 Each property name and value may be a concrete value or contain variables to be replaced using values
 from the stack-level `properties` block or any available configuration. Properties from the `properties`
-block are referenced by name (`${property_name}`) and configuration properties are reference by
-configuration specification (`${config-type/property_name}`).
+block are referenced by name (`${property_name}`), configuration properties are referenced by
+configuration specification (`${config-type/property_name}`), and Kerberos principals are referenced by the principal path
+(`principals/SERVICE/COMPONENT/principal_name`).
 
 ```
 "configurations" : [

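For illustration, the new principal-path style resolves through the same VariableReplacementHelper
as the other two reference styles: the descriptor path is simply looked up under a synthetic
"principals" entry in the replacements map (populated by KerberosHelperImpl, below). A minimal
sketch in the style of VariableReplacementHelperTest further down; the principal value is
hypothetical and "helper" stands for the test's VariableReplacementHelper instance
(replaceVariables declares AmbariException, as in the test methods):

    Map<String, Map<String, String>> replacements = new HashMap<>();
    Map<String, String> principals = new HashMap<>();
    principals.put("YARN/RESOURCEMANAGER/resource_manager_rm", "rm/_HOST@EXAMPLE.COM"); // hypothetical value
    replacements.put("principals", principals);

    // resolves to "rm/_HOST@EXAMPLE.COM"
    helper.replaceVariables("${principals/YARN/RESOURCEMANAGER/resource_manager_rm}", replacements);

    // resolves to "rm" via the new principalPrimary() function added in VariableReplacementHelper below
    helper.replaceVariables("${principals/YARN/RESOURCEMANAGER/resource_manager_rm|principalPrimary()}", replacements);
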
http://git-wip-us.apache.org/repos/asf/ambari/blob/89797ea2/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index 87c826d..61674cf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -1241,10 +1241,21 @@ public class KerberosHelperImpl implements KerberosHelper {
   @Override
   public Map<String, Map<String, String>> calculateConfigurations(Cluster cluster, String hostname,
                                                                   Map<String, String> kerberosDescriptorProperties)
-      throws AmbariException {
-    return addAdditionalConfigurations(cluster,
-        calculateExistingConfigurations(cluster, hostname),
-        hostname, kerberosDescriptorProperties);
+      throws AmbariException
+  {
+    Map<String, Map<String, String>> configuration = addAdditionalConfigurations(cluster,
+      calculateExistingConfigurations(cluster, hostname),
+      hostname, kerberosDescriptorProperties);
+    configuration.put("principals", principalNames(cluster, configuration));
+    return configuration;
+  }
+
+  private Map<String, String> principalNames(Cluster cluster, Map<String, Map<String, String>> configuration) throws AmbariException {
+    Map<String, String> result = new HashMap<>();
+    for (Map.Entry<String, String> each : getKerberosDescriptor(cluster).principals().entrySet()) {
+      result.put(each.getKey(), variableReplacementHelper.replaceVariables(each.getValue(), configuration));
+    }
+    return result;
   }
 
   @Override

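The net effect of this hunk: the map returned by calculateConfigurations() now carries one extra,
synthetic configuration type named "principals", keyed by descriptor path, with variable
replacement already applied to each principal value. Illustrative contents for a hypothetical
cluster with realm EXAMPLE.COM ("kerberosHelper" stands for an injected KerberosHelper instance):

    Map<String, Map<String, String>> configurations =
        kerberosHelper.calculateConfigurations(cluster, hostname, kerberosDescriptorProperties);
    Map<String, String> principals = configurations.get("principals");
    // principals.get("YARN/RESOURCEMANAGER/resource_manager_rm")    -> "rm/_HOST@EXAMPLE.COM"
    // principals.get("MAPREDUCE2/HISTORYSERVER/history_server_jhs") -> "jhs/_HOST@EXAMPLE.COM"
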
http://git-wip-us.apache.org/repos/asf/ambari/blob/89797ea2/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
index a1b9e5c..f9dfa4a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
@@ -20,12 +20,16 @@ package org.apache.ambari.server.state.kerberos;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
 
+import org.apache.ambari.server.AmbariException;
+import org.apache.commons.lang.StringUtils;
+
 /**
  * KerberosDescriptor is an implementation of an AbstractKerberosDescriptorContainer that
  * encapsulates an entire Kerberos descriptor hierarchy.
@@ -418,4 +422,51 @@ public class KerberosDescriptor extends AbstractKerberosDescriptorContainer {
 
     return authToLocalProperties;
   }
+
+  /**
+   * Get a map of principals, where the key is the principal path (SERVICE/COMPONENT/principal_name or SERVICE/principal_name) and the value is the principal.
+   *
+   * For example if the kerberos principal of the HISTORYSERVER is defined in the kerberos.json:
+   * "name": "history_server_jhs",
+   *   "principal": {
+   *   "value": "jhs/_HOST@${realm}",
+   *   "type" : "service",
+   * },
+   * Then "jhs/_HOST@EXAMPLE.COM" will be put into the map under the "MAPREDUCE2/HISTORYSERVER/history_server_jhs" key.
+   */
+  public Map<String, String> principals() throws AmbariException {
+    Map<String,String> result = new HashMap<>();
+    for (AbstractKerberosDescriptorContainer each : nullToEmpty(getChildContainers())) {
+      if ((each instanceof KerberosServiceDescriptor)) {
+        collectFromComponents(each.getName(), nullToEmpty(((KerberosServiceDescriptor) each).getComponents()).values(), result);
+        collectFromIdentities(each.getName(), "", nullToEmpty(each.getIdentities()), result);
+      }
+    }
+    return result;
+  }
+
+  private static void collectFromComponents(String service, Collection<KerberosComponentDescriptor> components, Map<String, String> result) {
+    for (KerberosComponentDescriptor each : components) {
+      collectFromIdentities(service, each.getName(), nullToEmpty(each.getIdentities()), result);
+    }
+  }
+
+  private static void collectFromIdentities(String service, String component, Collection<KerberosIdentityDescriptor> identities, Map<String, String> result) {
+    for (KerberosIdentityDescriptor each : identities) {
+      if (each.getPrincipalDescriptor() != null && !each.getReferencedServiceName().isPresent() && !each.getName().startsWith("/")) {
+        String path = StringUtils.isBlank(component)
+          ? String.format("%s/%s", service, each.getName())
+          : String.format("%s/%s/%s", service, component, each.getName());
+        result.put(path, each.getPrincipalDescriptor().getName());
+      }
+    }
+  }
+
+  private static <T> Collection<T> nullToEmpty(Collection<T> collection) {
+    return collection == null ? Collections.<T>emptyList() : collection;
+  }
+
+  private static <K,V> Map<K,V> nullToEmpty(Map<K,V> collection) {
+    return collection == null ? Collections.<K,V>emptyMap() : collection;
+  }
 }

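The map key follows the two shapes described in the Javadoc above: identities declared on a
component become SERVICE/COMPONENT/identity_name, identities declared directly on a service become
SERVICE/identity_name, and identity references (names starting with "/" or pointing at another
service) are skipped. A short sketch mirroring the new KerberosDescriptorTest further down, with
"descriptorFile" standing in for a kerberos.json test resource:

    KerberosDescriptor descriptor = KERBEROS_DESCRIPTOR_FACTORY.createInstance(descriptorFile);
    Map<String, String> principals = descriptor.principals();
    // component-level identity:
    //   "SERVICE2/SERVICE2_COMPONENT1/service2_component1_identity" -> "service2_component1@${realm}"
    // service-level identity:
    //   "SERVICE1/service1_identity"                                -> "service1@${realm}"
    // ${realm} is still unresolved at this point; substitution happens later in KerberosHelperImpl.principalNames()
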
http://git-wip-us.apache.org/repos/asf/ambari/blob/89797ea2/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelper.java
index b9e2841..a83f080 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelper.java
@@ -43,7 +43,7 @@ public class VariableReplacementHelper {
   /**
    * a regular expression Pattern used to find "variable" placeholders in strings
    */
-  private static final Pattern PATTERN_VARIABLE = Pattern.compile("\\$\\{(?:([\\w\\-\\.]+)/)?([\\w\\-\\.]+)(?:\\s*\\|\\s*(.+?))?\\}");
+  private static final Pattern PATTERN_VARIABLE = Pattern.compile("\\$\\{(?:([\\w\\-\\.]+)/)?([\\w\\-\\./]+)(?:\\s*\\|\\s*(.+?))?\\}");
 
   /**
    * a regular expression Pattern used to parse "function" declarations: name(arg1, arg2, ...)
@@ -59,6 +59,7 @@ public class VariableReplacementHelper {
       put("toLower", new ToLowerFunction());
       put("replace", new ReplaceValue());
       put("append", new AppendFunction());
+      put("principalPrimary", new PrincipalPrimary());
     }
   };
 
@@ -411,4 +412,24 @@ public class VariableReplacementHelper {
       return sourceData;
     }
   }
+
+  /**
+   * Get the primary part of a Kerberos principal.
+   * The format of a typical Kerberos principal is primary/instance@REALM.
+   */
+  private static class PrincipalPrimary implements Function {
+    @Override
+    public String perform(String[] args, String data, Map<String, Map<String, String>> replacementsMap) {
+      if (data == null) {
+        return null;
+      }
+      if (data.contains("/")) {
+        return data.split("/")[0];
+      } else if (data.contains("@")) {
+        return data.split("@")[0];
+      } else {
+        return data;
+      }
+    }
+  }
 }

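Two things change in this file: the second capture group of the variable pattern now accepts "/",
so a whole principal path can act as the property name, and a principalPrimary() function is added
to strip the instance and realm parts. A sketch of how the widened pattern splits a principal
reference (java.util.regex; the pattern string is copied verbatim from above, the group values are
shown as comments):

    Pattern p = Pattern.compile("\\$\\{(?:([\\w\\-\\.]+)/)?([\\w\\-\\./]+)(?:\\s*\\|\\s*(.+?))?\\}");
    Matcher m = p.matcher("${principals/YARN/RESOURCEMANAGER/resource_manager_rm|principalPrimary()}");
    if (m.find()) {
      m.group(1);  // "principals"                               -> the replacements-map key
      m.group(2);  // "YARN/RESOURCEMANAGER/resource_manager_rm" -> may now contain "/"
      m.group(3);  // "principalPrimary()"                       -> optional function; it turns
    }              //    "rm/_HOST@EXAMPLE.COM" into "rm"
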
http://git-wip-us.apache.org/repos/asf/ambari/blob/89797ea2/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/kerberos.json b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/kerberos.json
index ae4db4f..b1501b8 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/kerberos.json
@@ -32,9 +32,9 @@
             "yarn.resourcemanager.proxyuser.*.hosts": "",
             "yarn.resourcemanager.proxyuser.*.users": "",
             "yarn.resourcemanager.proxy-user-privileges.enabled": "true",
-            "yarn.resourcemanager.zk-acl" : "sasl:rm:rwcda",
+            "yarn.resourcemanager.zk-acl" : "sasl:${principals/YARN/RESOURCEMANAGER/resource_manager_rm|principalPrimary()}:rwcda",
             "hadoop.registry.secure" : "true",
-            "hadoop.registry.system.accounts" : "sasl:yarn,sasl:mapred,sasl:hadoop,sasl:hdfs,sasl:rm,sasl:hive",
+            "hadoop.registry.system.accounts" : "sasl:${principals/YARN/APP_TIMELINE_SERVER/app_timeline_server_yarn|principalPrimary()},sasl:${principals/MAPREDUCE2/HISTORYSERVER/history_server_jhs|principalPrimary()},sasl:${principals/HDFS/NAMENODE/hdfs|principalPrimary()},sasl:${principals/YARN/RESOURCEMANAGER/resource_manager_rm|principalPrimary()},sasl:${principals/HIVE/HIVE_SERVER/hive_server_hive|principalPrimary()}",
             "hadoop.registry.client.auth" : "kerberos",
             "hadoop.registry.jaas.context" : "Client"
           }

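With this change the ZooKeeper ACL follows whatever primary the ResourceManager principal actually
uses instead of being hard-coded to "rm", which is what made a custom RM principal lock itself out
of the HA state store. Rendered values, assuming the stock principal keeps the "rm" primary and,
hypothetically, a custom principal of yarn-rm-prod/_HOST@EXAMPLE.COM:

    "yarn.resourcemanager.zk-acl" : "sasl:rm:rwcda"              (stock rm/_HOST@EXAMPLE.COM)
    "yarn.resourcemanager.zk-acl" : "sasl:yarn-rm-prod:rwcda"    (custom yarn-rm-prod/_HOST@EXAMPLE.COM)

The same substitution keeps hadoop.registry.system.accounts aligned with the actual principal
primaries referenced above.
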
http://git-wip-us.apache.org/repos/asf/ambari/blob/89797ea2/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json
index ae4db4f..b1501b8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json
@@ -32,9 +32,9 @@
             "yarn.resourcemanager.proxyuser.*.hosts": "",
             "yarn.resourcemanager.proxyuser.*.users": "",
             "yarn.resourcemanager.proxy-user-privileges.enabled": "true",
-            "yarn.resourcemanager.zk-acl" : "sasl:rm:rwcda",
+            "yarn.resourcemanager.zk-acl" : "sasl:${principals/YARN/RESOURCEMANAGER/resource_manager_rm|principalPrimary()}:rwcda",
             "hadoop.registry.secure" : "true",
-            "hadoop.registry.system.accounts" : "sasl:yarn,sasl:mapred,sasl:hadoop,sasl:hdfs,sasl:rm,sasl:hive",
+            "hadoop.registry.system.accounts" : "sasl:${principals/YARN/APP_TIMELINE_SERVER/app_timeline_server_yarn|principalPrimary()},sasl:${principals/MAPREDUCE2/HISTORYSERVER/history_server_jhs|principalPrimary()},sasl:${principals/HDFS/NAMENODE/hdfs|principalPrimary()},sasl:${principals/YARN/RESOURCEMANAGER/resource_manager_rm|principalPrimary()},sasl:${principals/HIVE/HIVE_SERVER/hive_server_hive|principalPrimary()}",
             "hadoop.registry.client.auth" : "kerberos",
             "hadoop.registry.jaas.context" : "Client"
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/89797ea2/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index 98241eb..18a6754 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -1001,30 +1001,30 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(serviceComponentKerberosClient.getName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
     expect(serviceComponentKerberosClient.getServiceComponentHosts()).andReturn(Collections.singletonMap("host1", schKerberosClient)).anyTimes();
 
-    final Service serviceKerberos = createStrictMock(Service.class);
+    final Service serviceKerberos = createNiceMock(Service.class);
     expect(serviceKerberos.getDesiredStackId()).andReturn(stackId).anyTimes();
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
-        .times(1);
+        .anyTimes();
     serviceKerberos.setSecurityState(SecurityState.SECURED_KERBEROS);
     expectLastCall().once();
 
-    final Service service1 = createStrictMock(Service.class);
+    final Service service1 = createNiceMock(Service.class);
     expect(service1.getDesiredStackId()).andReturn(stackId).anyTimes();
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(1);
+        .anyTimes();
     service1.setSecurityState(SecurityState.SECURED_KERBEROS);
     expectLastCall().once();
 
-    final Service service2 = createStrictMock(Service.class);
+    final Service service2 = createNiceMock(Service.class);
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getDesiredStackId()).andReturn(stackId).anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(1);
+        .anyTimes();
     service2.setSecurityState(SecurityState.SECURED_KERBEROS);
     expectLastCall().once();
 
@@ -1090,7 +1090,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).once();
     expect(kerberosDescriptor.getService("SERVICE2")).andReturn(serviceDescriptor2).once();
 
-    setupKerberosDescriptor(kerberosDescriptor, 1);
+    setupKerberosDescriptor(kerberosDescriptor);
     setupStageFactory();
 
     // This is a STRICT mock to help ensure that the end result is what we want.
@@ -1203,7 +1203,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(serviceComponentKerberosClient.getServiceComponentHosts()).andReturn(Collections.singletonMap("host1", schKerberosClient)).anyTimes();
 
     final Service serviceKerberos = createNiceMock(Service.class);
-    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
@@ -1212,7 +1212,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expectLastCall().once();
 
     final Service service1 = createNiceMock(Service.class);
-    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
@@ -1221,7 +1221,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expectLastCall().once();
 
     final Service service2 = createNiceMock(Service.class);
-    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
@@ -1287,7 +1287,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).atLeastOnce();
     expect(kerberosDescriptor.getService("SERVICE2")).andReturn(serviceDescriptor2).atLeastOnce();
 
-    setupKerberosDescriptor(kerberosDescriptor, 1);
+    setupKerberosDescriptor(kerberosDescriptor);
     setupStageFactory();
 
     // This is a STRICT mock to help ensure that the end result is what we want.
@@ -1426,25 +1426,25 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(serviceComponentKerberosClient.getServiceComponentHosts()).andReturn(map).anyTimes();
 
     final Service serviceKerberos = createStrictMock(Service.class);
-    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
-        .times(1);
+        .anyTimes();
 
     final Service service1 = createStrictMock(Service.class);
-    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(1);
+        .anyTimes();
 
     final Service service2 = createStrictMock(Service.class);
-    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(1);
+        .anyTimes();
 
     final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").anyTimes();
@@ -1509,7 +1509,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).atLeastOnce();
     expect(kerberosDescriptor.getService("SERVICE2")).andReturn(serviceDescriptor2).atLeastOnce();
 
-    setupKerberosDescriptor(kerberosDescriptor, 1);
+    setupKerberosDescriptor(kerberosDescriptor);
     setupStageFactory();
 
     final RequestStageContainer requestStageContainer;
@@ -2050,7 +2050,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(kerberosDescriptor.getIdentities(eq(true), EasyMock.<Map<String, Object>>anyObject())).andReturn(null).atLeastOnce();
     expect(kerberosDescriptor.getAuthToLocalProperties()).andReturn(Collections.singleton("core-site/auth.to.local")).atLeastOnce();
 
-    setupKerberosDescriptor(kerberosDescriptor, 2);
+    setupKerberosDescriptor(kerberosDescriptor);
 
     RecommendationResponse.BlueprintConfigurations coreSiteRecommendation = createNiceMock(RecommendationResponse
         .BlueprintConfigurations.class);
@@ -2485,7 +2485,7 @@ public class KerberosHelperTest extends EasyMockSupport {
       expectLastCall().once();
     }
 
-    setupKerberosDescriptor(kerberosDescriptor, 1);
+    setupKerberosDescriptor(kerberosDescriptor);
 
     Map<String, Map<String, String>> existingConfigurations = new HashMap<>();
     existingConfigurations.put("kerberos-env", propertiesKerberosEnv);
@@ -2612,7 +2612,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(kerberosDescriptor.getProperties()).andReturn(kerberosDescriptorProperties);
     expect(kerberosDescriptor.getService("SERVICE1")).andReturn(service1KerberosDescriptor).times(1);
 
-    setupKerberosDescriptor(kerberosDescriptor, 1);
+    setupKerberosDescriptor(kerberosDescriptor);
 
     Map<String, Map<String, String>> existingConfigurations = new HashMap<>();
     existingConfigurations.put("kerberos-env", propertiesKerberosEnv);
@@ -2648,9 +2648,10 @@ public class KerberosHelperTest extends EasyMockSupport {
     assertEquals(0, capturedPrincipalsForKeytab.size());
   }
 
-  private void setupKerberosDescriptor(KerberosDescriptor kerberosDescriptor, int expectedCalls) throws Exception {
+  private void setupKerberosDescriptor(KerberosDescriptor kerberosDescriptor) throws Exception {
     // cluster.getCurrentStackVersion expectation is already specified in main test method
-    expect(metaInfo.getKerberosDescriptor("HDP", "2.2")).andReturn(kerberosDescriptor).times(expectedCalls);
+    expect(metaInfo.getKerberosDescriptor("HDP", "2.2")).andReturn(kerberosDescriptor).anyTimes();
+    expect(kerberosDescriptor.principals()).andReturn(Collections.<String, String>emptyMap()).anyTimes();
   }
 
   private void setupStageFactory() {
@@ -2736,25 +2737,25 @@ public class KerberosHelperTest extends EasyMockSupport {
     ).anyTimes();
 
     final Service serviceKerberos = createStrictMock(Service.class);
-    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
-        .times(1);
+        .anyTimes();
 
     final Service service1 = createStrictMock(Service.class);
-    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(1);
+        .anyTimes();
 
     final Service service2 = createStrictMock(Service.class);
-    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(1);
+        .anyTimes();
 
     final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").anyTimes();
@@ -2904,7 +2905,7 @@ public class KerberosHelperTest extends EasyMockSupport {
       expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).times(1);
     }
 
-    setupKerberosDescriptor(kerberosDescriptor, 1);
+    setupKerberosDescriptor(kerberosDescriptor);
     setupStageFactory();
 
     // This is a STRICT mock to help ensure that the end result is what we want.
@@ -2995,25 +2996,25 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(serviceComponentKerberosClient.getServiceComponentHosts()).andReturn(Collections.singletonMap("host1", schKerberosClient)).anyTimes();
 
     final Service serviceKerberos = createStrictMock(Service.class);
-    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
-        .times(1);
+        .anyTimes();
 
     final Service service1 = createStrictMock(Service.class);
-    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(1);
+        .anyTimes();
 
     final Service service2 = createStrictMock(Service.class);
-    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(1);
+        .anyTimes();
 
     final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").anyTimes();
@@ -3110,7 +3111,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).times(1);
     expect(kerberosDescriptor.getService("SERVICE3")).andReturn(serviceDescriptor3).times(1);
 
-    setupKerberosDescriptor(kerberosDescriptor, 1);
+    setupKerberosDescriptor(kerberosDescriptor);
     setupStageFactory();
 
     // This is a STRICT mock to help ensure that the end result is what we want.
@@ -3210,26 +3211,26 @@ public class KerberosHelperTest extends EasyMockSupport {
       expect(serviceComponentKerberosClient.getName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
       expect(serviceComponentKerberosClient.getServiceComponentHosts()).andReturn(Collections.singletonMap("host1", schKerberosClient)).anyTimes();
 
-      final Service serviceKerberos = createStrictMock(Service.class);
-      expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+      final Service serviceKerberos = createNiceMock(Service.class);
+      expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
       expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
       expect(serviceKerberos.getServiceComponents())
           .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
-          .times(2);
+          .anyTimes();
 
-      final Service service1 = createStrictMock(Service.class);
-      expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+      final Service service1 = createNiceMock(Service.class);
+      expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
       expect(service1.getName()).andReturn("SERVICE1").anyTimes();
       expect(service1.getServiceComponents())
           .andReturn(Collections.<String, ServiceComponent>emptyMap())
-          .times(2);
+          .anyTimes();
 
-      final Service service2 = createStrictMock(Service.class);
-      expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+      final Service service2 = createNiceMock(Service.class);
+      expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
       expect(service2.getName()).andReturn("SERVICE2").anyTimes();
       expect(service2.getServiceComponents())
           .andReturn(Collections.<String, ServiceComponent>emptyMap())
-          .times(2);
+          .anyTimes();
 
 
       expect(cluster.getClusterName()).andReturn("c1").anyTimes();
@@ -3291,7 +3292,7 @@ public class KerberosHelperTest extends EasyMockSupport {
       final KerberosDescriptor kerberosDescriptor = createStrictMock(KerberosDescriptor.class);
       expect(kerberosDescriptor.getProperties()).andReturn(null).once();
 
-      setupKerberosDescriptor(kerberosDescriptor, 1);
+      setupKerberosDescriptor(kerberosDescriptor);
       setupStageFactory();
 
       // Preparation Stage
@@ -3384,26 +3385,26 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(serviceComponentKerberosClient.getName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
     expect(serviceComponentKerberosClient.getServiceComponentHosts()).andReturn(Collections.singletonMap("host1", schKerberosClient)).anyTimes();
 
-    final Service serviceKerberos = createStrictMock(Service.class);
-    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    final Service serviceKerberos = createNiceMock(Service.class);
+    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
-        .times(2);
+        .anyTimes();
 
-    final Service service1 = createStrictMock(Service.class);
-    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    final Service service1 = createNiceMock(Service.class);
+    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(2);
+        .anyTimes();
 
-    final Service service2 = createStrictMock(Service.class);
-    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    final Service service2 = createNiceMock(Service.class);
+    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(2);
+        .anyTimes();
 
     final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").anyTimes();
@@ -3466,7 +3467,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     final KerberosDescriptor kerberosDescriptor = createStrictMock(KerberosDescriptor.class);
     expect(kerberosDescriptor.getProperties()).andReturn(null).once();
 
-    setupKerberosDescriptor(kerberosDescriptor, 1);
+    setupKerberosDescriptor(kerberosDescriptor);
     setupStageFactory();
 
     // This is a STRICT mock to help ensure that the end result is what we want.
@@ -3558,22 +3559,22 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(serviceComponentKerberosClient.getName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
     expect(serviceComponentKerberosClient.getServiceComponentHosts()).andReturn(Collections.singletonMap("host1", schKerberosClient1)).anyTimes();
 
-    final Service serviceKerberos = createStrictMock(Service.class);
-    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    final Service serviceKerberos = createNiceMock(Service.class);
+    expect(serviceKerberos.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
         .anyTimes();
 
-    final Service service1 = createStrictMock(Service.class);
-    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    final Service service1 = createNiceMock(Service.class);
+    expect(service1.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
         .anyTimes();
 
-    final Service service2 = createStrictMock(Service.class);
-    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2"));
+    final Service service2 = createNiceMock(Service.class);
+    expect(service2.getDesiredStackId()).andReturn(new StackId("HDP-2.2")).anyTimes();
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
@@ -3779,7 +3780,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).anyTimes();
     expect(kerberosDescriptor.getService("SERVICE2")).andReturn(serviceDescriptor2).anyTimes();
 
-    setupKerberosDescriptor(kerberosDescriptor, 1);
+    setupKerberosDescriptor(kerberosDescriptor);
 
     replayAll();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/89797ea2/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
index a63da61..7fb5624 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
@@ -493,4 +493,13 @@ public class KerberosDescriptorTest {
     }});
     Assert.assertEquals(1, identities.size());
   }
+
+  @Test
+  public void testCollectPrincipalNames() throws Exception {
+    URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_get_referenced_identity_descriptor.json");
+    KerberosDescriptor descriptor = KERBEROS_DESCRIPTOR_FACTORY.createInstance(new File(systemResourceURL.getFile()));
+    Map<String, String> principalsPerComponent = descriptor.principals();
+    Assert.assertEquals("service2_component1@${realm}", principalsPerComponent.get("SERVICE2/SERVICE2_COMPONENT1/service2_component1_identity"));
+    Assert.assertEquals("service1@${realm}", principalsPerComponent.get("SERVICE1/service1_identity"));
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/89797ea2/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelperTest.java
index f00f694..e46294a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelperTest.java
@@ -18,6 +18,8 @@
 
 package org.apache.ambari.server.state.kerberos;
 
+import static junit.framework.Assert.assertEquals;
+
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
@@ -72,63 +74,63 @@ public class VariableReplacementHelperTest {
       }
     };
 
-    Assert.assertEquals("concrete",
-        helper.replaceVariables("concrete", configurations));
+    assertEquals("concrete",
+      helper.replaceVariables("concrete", configurations));
 
-    Assert.assertEquals("Hello World",
-        helper.replaceVariables("${global_variable}", configurations));
+    assertEquals("Hello World",
+      helper.replaceVariables("${global_variable}", configurations));
 
-    Assert.assertEquals("Replacement1",
-        helper.replaceVariables("${config-type/variable.name}", configurations));
+    assertEquals("Replacement1",
+      helper.replaceVariables("${config-type/variable.name}", configurations));
 
-    Assert.assertEquals("Replacement1|Replacement2",
-        helper.replaceVariables("${config-type/variable.name}|${config-type2/variable.name}", configurations));
+    assertEquals("Replacement1|Replacement2",
+      helper.replaceVariables("${config-type/variable.name}|${config-type2/variable.name}", configurations));
 
-    Assert.assertEquals("Replacement1|Replacement2|${config-type3/variable.name}",
-        helper.replaceVariables("${config-type/variable.name}|${config-type2/variable.name}|${config-type3/variable.name}", configurations));
+    assertEquals("Replacement1|Replacement2|${config-type3/variable.name}",
+      helper.replaceVariables("${config-type/variable.name}|${config-type2/variable.name}|${config-type3/variable.name}", configurations));
 
-    Assert.assertEquals("Replacement2|Replacement2",
-        helper.replaceVariables("${config-type/variable.name1}|${config-type2/variable.name}", configurations));
+    assertEquals("Replacement2|Replacement2",
+      helper.replaceVariables("${config-type/variable.name1}|${config-type2/variable.name}", configurations));
 
-    Assert.assertEquals("Replacement1_reference",
-        helper.replaceVariables("${config-type/variable.name}_reference", configurations));
+    assertEquals("Replacement1_reference",
+      helper.replaceVariables("${config-type/variable.name}_reference", configurations));
 
-    Assert.assertEquals("dash",
-        helper.replaceVariables("${variable-name}", configurations));
+    assertEquals("dash",
+      helper.replaceVariables("${variable-name}", configurations));
 
-    Assert.assertEquals("underscore",
-        helper.replaceVariables("${variable_name}", configurations));
+    assertEquals("underscore",
+      helper.replaceVariables("${variable_name}", configurations));
 
-    Assert.assertEquals("config_type_dot",
-        helper.replaceVariables("${config_type/variable.name}", configurations));
+    assertEquals("config_type_dot",
+      helper.replaceVariables("${config_type/variable.name}", configurations));
 
-    Assert.assertEquals("config_type_dash",
-        helper.replaceVariables("${config_type/variable-name}", configurations));
+    assertEquals("config_type_dash",
+      helper.replaceVariables("${config_type/variable-name}", configurations));
 
-    Assert.assertEquals("config_type_underscore",
-        helper.replaceVariables("${config_type/variable_name}", configurations));
+    assertEquals("config_type_underscore",
+      helper.replaceVariables("${config_type/variable_name}", configurations));
 
-    Assert.assertEquals("config.type_dot",
-        helper.replaceVariables("${config.type/variable.name}", configurations));
+    assertEquals("config.type_dot",
+      helper.replaceVariables("${config.type/variable.name}", configurations));
 
-    Assert.assertEquals("config.type_dash",
-        helper.replaceVariables("${config.type/variable-name}", configurations));
+    assertEquals("config.type_dash",
+      helper.replaceVariables("${config.type/variable-name}", configurations));
 
-    Assert.assertEquals("config.type_underscore",
-        helper.replaceVariables("${config.type/variable_name}", configurations));
+    assertEquals("config.type_underscore",
+      helper.replaceVariables("${config.type/variable_name}", configurations));
 
-    Assert.assertEquals("dot",
-        helper.replaceVariables("${variable.name}", configurations));
+    assertEquals("dot",
+      helper.replaceVariables("${variable.name}", configurations));
 
     // Replacement yields an empty string
-    Assert.assertEquals("",
-        helper.replaceVariables("${config-type/variable.name2}", configurations));
+    assertEquals("",
+      helper.replaceVariables("${config-type/variable.name2}", configurations));
 
 
     // This might cause an infinite loop... we assume protection is in place...
     try {
-      Assert.assertEquals("${config-type2/self_reference}",
-          helper.replaceVariables("${config-type2/self_reference}", configurations));
+      assertEquals("${config-type2/self_reference}",
+        helper.replaceVariables("${config-type2/self_reference}", configurations));
       Assert.fail(String.format("%s expected to be thrown", AmbariException.class.getName()));
     } catch (AmbariException e) {
       // This is expected...
@@ -146,14 +148,14 @@ public class VariableReplacementHelperTest {
       }
     };
 
-    Assert.assertEquals("hive.metastore.local=false,hive.metastore.uris=thrift://c6401.ambari.apache.org:9083,hive.metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=hive/_HOST@EXAMPLE.COM",
-        helper.replaceVariables("hive.metastore.local=false,hive.metastore.uris=thrift://${host}:9083,hive.metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=hive/_HOST@${realm}", configurations));
+    assertEquals("hive.metastore.local=false,hive.metastore.uris=thrift://c6401.ambari.apache.org:9083,hive.metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=hive/_HOST@EXAMPLE.COM",
+      helper.replaceVariables("hive.metastore.local=false,hive.metastore.uris=thrift://${host}:9083,hive.metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=hive/_HOST@${realm}", configurations));
 
-    Assert.assertEquals("Hello my realm is {EXAMPLE.COM}",
-        helper.replaceVariables("Hello my realm is {${realm}}", configurations));
+    assertEquals("Hello my realm is {EXAMPLE.COM}",
+      helper.replaceVariables("Hello my realm is {${realm}}", configurations));
 
-    Assert.assertEquals("$c6401.ambari.apache.org",
-        helper.replaceVariables("$${host}", configurations));
+    assertEquals("$c6401.ambari.apache.org",
+      helper.replaceVariables("$${host}", configurations));
   }
 
   @Test
@@ -168,7 +170,7 @@ public class VariableReplacementHelperTest {
         put("kafka-broker", new HashMap<String, String>() {{
           put("listeners", "PLAINTEXT://localhost:6667");
         }});
-        
+
         put("clusterHostInfo", new HashMap<String, String>() {{
           put("hive_metastore_host", "host1.unit.test, host2.unit.test , host3.unit.test"); // spaces are there on purpose.
         }});
@@ -182,11 +184,11 @@ public class VariableReplacementHelperTest {
       }
     };
 
-    Assert.assertEquals("test=thrift://one:9083\\,thrift://two:9083\\,thrift://three:9083\\,thrift://four:9083",
-        helper.replaceVariables("test=${delimited.data|each(thrift://%s:9083, \\\\,, \\s*\\,\\s*)}", configurations));
+    assertEquals("test=thrift://one:9083\\,thrift://two:9083\\,thrift://three:9083\\,thrift://four:9083",
+      helper.replaceVariables("test=${delimited.data|each(thrift://%s:9083, \\\\,, \\s*\\,\\s*)}", configurations));
 
-    Assert.assertEquals("hive.metastore.local=false,hive.metastore.uris=thrift://host1.unit.test:9083\\,thrift://host2.unit.test:9083\\,thrift://host3.unit.test:9083,hive.metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=hive/_HOST@UNIT.TEST",
-        helper.replaceVariables("hive.metastore.local=false,hive.metastore.uris=${clusterHostInfo/hive_metastore_host | each(thrift://%s:9083, \\\\,, \\s*\\,\\s*)},hive.metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=hive/_HOST@${realm}", configurations));
+    assertEquals("hive.metastore.local=false,hive.metastore.uris=thrift://host1.unit.test:9083\\,thrift://host2.unit.test:9083\\,thrift://host3.unit.test:9083,hive.metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=hive/_HOST@UNIT.TEST",
+      helper.replaceVariables("hive.metastore.local=false,hive.metastore.uris=${clusterHostInfo/hive_metastore_host | each(thrift://%s:9083, \\\\,, \\s*\\,\\s*)},hive.metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=hive/_HOST@${realm}", configurations));
 
     List<String> expected;
     List<String> actual;
@@ -195,38 +197,52 @@ public class VariableReplacementHelperTest {
     actual = new LinkedList<>(Arrays.asList(helper.replaceVariables("${foobar-site/hello | append(foobar-site/data, \\,, true)}", configurations).split(",")));
     Collections.sort(expected);
     Collections.sort(actual);
-    Assert.assertEquals(expected, actual);
+    assertEquals(expected, actual);
 
     expected = new LinkedList<>(Arrays.asList("four", "hello", "one", "there", "three", "two"));
     actual = new LinkedList<>(Arrays.asList(helper.replaceVariables("${foobar-site/hello_there | append(foobar-site/data, \\,, true)}", configurations).split(",")));
     Collections.sort(expected);
     Collections.sort(actual);
-    Assert.assertEquals(expected, actual);
+    assertEquals(expected, actual);
 
     expected = new LinkedList<>(Arrays.asList("four", "hello", "one", "there", "three", "two"));
     actual = new LinkedList<>(Arrays.asList(helper.replaceVariables("${foobar-site/hello_there_one | append(foobar-site/data, \\,, true)}", configurations).split(",")));
     Collections.sort(expected);
     Collections.sort(actual);
-    Assert.assertEquals(expected, actual);
+    assertEquals(expected, actual);
 
     expected = new LinkedList<>(Arrays.asList("four", "hello", "one", "one", "there", "three", "two"));
     actual = new LinkedList<>(Arrays.asList(helper.replaceVariables("${foobar-site/hello_there_one | append(foobar-site/data, \\,, false)}", configurations).split(",")));
     Collections.sort(expected);
     Collections.sort(actual);
-    Assert.assertEquals(expected, actual);
+    assertEquals(expected, actual);
 
     // Test invalid number of arguments.
     try {
       helper.replaceVariables("${foobar-site/hello_there_one | append(foobar-site/data, \\,)}", configurations);
       Assert.fail("Expected IllegalArgumentException");
-    }
-    catch (IllegalArgumentException e) {
+    } catch (IllegalArgumentException e) {
       // Ignore this is expected.
     }
 
-    Assert.assertEquals("test=unit.test", helper.replaceVariables("test=${realm|toLower()}", configurations));
-  
-    Assert.assertEquals("PLAINTEXTSASL://localhost:6667", helper.replaceVariables("${kafka-broker/listeners|replace(\\bPLAINTEXT\\b,PLAINTEXTSASL)}", configurations)); 
+    assertEquals("test=unit.test", helper.replaceVariables("test=${realm|toLower()}", configurations));
+
+    assertEquals("PLAINTEXTSASL://localhost:6667", helper.replaceVariables("${kafka-broker/listeners|replace(\\bPLAINTEXT\\b,PLAINTEXTSASL)}", configurations));
   }
 
-}
+  @Test
+  public void testReplacePrincipalWithPrimary() throws AmbariException {
+    Map<String, Map<String, String>> config = new HashMap<String, Map<String, String>>() {
+      {
+        put("principals", new HashMap<String, String>() {{
+          put("resource_manager_rm", "rm/HOST@EXAMPLE.COM");
+          put("hive_server_hive", "hive@EXAMPLE.COM");
+          put("hdfs", "hdfs");
+        }});
+      }
+    };
+    assertEquals("hdfs", helper.replaceVariables("${principals/hdfs|principalPrimary()}", config));
+    assertEquals("rm", helper.replaceVariables("${principals/resource_manager_rm|principalPrimary()}", config));
+    assertEquals("hive", helper.replaceVariables("${principals/hive_server_hive|principalPrimary()}", config));
+  }
+}
\ No newline at end of file


[16/50] [abbrv] ambari git commit: AMBARI-21096. ADDENDUM. Provide additional logging for config audit log (alejandro)

Posted by nc...@apache.org.
AMBARI-21096. ADDENDUM. Provide additional logging for config audit log (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bdadb704
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bdadb704
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bdadb704

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: bdadb704b8982915575ea65d3c42447abcb75d6e
Parents: e801b41
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Wed Jun 7 11:28:46 2017 -0700
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Wed Jun 7 15:15:38 2017 -0700

----------------------------------------------------------------------
 .../server/controller/AmbariManagementControllerImpl.java       | 5 ++++-
 .../server/controller/internal/ConfigGroupResourceProvider.java | 3 ++-
 2 files changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bdadb704/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index b67b45b..1eeb82b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -1777,7 +1777,10 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
             }
           }
           note = cr.getServiceConfigVersionNote();
-          configs.add(cluster.getConfig(configType, cr.getVersionTag()));
+          Config config = cluster.getConfig(configType, cr.getVersionTag());
+          if (null != config) {
+            configs.add(config);
+          }
         }
         if (!configs.isEmpty()) {
           Map<String, Config> existingConfigTypeToConfig = new HashMap();

http://git-wip-us.apache.org/repos/asf/ambari/blob/bdadb704/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
index d2b4a84..cc23177 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
@@ -657,10 +657,11 @@ public class ConfigGroupResourceProvider extends
         serviceName = requestServiceName;
       }
 
+      int numHosts = (null != configGroup.getHosts()) ? configGroup.getHosts().size() : 0;
       configLogger.info("(configchange) Updating configuration group host membership or config value. cluster: '{}', changed by: '{}', " +
               "service_name: '{}', config group: '{}', tag: '{}', num hosts in config group: '{}', note: '{}'",
           cluster.getClusterName(), getManagementController().getAuthName(),
-          serviceName, request.getGroupName(), request.getTag(), configGroup.getHosts().size(), request.getServiceConfigVersionNote());
+          serviceName, request.getGroupName(), request.getTag(), numHosts, request.getServiceConfigVersionNote());
 
       if (!request.getConfigs().isEmpty()) {
         List<String> affectedConfigTypeList = new ArrayList(request.getConfigs().keySet());


[08/50] [abbrv] ambari git commit: AMBARI-20884. Compilation error due to import from relocated package

Posted by nc...@apache.org.
AMBARI-20884. Compilation error due to import from relocated package


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e61fea51
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e61fea51
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e61fea51

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: e61fea51b5ffb9c74f746810713a7d9f1f27184f
Parents: d8d5868
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Tue Jun 6 13:08:21 2017 +0200
Committer: Attila Doroszlai <ad...@hortonworks.com>
Committed: Tue Jun 6 13:09:50 2017 +0200

----------------------------------------------------------------------
 .../java/org/apache/ambari/server/orm/entities/UpgradeEntity.java | 3 ++-
 .../apache/ambari/server/orm/entities/UpgradeHistoryEntity.java   | 3 ++-
 .../ambari/server/serveraction/upgrades/UpgradeActionTest.java    | 2 +-
 3 files changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e61fea51/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
index 975d6a6..43b2e08 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
@@ -41,7 +41,8 @@ import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.hadoop.metrics2.sink.relocated.google.common.base.Objects;
+
+import com.google.common.base.Objects;
 
 /**
  * Models the data representation of an upgrade

http://git-wip-us.apache.org/repos/asf/ambari/blob/e61fea51/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeHistoryEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeHistoryEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeHistoryEntity.java
index 8bfafd3..0f7ac72 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeHistoryEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeHistoryEntity.java
@@ -31,7 +31,8 @@ import javax.persistence.TableGenerator;
 import javax.persistence.UniqueConstraint;
 
 import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.hadoop.metrics2.sink.relocated.google.common.base.Objects;
+
+import com.google.common.base.Objects;
 
 /**
  * The {@link UpgradeHistoryEntity} represents the version history of components

http://git-wip-us.apache.org/repos/asf/ambari/blob/e61fea51/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
index 35fffda..b6fa6d2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
@@ -79,7 +79,7 @@ import org.apache.ambari.server.state.UpgradeState;
 import org.apache.ambari.server.state.stack.UpgradePack;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.apache.ambari.server.utils.EventBusSynchronizer;
-import org.apache.hadoop.metrics2.sink.relocated.commons.lang.StringUtils;
+import org.apache.commons.lang.StringUtils;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
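
The import fix above replaces the shaded Guava copy that ships inside the metrics sink (org.apache.hadoop.metrics2.sink.relocated...) with the real com.google.common.base.Objects, so server entities no longer depend on a relocated class from an unrelated jar. A rough sketch of the kind of usage these entity classes rely on; the field names below are illustrative and not taken from the commit:

    import com.google.common.base.Objects;

    // Illustrative entity only; real classes combine this with EqualsBuilder.
    class UpgradeEntitySketch {
      private Long upgradeId;
      private Long clusterId;

      @Override
      public int hashCode() {
        // Guava combines the fields into a single hash, null-safely.
        return Objects.hashCode(upgradeId, clusterId);
      }

      @Override
      public boolean equals(Object obj) {
        if (this == obj) {
          return true;
        }
        if (!(obj instanceof UpgradeEntitySketch)) {
          return false;
        }
        UpgradeEntitySketch other = (UpgradeEntitySketch) obj;
        // Null-safe field comparison, the same behaviour the relocated copy provided.
        return Objects.equal(upgradeId, other.upgradeId)
            && Objects.equal(clusterId, other.clusterId);
      }
    }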


[07/50] [abbrv] ambari git commit: AMBARI-21168. Deleting host from cluster leaves Ambari in inconsistent state (intermittently) (magyari_sandor)

Posted by nc...@apache.org.
AMBARI-21168. Deleting host from cluster leaves Ambari in inconsistent state (intermittently) (magyari_sandor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d8d58688
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d8d58688
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d8d58688

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d8d586888d415d1c429ff6514e5b8435f6cb7e47
Parents: 0dd9fbf
Author: Sandor Magyari <sm...@hortonworks.com>
Authored: Fri Jun 2 15:38:06 2017 +0200
Committer: Sandor Magyari <sm...@hortonworks.com>
Committed: Tue Jun 6 13:08:33 2017 +0200

----------------------------------------------------------------------
 .../server/state/svccomphost/ServiceComponentHostImpl.java      | 5 +++++
 1 file changed, 5 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d8d58688/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
index bb51733..9704dc5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
@@ -871,6 +871,7 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
   }
 
   @Override
+  @Transactional
   public void setState(State state) {
     stateMachine.setCurrentState(state);
     HostComponentStateEntity stateEntity = getStateEntity();
@@ -899,6 +900,7 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
   }
 
   @Override
+  @Transactional
   public void setVersion(String version) {
     HostComponentStateEntity stateEntity = getStateEntity();
     if (stateEntity != null) {
@@ -926,6 +928,7 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
   }
 
   @Override
+  @Transactional
   public void setSecurityState(SecurityState securityState) {
     HostComponentStateEntity stateEntity = getStateEntity();
     if (stateEntity != null) {
@@ -975,6 +978,7 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
    * @param upgradeState  the upgrade state
    */
   @Override
+  @Transactional
   public void setUpgradeState(UpgradeState upgradeState) {
     HostComponentStateEntity stateEntity = getStateEntity();
     if (stateEntity != null) {
@@ -1003,6 +1007,7 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
 
 
   @Override
+  @Transactional
   public void handleEvent(ServiceComponentHostEvent event)
       throws InvalidStateTransitionException {
     if (LOG.isDebugEnabled()) {
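
The fix adds @Transactional to the state-mutating setters so that each update of the backing HostComponentStateEntity is flushed and committed in its own transaction rather than depending on the caller's session. A minimal sketch of the pattern, assuming a Guice-persist style @Transactional (the bare annotation in the diff does not show the import) and using stand-in entity/DAO types rather than the real Ambari classes:

    import com.google.inject.persist.Transactional;

    // Stand-in entity and DAO, not the real Ambari types.
    class StateEntity {
      private String version;

      void setVersion(String version) {
        this.version = version;
      }
    }

    interface StateDAO {
      StateEntity merge(StateEntity entity);   // JPA-style merge
    }

    class HostComponentSketch {
      private final StateDAO dao;
      private StateEntity entity;

      HostComponentSketch(StateDAO dao, StateEntity entity) {
        this.dao = dao;
        this.entity = entity;
      }

      // The annotation makes the merge run inside a transaction even when the
      // caller has not opened one, so the mutation cannot be silently lost.
      @Transactional
      public void setVersion(String version) {
        if (entity != null) {
          entity.setVersion(version);
          entity = dao.merge(entity);
        }
      }
    }

With Guice-persist the annotation only takes effect on injector-created instances, which is why annotating the setters directly (rather than relying on callers) is the safer option here.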


[29/50] [abbrv] ambari git commit: AMBARI-21137. Blueprint export should allow tokenized values in SingleHostUpdater (Amruta Borkar via alejandro)

Posted by nc...@apache.org.
AMBARI-21137. Blueprint export should allow tokenized values in SingleHostUpdater (Amruta Borkar via alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b98f07f9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b98f07f9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b98f07f9

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b98f07f9093a0b9635443f317e96768b2d8b8ef7
Parents: 9c302dc
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Thu Jun 8 10:33:06 2017 -0700
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Thu Jun 8 10:33:06 2017 -0700

----------------------------------------------------------------------
 .../BlueprintConfigurationProcessor.java         | 19 ++++++++++++++++++-
 .../BlueprintConfigurationProcessorTest.java     |  3 +++
 2 files changed, 21 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b98f07f9/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index 508bf15..7ebefdd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -139,6 +139,11 @@ public class BlueprintConfigurationProcessor {
   private static Pattern LOCALHOST_PORT_REGEX = Pattern.compile("localhost:?(\\d+)?");
 
   /**
+   * Compiled regex for placeholder
+   */
+  private static final Pattern PLACEHOLDER = Pattern.compile("\\{\\{.*\\}\\}");
+
+  /**
    * Special network address
    */
   private static String BIND_ALL_IP_ADDRESS = "0.0.0.0";
@@ -1133,7 +1138,8 @@ public class BlueprintConfigurationProcessor {
           if (! matchedHost &&
               ! isNameServiceProperty(propertyName) &&
               ! isSpecialNetworkAddress(propValue)  &&
-              ! isUndefinedAddress(propValue)) {
+              ! isUndefinedAddress(propValue) &&
+              ! isPlaceholder(propValue)) {
 
             configuration.removeProperty(type, propertyName);
           }
@@ -1143,6 +1149,17 @@ public class BlueprintConfigurationProcessor {
   }
 
   /**
+   * Determine if a property is a placeholder
+   *
+   * @param propertyValue  property value
+   *
+   * @return true if the property has format "{{%s}}"
+   */
+  private static boolean isPlaceholder(String propertyValue) {
+    return PLACEHOLDER.matcher(propertyValue).find();
+  }
+
+  /**
    * Determines if a given property name's value can include
    *   nameservice references instead of host names.
    *

http://git-wip-us.apache.org/repos/asf/ambari/blob/b98f07f9/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
index 24fc3c7..ca579ea 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
@@ -426,6 +426,7 @@ public class BlueprintConfigurationProcessorTest extends EasyMockSupport {
     Map<String, Map<String, String>> group2Properties = new HashMap<>();
     Map<String, String> group2YarnSiteProps = new HashMap<>();
     group2YarnSiteProps.put("yarn.resourcemanager.resource-tracker.address", "testhost");
+    group2YarnSiteProps.put("yarn.resourcemanager.webapp.https.address", "{{rm_host}}");
     group2Properties.put("yarn-site", group2YarnSiteProps);
     // host group config -> BP config -> cluster scoped config
     Configuration group2BPConfiguration = new Configuration(Collections.<String, Map<String, String>>emptyMap(),
@@ -449,6 +450,8 @@ public class BlueprintConfigurationProcessorTest extends EasyMockSupport {
     assertEquals("%HOSTGROUP::group1%", properties.get("yarn-site").get("yarn.resourcemanager.hostname"));
     assertEquals("%HOSTGROUP::group1%",
       group2Configuration.getPropertyValue("yarn-site", "yarn.resourcemanager.resource-tracker.address"));
+    assertNotNull("Placeholder property should not have been removed.",
+      group2Configuration.getPropertyValue("yarn-site", "yarn.resourcemanager.webapp.https.address"));
   }
 
   @Test
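
During blueprint export the processor strips property values that still point at concrete hosts; the new isPlaceholder guard keeps values that are unresolved {{...}} tokens so they are exported as-is. A self-contained illustration of the regex; the host name and property values below are just examples:

    import java.util.regex.Pattern;

    class PlaceholderCheckSketch {
      private static final Pattern PLACEHOLDER = Pattern.compile("\\{\\{.*\\}\\}");

      static boolean isPlaceholder(String value) {
        return PLACEHOLDER.matcher(value).find();
      }

      public static void main(String[] args) {
        System.out.println(isPlaceholder("{{rm_host}}"));             // true  -> kept in the export
        System.out.println(isPlaceholder("c6401.ambari.apache.org")); // false -> subject to removal
      }
    }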


[24/50] [abbrv] ambari git commit: AMBARI-21054. Add ppc as a new OS for User. (aonishuk)

Posted by nc...@apache.org.
AMBARI-21054. Add ppc as a new OS for User. (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/aa6b3308
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/aa6b3308
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/aa6b3308

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: aa6b3308c44b1e0e74edc41e96e078e3597bed75
Parents: 89797ea
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Jun 8 17:11:42 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Jun 8 17:11:42 2017 +0300

----------------------------------------------------------------------
 .../AmbariManagementControllerImpl.java           |  2 +-
 .../AmbariManagementControllerImplTest.java       | 18 ++++++++++--------
 2 files changed, 11 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/aa6b3308/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 1eeb82b..2a9d6c9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -315,7 +315,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
   @Inject
   private StackDAO stackDAO;
   @Inject
-  private OsFamily osFamily;
+  protected OsFamily osFamily;
 
   /**
    * The KerberosHelper to help setup for enabling for disabling Kerberos

http://git-wip-us.apache.org/repos/asf/ambari/blob/aa6b3308/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
index 0312579..c0e3ef1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
@@ -134,7 +134,6 @@ public class AmbariManagementControllerImplTest {
   private static final AmbariMetaInfo ambariMetaInfo = createMock(AmbariMetaInfo.class);
   private static final Users users = createMock(Users.class);
   private static final AmbariSessionManager sessionManager = createNiceMock(AmbariSessionManager.class);
-  private static final OsFamily osFamily = createNiceMock(OsFamily.class);
 
   @BeforeClass
   public static void setupAuthentication() {
@@ -146,7 +145,7 @@ public class AmbariManagementControllerImplTest {
 
   @Before
   public void before() throws Exception {
-    reset(ldapDataPopulator, clusters, actionDBAccessor, ambariMetaInfo, users, sessionManager, osFamily);
+    reset(ldapDataPopulator, clusters, actionDBAccessor, ambariMetaInfo, users, sessionManager);
   }
 
   @Test
@@ -1990,7 +1989,7 @@ public class AmbariManagementControllerImplTest {
   @Test
   public void testPopulateServicePackagesInfo() throws Exception {
     Capture<AmbariManagementController> controllerCapture = EasyMock.newCapture();
-    Injector injector = Guice.createInjector(Modules.override(new InMemoryDefaultTestModule()).with(new MockModule()));
+    Injector injector = createStrictMock(Injector.class);
     MaintenanceStateHelper maintHelper = createNiceMock(MaintenanceStateHelper.class);
 
     ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
@@ -2025,15 +2024,18 @@ public class AmbariManagementControllerImplTest {
     expect(injector.getInstance(Gson.class)).andReturn(null);
     expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(maintHelper).anyTimes();
     expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class));
+    
+    OsFamily osFamilyMock = createNiceMock(OsFamily.class);
 
-    replay(maintHelper, injector, clusters, serviceInfo);
+    EasyMock.expect(osFamilyMock.isVersionedOsFamilyExtendedByVersionedFamily("testOSFamily", "testOSFamily")).andReturn(true).times(3);
+    replay(maintHelper, injector, clusters, serviceInfo, osFamilyMock);
 
     AmbariManagementControllerImplTest.NestedTestClass nestedTestClass = this.new NestedTestClass(null, clusters,
-        injector);
+        injector, osFamilyMock);
 
     ServiceOsSpecific serviceOsSpecific = nestedTestClass.populateServicePackagesInfo(serviceInfo, hostParams, osFamily);
 
-    assertEquals(serviceOsSpecific.getPackages().size(), 3);
+    assertEquals(3, serviceOsSpecific.getPackages().size());
   }
 
   @Test
@@ -2200,14 +2202,14 @@ public class AmbariManagementControllerImplTest {
       binder.bind(AmbariMetaInfo.class).toInstance(ambariMetaInfo);
       binder.bind(Users.class).toInstance(users);
       binder.bind(AmbariSessionManager.class).toInstance(sessionManager);
-      binder.bind(OsFamily.class).toInstance(osFamily);
     }
   }
 
   private class NestedTestClass extends AmbariManagementControllerImpl {
 
-    public NestedTestClass(ActionManager actionManager, Clusters clusters, Injector injector) throws Exception {
+    public NestedTestClass(ActionManager actionManager, Clusters clusters, Injector injector, OsFamily osFamilyMock) throws Exception {
       super(actionManager, clusters, injector);
+      this.osFamily = osFamilyMock;
     }
 
 //    public ServiceOsSpecific testPopulateServicePackagesInfo(ServiceInfo serviceInfo, Map<String, String> hostParams,
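
The test change drops the shared OsFamily binding from the mock module and instead relies on the now-protected field, letting the nested test subclass inject a per-test mock with explicit expectations. A compact sketch of that pattern with EasyMock; the interface and class names below are illustrative only:

    import static org.easymock.EasyMock.createNiceMock;
    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.replay;

    // Illustrative stand-ins for the production types.
    interface OsCheck {
      boolean matches(String family);
    }

    class Controller {
      protected OsCheck osCheck;   // protected so a test subclass can replace it

      int packagesFor(String family) {
        return osCheck.matches(family) ? 3 : 0;
      }
    }

    class ControllerForTest extends Controller {
      ControllerForTest(OsCheck mock) {
        this.osCheck = mock;       // inject the mock directly, no Guice module needed
      }
    }

    class Example {
      public static void main(String[] args) {
        OsCheck mock = createNiceMock(OsCheck.class);
        expect(mock.matches("testOSFamily")).andReturn(true).times(1);
        replay(mock);
        System.out.println(new ControllerForTest(mock).packagesFor("testOSFamily")); // prints 3
      }
    }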


[28/50] [abbrv] ambari git commit: AMBARI-21197 Disable Kerberos UI step-by-step panel misaligned with the Complete button at the bottom of the page (dili)


Posted by nc...@apache.org.
AMBARI-21197 Disable Kerberos UI step-by-step panel misaligned with the Complete button at the bottom of the page (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9c302dcd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9c302dcd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9c302dcd

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 9c302dcd237af0ba9b5099ddc3bf7b94e29496cd
Parents: 7e3641e
Author: Di Li <di...@apache.org>
Authored: Thu Jun 8 13:21:06 2017 -0400
Committer: Di Li <di...@apache.org>
Committed: Thu Jun 8 13:21:06 2017 -0400

----------------------------------------------------------------------
 ambari-web/app/templates/common/progress.hbs             | 4 ++++
 ambari-web/app/views/main/admin/kerberos/disable_view.js | 3 ++-
 2 files changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9c302dcd/ambari-web/app/templates/common/progress.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/progress.hbs b/ambari-web/app/templates/common/progress.hbs
index ab59328..417498a 100644
--- a/ambari-web/app/templates/common/progress.hbs
+++ b/ambari-web/app/templates/common/progress.hbs
@@ -15,7 +15,11 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 }}
+{{#if view.disableKerberos}}
+<div id="common-progress-page" class="wizard-content col-md-12">
+{{else}}
 <div id="common-progress-page" class="wizard-content col-md-9">
+{{/if}}
   <h4 class="step-title">{{view.headerTitle}}</h4>
 
   <div {{bindAttr class="view.noticeClass"}}>{{{view.notice}}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c302dcd/ambari-web/app/views/main/admin/kerberos/disable_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/kerberos/disable_view.js b/ambari-web/app/views/main/admin/kerberos/disable_view.js
index f7b3527..c3f8902 100644
--- a/ambari-web/app/views/main/admin/kerberos/disable_view.js
+++ b/ambari-web/app/views/main/admin/kerberos/disable_view.js
@@ -37,6 +37,7 @@ App.KerberosDisableView = App.KerberosProgressPageView.extend({
 
   msgColor: 'alert-info',
 
-  isSimpleModal: true
+  isSimpleModal: true,
 
+  disableKerberos: true
 });


[34/50] [abbrv] ambari git commit: AMBARI-20929: Changes in Zeppelin JDBC config after ZEPPELIN-2367 (Prabhjyot Singh via nitirajrathore)

Posted by nc...@apache.org.
AMBARI-20929: Changes in Zeppelin JDBC config after ZEPPELIN-2367 (Prabhjyot Singh via nitirajrathore)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/215bd7aa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/215bd7aa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/215bd7aa

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 215bd7aaa3cdefe1bdc9242c4fa8e9d0106d8740
Parents: 57bb136
Author: Nitiraj Singh Rathore <ni...@gmail.com>
Authored: Fri Jun 9 14:19:38 2017 +0530
Committer: Nitiraj Singh Rathore <ni...@gmail.com>
Committed: Fri Jun 9 14:20:37 2017 +0530

----------------------------------------------------------------------
 .../common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/215bd7aa/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
index c2f81639..3d516b0 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
@@ -302,7 +302,7 @@ class Master(Script):
                                                        params.hbase_zookeeper_quorum + ':' + \
                                                        params.zookeeper_znode_parent
         else:
-          interpreter['properties']['zeppelin.jdbc.auth.type'] = ""
+          interpreter['properties']['zeppelin.jdbc.auth.type'] = "SIMPLE"
           interpreter['properties']['zeppelin.jdbc.principal'] = ""
           interpreter['properties']['zeppelin.jdbc.keytab.location'] = ""
       elif interpreter['group'] == 'sh':
@@ -354,6 +354,7 @@ class Master(Script):
           interpreter['properties']['hive.driver'] = 'org.apache.hive.jdbc.HiveDriver'
           interpreter['properties']['hive.user'] = 'hive'
           interpreter['properties']['hive.password'] = ''
+          interpreter['properties']['hive.proxy.user.property'] = 'hive.server2.proxy.user'
           if params.hive_server2_support_dynamic_service_discovery:
             interpreter['properties']['hive.url'] = 'jdbc:hive2://' + \
                                                  params.hive_zookeeper_quorum + \
@@ -367,6 +368,7 @@ class Master(Script):
           interpreter['properties'][hive_interactive_properties_key + '.driver'] = 'org.apache.hive.jdbc.HiveDriver'
           interpreter['properties'][hive_interactive_properties_key + '.user'] = 'hive'
           interpreter['properties'][hive_interactive_properties_key + '.password'] = ''
+          interpreter['properties'][hive_interactive_properties_key + '.property'] = 'hive.server2.proxy.user'
           if params.hive_server2_support_dynamic_service_discovery:
             interpreter['properties'][hive_interactive_properties_key + '.url'] = 'jdbc:hive2://' + \
                                                     params.hive_zookeeper_quorum + \


[02/50] [abbrv] ambari git commit: AMBARI-21164. Upgrades (RU/EU) : "stack.upgrade.bypass.prechecks" config is not honored while doing upgrades with bad entries in "execution_command" table. (aonishuk)

Posted by nc...@apache.org.
AMBARI-21164. Upgrades (RU/EU) : "stack.upgrade.bypass.prechecks" config is not honored while doing upgrades with bad entries in "execution_command" table. (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b3425c98
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b3425c98
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b3425c98

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b3425c9841b4153b1cf3b15dc6f55e67f1754f3b
Parents: 499814a
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Jun 6 13:39:20 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Jun 6 13:39:20 2017 +0300

----------------------------------------------------------------------
 .../server/checks/AbstractCheckDescriptor.java  |  7 ----
 .../checks/ServiceCheckValidityCheck.java       |  9 +++--
 .../PreUpgradeCheckResourceProvider.java        |  6 ++-
 .../apache/ambari/server/state/CheckHelper.java | 37 ++++++++-----------
 .../checks/ServiceCheckValidityCheckTest.java   | 39 +++++++++++++++++++-
 .../PreUpgradeCheckResourceProviderTest.java    | 15 +++++++-
 .../sample/checks/SampleServiceCheck.java       |  4 --
 .../ambari/server/state/CheckHelperTest.java    | 36 ++++++++++++------
 8 files changed, 104 insertions(+), 49 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b3425c98/ambari-server/src/main/java/org/apache/ambari/server/checks/AbstractCheckDescriptor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/AbstractCheckDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/AbstractCheckDescriptor.java
index a0affd0..fddded7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/AbstractCheckDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/AbstractCheckDescriptor.java
@@ -335,11 +335,4 @@ public abstract class AbstractCheckDescriptor {
     return false;
   }
 
-  /**
-   * Return a boolean indicating whether or not configs allow bypassing errors during the RU/EU PreChecks.
-   * @return
-   */
-  public boolean isStackUpgradeAllowedToBypassPreChecks() {
-    return config.isUpgradePrecheckBypass();
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b3425c98/ambari-server/src/main/java/org/apache/ambari/server/checks/ServiceCheckValidityCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/ServiceCheckValidityCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServiceCheckValidityCheck.java
index 750b25e..ad68a2c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/ServiceCheckValidityCheck.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServiceCheckValidityCheck.java
@@ -78,8 +78,10 @@ public class ServiceCheckValidityCheck extends AbstractCheckDescriptor {
   private static SortRequest sortRequest = new SortRequestImpl(sortRequestProperties);
   private static final PageRequestImpl PAGE_REQUEST = new PageRequestImpl(PageRequest.StartingPoint.End, 1000, 0, null, null);
   private static final RequestImpl REQUEST = new RequestImpl(null, null, null, null, sortRequest, PAGE_REQUEST);
-  private static final Predicate PREDICATE = new PredicateBuilder().property(TaskResourceProvider.TASK_COMMAND_PROPERTY_ID)
-      .equals(RoleCommand.SERVICE_CHECK.name()).toPredicate();
+  private static final Predicate PREDICATE = new PredicateBuilder()
+    .property(TaskResourceProvider.TASK_COMMAND_PROPERTY_ID).equals(RoleCommand.SERVICE_CHECK.name())
+    .and().property(TaskResourceProvider.TASK_START_TIME_PROPERTY_ID).greaterThan(-1)
+    .toPredicate();
 
 
 
@@ -89,6 +91,7 @@ public class ServiceCheckValidityCheck extends AbstractCheckDescriptor {
   @Inject
   Provider<HostRoleCommandDAO> hostRoleCommandDAOProvider;
 
+
   /**
    * Constructor.
    */
@@ -147,7 +150,7 @@ public class ServiceCheckValidityCheck extends AbstractCheckDescriptor {
 
       boolean serviceCheckWasExecuted = false;
       for (HostRoleCommandEntity command : latestTimestamps.values()) {
-        if (command.getCommandDetail().contains(serviceName)) {
+        if (null !=  command.getCommandDetail() && command.getCommandDetail().contains(serviceName)) {
           serviceCheckWasExecuted = true;
           Long serviceCheckTimestamp = command.getStartTime();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b3425c98/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
index afbd981..8f00456 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
@@ -29,6 +29,7 @@ import org.apache.ambari.server.ParentObjectNotFoundException;
 import org.apache.ambari.server.StaticallyInject;
 import org.apache.ambari.server.checks.AbstractCheckDescriptor;
 import org.apache.ambari.server.checks.UpgradeCheckRegistry;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.PrereqCheckRequest;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
@@ -95,6 +96,9 @@ public class PreUpgradeCheckResourceProvider extends ReadOnlyResourceProvider {
   private static Provider<UpgradeHelper> upgradeHelper;
 
   @Inject
+  private static Provider<Configuration> config;
+
+  @Inject
   private static CheckHelper checkHelper;
 
   private static Set<String> pkPropertyIds = Collections.singleton(UPGRADE_CHECK_ID_PROPERTY_ID);
@@ -206,7 +210,7 @@ public class PreUpgradeCheckResourceProvider extends ReadOnlyResourceProvider {
         LOG.error("Failed to register custom prechecks for the services", e);
       }
 
-      for (PrerequisiteCheck prerequisiteCheck : checkHelper.performChecks(upgradeCheckRequest, upgradeChecksToRun)) {
+      for (PrerequisiteCheck prerequisiteCheck : checkHelper.performChecks(upgradeCheckRequest, upgradeChecksToRun, config.get())) {
         final Resource resource = new ResourceImpl(Resource.Type.PreUpgradeCheck);
         setResourceProperty(resource, UPGRADE_CHECK_ID_PROPERTY_ID, prerequisiteCheck.getId(), requestedIds);
         setResourceProperty(resource, UPGRADE_CHECK_CHECK_PROPERTY_ID, prerequisiteCheck.getDescription(), requestedIds);

http://git-wip-us.apache.org/repos/asf/ambari/blob/b3425c98/ambari-server/src/main/java/org/apache/ambari/server/state/CheckHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/CheckHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/CheckHelper.java
index 5ea039c..f3fbc63 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/CheckHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/CheckHelper.java
@@ -23,6 +23,7 @@ import java.util.List;
 
 import org.apache.ambari.server.ClusterNotFoundException;
 import org.apache.ambari.server.checks.AbstractCheckDescriptor;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.PrereqCheckRequest;
 import org.apache.ambari.server.state.stack.PrereqCheckStatus;
 import org.apache.ambari.server.state.stack.PrerequisiteCheck;
@@ -86,44 +87,38 @@ public class CheckHelper {
    * @return list of pre-requisite check results
    */
   public List<PrerequisiteCheck> performChecks(PrereqCheckRequest request,
-      List<AbstractCheckDescriptor> checksRegistry) {
+                                               List<AbstractCheckDescriptor> checksRegistry, Configuration config) {
 
     final String clusterName = request.getClusterName();
     final List<PrerequisiteCheck> prerequisiteCheckResults = new ArrayList<>();
+    final boolean canBypassPreChecks = config.isUpgradePrecheckBypass();
 
     List<DescriptorPreCheck> applicablePreChecks = getApplicablePrerequisiteChecks(request, checksRegistry);
+
     for (DescriptorPreCheck descriptorPreCheck : applicablePreChecks) {
       AbstractCheckDescriptor checkDescriptor = descriptorPreCheck.descriptor;
       PrerequisiteCheck prerequisiteCheck = descriptorPreCheck.check;
       try {
         checkDescriptor.perform(prerequisiteCheck, request);
-
-        boolean canBypassPreChecks = checkDescriptor.isStackUpgradeAllowedToBypassPreChecks();
-
-        if (prerequisiteCheck.getStatus() == PrereqCheckStatus.FAIL && canBypassPreChecks) {
-          LOG.error("Check {} failed but stack upgrade is allowed to bypass failures. Error to bypass: {}. Failed on: {}",
-              checkDescriptor.getDescription().name(),
-              prerequisiteCheck.getFailReason(),
-              StringUtils.join(prerequisiteCheck.getFailedOn(), ", "));
-          prerequisiteCheck.setStatus(PrereqCheckStatus.BYPASS);
-        }
-        prerequisiteCheckResults.add(prerequisiteCheck);
-
-        request.addResult(checkDescriptor.getDescription(), prerequisiteCheck.getStatus());
       } catch (ClusterNotFoundException ex) {
-        prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
         prerequisiteCheck.setFailReason("Cluster with name " + clusterName + " doesn't exists");
-        prerequisiteCheckResults.add(prerequisiteCheck);
-
-        request.addResult(checkDescriptor.getDescription(), prerequisiteCheck.getStatus());
+        prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
       } catch (Exception ex) {
         LOG.error("Check " + checkDescriptor.getDescription().name() + " failed", ex);
-        prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
         prerequisiteCheck.setFailReason("Unexpected server error happened");
-        prerequisiteCheckResults.add(prerequisiteCheck);
+        prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
+      }
 
-        request.addResult(checkDescriptor.getDescription(), prerequisiteCheck.getStatus());
+      if (prerequisiteCheck.getStatus() == PrereqCheckStatus.FAIL && canBypassPreChecks) {
+        LOG.error("Check {} failed but stack upgrade is allowed to bypass failures. Error to bypass: {}. Failed on: {}",
+          checkDescriptor.getDescription().name(),
+          prerequisiteCheck.getFailReason(),
+          StringUtils.join(prerequisiteCheck.getFailedOn(), ", "));
+        prerequisiteCheck.setStatus(PrereqCheckStatus.BYPASS);
       }
+
+      prerequisiteCheckResults.add(prerequisiteCheck);
+      request.addResult(checkDescriptor.getDescription(), prerequisiteCheck.getStatus());
     }
 
     return prerequisiteCheckResults;
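
After this refactoring, performChecks reads stack.upgrade.bypass.prechecks once via the injected Configuration and applies the bypass after the try/catch, so a check that throws (for example on a malformed execution_command row) is downgraded to BYPASS just like one that merely reports FAIL. The control flow, reduced to a sketch with placeholder types standing in for PrereqCheckStatus and the check descriptors:

    import java.util.function.Supplier;

    enum Status { PASS, FAIL, BYPASS }

    class BypassSketch {
      static Status runCheck(Supplier<Status> check, boolean canBypass) {
        Status status;
        try {
          status = check.get();            // the check may report FAIL itself...
        } catch (Exception ex) {
          status = Status.FAIL;            // ...or blow up on bad persisted data
        }
        if (status == Status.FAIL && canBypass) {
          status = Status.BYPASS;          // bypass decided once, outside the catch blocks
        }
        return status;
      }

      public static void main(String[] args) {
        Supplier<Status> broken = () -> { throw new IllegalStateException("bad execution_command row"); };
        System.out.println(runCheck(broken, true));    // BYPASS
        System.out.println(runCheck(broken, false));   // FAIL
      }
    }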

http://git-wip-us.apache.org/repos/asf/ambari/blob/b3425c98/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
index 4d8a109..91fd72a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
@@ -17,6 +17,7 @@
  */
 package org.apache.ambari.server.checks;
 
+import static java.util.Arrays.asList;
 import static java.util.Collections.singletonList;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.eq;
@@ -117,6 +118,42 @@ public class ServiceCheckValidityCheckTest {
   }
 
   @Test
+  public void testWithNullCommandDetailAtCommand() throws AmbariException {
+    ServiceComponent serviceComponent = mock(ServiceComponent.class);
+    when(serviceComponent.isVersionAdvertised()).thenReturn(true);
+
+    when(service.getMaintenanceState()).thenReturn(MaintenanceState.OFF);
+    when(service.getServiceComponents()).thenReturn(ImmutableMap.of(SERVICE_COMPONENT_NAME, serviceComponent));
+
+    ServiceConfigEntity serviceConfigEntity = new ServiceConfigEntity();
+    serviceConfigEntity.setServiceName(SERVICE_NAME);
+    serviceConfigEntity.setCreateTimestamp(CONFIG_CREATE_TIMESTAMP);
+
+    HostRoleCommandEntity hostRoleCommandEntity1 = new HostRoleCommandEntity();
+    hostRoleCommandEntity1.setRoleCommand(RoleCommand.SERVICE_CHECK);
+    hostRoleCommandEntity1.setCommandDetail(null);
+    hostRoleCommandEntity1.setStartTime(SERVICE_CHECK_START_TIME);
+    hostRoleCommandEntity1.setRole(Role.ZOOKEEPER_SERVER);
+
+    HostRoleCommandEntity hostRoleCommandEntity2 = new HostRoleCommandEntity();
+    hostRoleCommandEntity2.setRoleCommand(RoleCommand.SERVICE_CHECK);
+    hostRoleCommandEntity2.setCommandDetail(COMMAND_DETAIL);
+    hostRoleCommandEntity2.setStartTime(SERVICE_CHECK_START_TIME);
+    hostRoleCommandEntity2.setRole(Role.HDFS_SERVICE_CHECK);
+
+    when(serviceConfigDAO.getLastServiceConfig(eq(CLUSTER_ID), eq(SERVICE_NAME))).thenReturn(serviceConfigEntity);
+    when(hostRoleCommandDAO.findAll(any(Request.class), any(Predicate.class))).thenReturn(asList(hostRoleCommandEntity1, hostRoleCommandEntity2));
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, CLUSTER_NAME);
+    try {
+      serviceCheckValidityCheck.perform(check, new PrereqCheckRequest(CLUSTER_NAME));
+    } catch (NullPointerException ex){
+      Assert.fail("serviceCheckValidityCheck failed due to null at start_time were not handled");
+    }
+    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
+  }
+
+  @Test
   public void testFailWhenServiceWithOutdatedServiceCheckExists() throws AmbariException {
     ServiceComponent serviceComponent = mock(ServiceComponent.class);
     when(serviceComponent.isVersionAdvertised()).thenReturn(true);
@@ -188,7 +225,7 @@ public class ServiceCheckValidityCheckTest {
     hostRoleCommandEntity2.setRole(Role.HDFS_SERVICE_CHECK);
 
     when(serviceConfigDAO.getLastServiceConfig(eq(CLUSTER_ID), eq(SERVICE_NAME))).thenReturn(serviceConfigEntity);
-    when(hostRoleCommandDAO.findAll(any(Request.class), any(Predicate.class))).thenReturn(Arrays.asList(hostRoleCommandEntity1, hostRoleCommandEntity2));
+    when(hostRoleCommandDAO.findAll(any(Request.class), any(Predicate.class))).thenReturn(asList(hostRoleCommandEntity1, hostRoleCommandEntity2));
 
     PrerequisiteCheck check = new PrerequisiteCheck(null, CLUSTER_NAME);
     serviceCheckValidityCheck.perform(check, new PrereqCheckRequest(CLUSTER_NAME));

http://git-wip-us.apache.org/repos/asf/ambari/blob/b3425c98/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProviderTest.java
index 5287c4d..2a48fa6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProviderTest.java
@@ -38,6 +38,7 @@ import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.checks.AbstractCheckDescriptor;
 import org.apache.ambari.server.checks.UpgradeCheckRegistry;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
@@ -84,6 +85,7 @@ public class PreUpgradeCheckResourceProviderTest {
 
     Clusters clusters = injector.getInstance(Clusters.class);
     UpgradeHelper upgradeHelper = injector.getInstance(UpgradeHelper.class);
+    Configuration configuration = injector.getInstance(Configuration.class);
 
     RepositoryVersionDAO repoDao = injector.getInstance(RepositoryVersionDAO.class);
     RepositoryVersionEntity repo = createNiceMock(RepositoryVersionEntity.class);
@@ -104,6 +106,7 @@ public class PreUpgradeCheckResourceProviderTest {
     Map<String, ServiceInfo> allServiceInfoMap = new HashMap<>();
     allServiceInfoMap.put("Service100", serviceInfo);
 
+    expect(configuration.isUpgradePrecheckBypass()).andReturn(false).anyTimes();
     // set expectations
     expect(managementController.getClusters()).andReturn(clusters).anyTimes();
     expect(managementController.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
@@ -135,7 +138,7 @@ public class PreUpgradeCheckResourceProviderTest {
 
     // replay
     replay(managementController, clusters, cluster, service, serviceInfo, repoDao, repo, upgradeHelper,
-        ambariMetaInfo, upgradePack, config, currentStackId, targetStackId, serviceFactory);
+        ambariMetaInfo, upgradePack, config, currentStackId, targetStackId, serviceFactory, configuration);
 
     ResourceProvider provider = getPreUpgradeCheckResourceProvider(managementController, injector);
     // create the request
@@ -198,6 +201,15 @@ public class PreUpgradeCheckResourceProviderTest {
     }
   }
 
+  static class TestConfigurationProvider implements Provider<Configuration> {
+    private static Configuration configuration = createNiceMock(Configuration.class);
+
+    @Override
+    public Configuration get(){
+      return configuration;
+    }
+  }
+
   static class TestUpgradeHelperProvider implements Provider<UpgradeHelper> {
     private static UpgradeHelper upgradeHelper = createNiceMock(UpgradeHelper.class);
 
@@ -216,6 +228,7 @@ public class PreUpgradeCheckResourceProviderTest {
         CheckHelper checkHelper = new CheckHelper();
         UpgradeCheckRegistry registry = new UpgradeCheckRegistry();
 
+        bind(Configuration.class).toProvider(TestConfigurationProvider.class);
         bind(AmbariManagementController.class).toInstance(createNiceMock(AmbariManagementController.class));
         bind(CheckHelper.class).toInstance(checkHelper);
         bind(Clusters.class).toProvider(TestClustersProvider.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/b3425c98/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java b/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java
index 1c16040..3a07928 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java
@@ -44,9 +44,5 @@ public class SampleServiceCheck extends AbstractCheckDescriptor {
     prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
   }
 
-  @Override
-  public boolean isStackUpgradeAllowedToBypassPreChecks() {
-    return false;
-  }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b3425c98/ambari-server/src/test/java/org/apache/ambari/server/state/CheckHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/CheckHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/CheckHelperTest.java
index 02e84df..a25ad08 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/CheckHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/CheckHelperTest.java
@@ -28,6 +28,7 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.checks.AbstractCheckDescriptor;
 import org.apache.ambari.server.checks.CheckDescription;
 import org.apache.ambari.server.checks.ServicesUpCheck;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.PrereqCheckRequest;
 import org.apache.ambari.server.orm.dao.HostVersionDAO;
 import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
@@ -63,15 +64,18 @@ public class CheckHelperTest {
   @Test
   public void testPreUpgradeCheck() throws Exception {
     final CheckHelper helper = new CheckHelper();
+    Configuration configuration = EasyMock.createNiceMock(Configuration.class);
     List<AbstractCheckDescriptor> updateChecksRegistry = new ArrayList<>();
     AbstractCheckDescriptor descriptor = EasyMock.createNiceMock(AbstractCheckDescriptor.class);
+
+    EasyMock.expect(configuration.isUpgradePrecheckBypass()).andReturn(false);
     descriptor.perform(EasyMock.<PrerequisiteCheck> anyObject(), EasyMock.<PrereqCheckRequest> anyObject());
     EasyMock.expectLastCall().times(1);
     EasyMock.expect(descriptor.isApplicable(EasyMock.<PrereqCheckRequest> anyObject())).andReturn(true);
-    EasyMock.replay(descriptor);
+    EasyMock.replay(descriptor, configuration);
     updateChecksRegistry.add(descriptor);
 
-    helper.performChecks(new PrereqCheckRequest("cluster"), updateChecksRegistry);
+    helper.performChecks(new PrereqCheckRequest("cluster"), updateChecksRegistry, configuration);
     EasyMock.verify(descriptor);
   }
 
@@ -81,12 +85,14 @@ public class CheckHelperTest {
   @Test
   public void testPreUpgradeCheckNotApplicable() throws Exception {
     final CheckHelper helper = new CheckHelper();
+    Configuration configuration = EasyMock.createNiceMock(Configuration.class);
     List<AbstractCheckDescriptor> updateChecksRegistry = new ArrayList<>();
     AbstractCheckDescriptor descriptor = EasyMock.createNiceMock(AbstractCheckDescriptor.class);
+    EasyMock.expect(configuration.isUpgradePrecheckBypass()).andReturn(false);
     EasyMock.expect(descriptor.isApplicable(EasyMock.<PrereqCheckRequest> anyObject())).andReturn(false);
-    EasyMock.replay(descriptor);
+    EasyMock.replay(descriptor, configuration);
     updateChecksRegistry.add(descriptor);
-    helper.performChecks(new PrereqCheckRequest("cluster"), updateChecksRegistry);
+    helper.performChecks(new PrereqCheckRequest("cluster"), updateChecksRegistry, configuration);
     EasyMock.verify(descriptor);
   }
 
@@ -98,14 +104,16 @@ public class CheckHelperTest {
     final CheckHelper helper = new CheckHelper();
     List<AbstractCheckDescriptor> updateChecksRegistry = new ArrayList<>();
     AbstractCheckDescriptor descriptor = EasyMock.createNiceMock(AbstractCheckDescriptor.class);
+    Configuration configuration = EasyMock.createNiceMock(Configuration.class);
 
+    EasyMock.expect(configuration.isUpgradePrecheckBypass()).andReturn(false);
     descriptor.perform(EasyMock.<PrerequisiteCheck> anyObject(), EasyMock.<PrereqCheckRequest> anyObject());
     EasyMock.expectLastCall().andThrow(new AmbariException("error"));
     EasyMock.expect(descriptor.isApplicable(EasyMock.<PrereqCheckRequest> anyObject())).andReturn(true);
     EasyMock.expect(descriptor.getDescription()).andReturn(CheckDescription.HOSTS_HEARTBEAT).anyTimes();
-    EasyMock.replay(descriptor);
+    EasyMock.replay(descriptor, configuration);
     updateChecksRegistry.add(descriptor);
-    final List<PrerequisiteCheck> upgradeChecks = helper.performChecks(new PrereqCheckRequest("cluster"), updateChecksRegistry);
+    final List<PrerequisiteCheck> upgradeChecks = helper.performChecks(new PrereqCheckRequest("cluster"), updateChecksRegistry, configuration);
     EasyMock.verify(descriptor);
     Assert.assertEquals(PrereqCheckStatus.FAIL, upgradeChecks.get(0).getStatus());
   }
@@ -118,13 +126,15 @@ public class CheckHelperTest {
     // This mock class extends CheckHelper and overrides the getPrerequisiteChecks method in order to return
     // a PrerequisiteCheck object whose status is FAIL.
     final CheckHelperMock helper =  new CheckHelperMock();
+    Configuration configuration = EasyMock.createNiceMock(Configuration.class);
     List<AbstractCheckDescriptor> updateChecksRegistry = new ArrayList<>();
 
     PrereqCheckRequest checkRequest = EasyMock.createNiceMock(PrereqCheckRequest.class);
+    EasyMock.expect(configuration.isUpgradePrecheckBypass()).andReturn(true);
     EasyMock.expect(checkRequest.getClusterName()).andReturn("c1").anyTimes();
-    EasyMock.replay(checkRequest);
+    EasyMock.replay(checkRequest, configuration);
 
-    final List<PrerequisiteCheck> upgradeChecks = helper.performChecks(checkRequest, updateChecksRegistry);
+    final List<PrerequisiteCheck> upgradeChecks = helper.performChecks(checkRequest, updateChecksRegistry, configuration);
     Assert.assertEquals(1, upgradeChecks.size());
     Assert.assertEquals(PrereqCheckStatus.BYPASS, upgradeChecks.get(0).getStatus());
   }
@@ -132,6 +142,7 @@ public class CheckHelperTest {
   @Test
   public void testPreUpgradeCheckClusterMissing() throws Exception {
     final Clusters clusters = Mockito.mock(Clusters.class);
+    Configuration configuration = EasyMock.createNiceMock(Configuration.class);
     Mockito.when(clusters.getCluster(Mockito.anyString())).thenAnswer(new Answer<Cluster>() {
       @Override
       public Cluster answer(InvocationOnMock invocation) throws Throwable {
@@ -164,11 +175,15 @@ public class CheckHelperTest {
     final CheckHelper helper = injector.getInstance(CheckHelper.class);
     List<AbstractCheckDescriptor> updateChecksRegistry = new ArrayList<>();
 
+    EasyMock.expect(configuration.isUpgradePrecheckBypass()).andReturn(false);
+
+    EasyMock.replay(configuration);
+
     // mocked Cluster has no services, so the check should always be PASS
     updateChecksRegistry.add(injector.getInstance(ServicesUpCheck.class));
-    List<PrerequisiteCheck> upgradeChecks = helper.performChecks(new PrereqCheckRequest("existing"), updateChecksRegistry);
+    List<PrerequisiteCheck> upgradeChecks = helper.performChecks(new PrereqCheckRequest("existing"), updateChecksRegistry, configuration);
     Assert.assertEquals(PrereqCheckStatus.PASS, upgradeChecks.get(0).getStatus());
-    upgradeChecks = helper.performChecks(new PrereqCheckRequest("non-existing"), updateChecksRegistry);
+    upgradeChecks = helper.performChecks(new PrereqCheckRequest("non-existing"), updateChecksRegistry, configuration);
     Assert.assertEquals(PrereqCheckStatus.FAIL, upgradeChecks.get(0).getStatus());
     //non existing cluster is an expected error
     Assert.assertTrue(!upgradeChecks.get(0).getFailReason().equals("Unexpected server error happened"));
@@ -191,7 +206,6 @@ public class CheckHelperTest {
         EasyMock.expect(descriptor.getDescription()).andReturn(description).anyTimes();
 
         // Allow bypassing failures
-        EasyMock.expect(descriptor.isStackUpgradeAllowedToBypassPreChecks()).andReturn(true);
         EasyMock.replay(descriptor);
 
         applicablePreChecks.add(new DescriptorPreCheck(descriptor, check));


[44/50] [abbrv] ambari git commit: AMBARI-19149. Code cleanup: concatenation in debug messages, unnecessary toString calls

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/metadata/CachedRoleCommandOrderProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metadata/CachedRoleCommandOrderProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/metadata/CachedRoleCommandOrderProvider.java
index a874f99..c6315a7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metadata/CachedRoleCommandOrderProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metadata/CachedRoleCommandOrderProvider.java
@@ -38,7 +38,7 @@ import com.google.inject.Injector;
  */
 public class CachedRoleCommandOrderProvider implements RoleCommandOrderProvider {
 
-  private static Logger LOG = LoggerFactory.getLogger(CachedRoleCommandOrderProvider.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CachedRoleCommandOrderProvider.class);
 
   @Inject
   private Injector injector;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java b/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java
index fc6c124..ef2c9a9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java
@@ -274,8 +274,7 @@ public class RoleCommandOrder implements Cloneable {
           }
 
           if (LOG.isDebugEnabled()) {
-            LOG.debug("Adding dependency for " + restartPair + ", " +
-              "dependencies => " + roleCommandDeps);
+            LOG.debug("Adding dependency for {}, dependencies => {}", restartPair, roleCommandDeps);
           }
           missingDependencies.put(restartPair, roleCommandDeps);
         }
@@ -338,9 +337,7 @@ public class RoleCommandOrder implements Cloneable {
       v1 = dependencies.get(roleCommandPairSetEntry.getKey());
       v2 = rco.dependencies.get(roleCommandPairSetEntry.getKey());
       if (!v1.equals(v2)) {
-        LOG.debug("different entry found for key ("
-          + roleCommandPairSetEntry.getKey().getRole().toString() + ", "
-          + roleCommandPairSetEntry.getKey().getCmd().toString() + ")" );
+        LOG.debug("different entry found for key ({}, {})", roleCommandPairSetEntry.getKey().getRole(), roleCommandPairSetEntry.getKey().getCmd());
         return 1;
       }
     }
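
The cleanup switches concatenated debug messages to SLF4J's {} placeholders, so the final string is only assembled when DEBUG is actually enabled; the surrounding isDebugEnabled() guards remain where the argument expressions themselves are not trivially cheap. Side by side, using an arbitrary logger:

    import java.util.List;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LoggingStyleSketch {
      private static final Logger LOG = LoggerFactory.getLogger(LoggingStyleSketch.class);

      void publish(List<?> metrics) {
        // Old style: the message string is built on every call, even with DEBUG off.
        LOG.debug("Publishing " + metrics.size() + " metrics to sink.");

        // New style: formatting is deferred until the logger checks the level.
        LOG.debug("Publishing {} metrics to sink.", metrics.size());
      }
    }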

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/AmbariPerformanceMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/AmbariPerformanceMonitor.java b/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/AmbariPerformanceMonitor.java
index 3453a22..58bcc6f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/AmbariPerformanceMonitor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/AmbariPerformanceMonitor.java
@@ -38,7 +38,7 @@ import com.google.inject.Singleton;
  */
 @Singleton
 public class AmbariPerformanceMonitor extends PerformanceMonitor {
-  private static Logger LOG = LoggerFactory.getLogger(AmbariPerformanceMonitor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AmbariPerformanceMonitor.class);
   private boolean isInitialized = false;
   private DatabaseMetricsSource metricsSource;
   private static String entityPackagePrefix = "org.apache"; //Can be made into a set later if needed.
@@ -111,7 +111,7 @@ public class AmbariPerformanceMonitor extends PerformanceMonitor {
         init();
       }
       if (isInitialized) {
-        LOG.debug("Publishing " + metrics.size() + " metrics to sink.");
+        LOG.debug("Publishing {} metrics to sink.", metrics.size());
         metricsSource.publish(metrics);
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/DatabaseMetricsSource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/DatabaseMetricsSource.java b/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/DatabaseMetricsSource.java
index 7062af1..423b19f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/DatabaseMetricsSource.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/DatabaseMetricsSource.java
@@ -44,7 +44,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
  * and publishes to configured Metric Sink.
  **/
 public class DatabaseMetricsSource extends AbstractMetricsSource {
-  private static Logger LOG = LoggerFactory.getLogger(DatabaseMetricsSource.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DatabaseMetricsSource.class);
   private static String dbMonitorPrefix = "monitor.";
   private ExecutorService executor;
   private MetricsConfiguration configuration;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/JvmMetricsSource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/JvmMetricsSource.java b/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/JvmMetricsSource.java
index 41229cc..348cc4f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/JvmMetricsSource.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/JvmMetricsSource.java
@@ -46,7 +46,7 @@ import com.codahale.metrics.jvm.ThreadStatesGaugeSet;
  */
 public class JvmMetricsSource extends AbstractMetricsSource {
   static final MetricRegistry registry = new MetricRegistry();
-  private static Logger LOG = LoggerFactory.getLogger(JvmMetricsSource.class);
+  private static final Logger LOG = LoggerFactory.getLogger(JvmMetricsSource.class);
   private ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
   private static String JVM_PREFIX = "jvm";
   private int interval = 10;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/MetricsConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/MetricsConfiguration.java b/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/MetricsConfiguration.java
index a940a68..28556f6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/MetricsConfiguration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/MetricsConfiguration.java
@@ -30,7 +30,7 @@ import org.slf4j.LoggerFactory;
 public class MetricsConfiguration {
   public static final String CONFIG_FILE = "metrics.properties";
 
-  private static Logger LOG = LoggerFactory.getLogger(MetricsConfiguration.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MetricsConfiguration.class);
   private Properties properties;
 
   public static MetricsConfiguration getMetricsConfiguration() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/MetricsServiceImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/MetricsServiceImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/MetricsServiceImpl.java
index ac5b6c2..0172b1d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/MetricsServiceImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metrics/system/impl/MetricsServiceImpl.java
@@ -35,7 +35,7 @@ import com.google.inject.Singleton;
 
 @Singleton
 public class MetricsServiceImpl implements MetricsService {
-  private static Logger LOG = LoggerFactory.getLogger(MetricsServiceImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MetricsServiceImpl.class);
   private static Map<String, MetricsSource> sources = new HashMap<>();
   private static MetricsSink sink = null;
   private MetricsConfiguration configuration = null;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
index 3b07bc6..9077362 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
@@ -1299,7 +1299,7 @@ public class DBAccessorImpl implements DBAccessor {
     Object dbValue = databasePlatform.convertToDatabaseType(value);
     String valueString = value.toString();
     if (dbValue instanceof String) {
-      valueString = "'" + value.toString() + "'";
+      valueString = "'" + value + "'";
     }
 
     return valueString;
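
Several hunks, like the one above, drop an explicit .toString() inside string concatenation. This is a readability and robustness change: Java concatenation already routes non-String operands through String.valueOf, which tolerates null. An illustrative sketch, not taken from the commit:

    public class ConcatSketch {
      public static void main(String[] args) {
        Object value = null;
        // Concatenation uses String.valueOf(value), so this prints 'null' instead of throwing.
        System.out.println("'" + value + "'");
        // The explicit form would fail here:
        // System.out.println("'" + value.toString() + "'");  // NullPointerException
      }
    }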

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
index 297aef4..d71429c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
@@ -54,7 +54,7 @@ public class AlertDefinitionDAO {
   /**
    * Logger.
    */
-  private static Logger LOG = LoggerFactory.getLogger(AlertDefinitionDAO.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AlertDefinitionDAO.class);
 
   /**
    * JPA entity manager

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
index e7c9763..9198686 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
@@ -82,7 +82,7 @@ import com.google.inject.Provider;
 })
 @StaticallyInject
 public class RepositoryVersionEntity {
-  private static Logger LOG = LoggerFactory.getLogger(RepositoryVersionEntity.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RepositoryVersionEntity.class);
 
   @Inject
   private static Provider<RepositoryVersionHelper> repositoryVersionHelperProvider;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
index 7e3092d..afa4ac7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
@@ -120,7 +120,7 @@ public class GenericDbmsHelper implements DbmsHelper {
       // no writing to file
     }
 
-    builder.append(writer.toString());
+    builder.append(writer);
 
     return builder;
   }
@@ -435,7 +435,7 @@ public class GenericDbmsHelper implements DbmsHelper {
     Object dbValue = databasePlatform.convertToDatabaseType(value);
     String valueString = value.toString();
     if (dbValue instanceof String || dbValue instanceof Enum) {
-      valueString = "'" + value.toString() + "'";
+      valueString = "'" + value + "'";
     }
 
     return valueString;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/resources/ResourceManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/resources/ResourceManager.java b/ambari-server/src/main/java/org/apache/ambari/server/resources/ResourceManager.java
index 627af41..8fa0f1f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/resources/ResourceManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/resources/ResourceManager.java
@@ -20,8 +20,8 @@ package org.apache.ambari.server.resources;
 import java.io.File;
 
 import org.apache.ambari.server.configuration.Configuration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
@@ -31,7 +31,7 @@ import com.google.inject.Singleton;
  */
 @Singleton
 public class ResourceManager {
-  private static Log LOG = LogFactory.getLog(ResourceManager.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ResourceManager.class);
 
   @Inject Configuration configs;
   /**
@@ -44,10 +44,7 @@ public class ResourceManager {
     String resourcePathIndep = resourcePath.replace("/", File.separator);
     File resourceFile = new File(resDir + File.separator + resourcePathIndep);
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Resource requested from ResourceManager"
-          + ", resourceDir=" + resDir
-          + ", resourcePath=" + resourcePathIndep
-          + ", fileExists=" + resourceFile.exists());
+      LOG.debug("Resource requested from ResourceManager, resourceDir={}, resourcePath={}, fileExists={}", resDir, resourcePathIndep, resourceFile.exists());
     }
     return resourceFile;
   }
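
The ResourceManager hunk above shows both halves of the migration at once: commons-logging Log/LogFactory imports swapped for SLF4J, and a concatenated debug message rewritten with {} placeholders. A hedged, self-contained sketch of the before/after shape follows; the class name and arguments are hypothetical, not from the Ambari tree.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class MigrationSketch {
      private static final Logger LOG = LoggerFactory.getLogger(MigrationSketch.class);

      void report(String resourceDir, String resourcePath, boolean exists) {
        // Before (commons-logging style): message built eagerly by concatenation.
        //   LOG.debug("Resource requested, resourceDir=" + resourceDir + ", resourcePath=" + resourcePath);
        // After: SLF4J assembles the message only if DEBUG is enabled for this logger.
        LOG.debug("Resource requested, resourceDir={}, resourcePath={}, fileExists={}",
            resourceDir, resourcePath, exists);
      }

      public static void main(String[] args) {
        new MigrationSketch().report("/tmp/resources", "scripts/example.py", true);
      }
    }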

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/resources/api/rest/GetResource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/resources/api/rest/GetResource.java b/ambari-server/src/main/java/org/apache/ambari/server/resources/api/rest/GetResource.java
index 53266c2..f81c13a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/resources/api/rest/GetResource.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/resources/api/rest/GetResource.java
@@ -32,8 +32,8 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 
 import org.apache.ambari.server.resources.ResourceManager;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
 
@@ -42,7 +42,7 @@ import com.google.inject.Inject;
  */
 @Path("/")
 public class GetResource {
-  private static Log LOG = LogFactory.getLog(GetResource.class);
+  private static final Logger LOG = LoggerFactory.getLogger(GetResource.class);
 
   private static ResourceManager resourceManager;
 
@@ -59,8 +59,7 @@ public class GetResource {
   public Response getResource(@PathParam("resourcePath") String resourcePath,
       @Context HttpServletRequest req) {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Received a resource request from agent"
-          + ", resourcePath=" + resourcePath);
+      LOG.debug("Received a resource request from agent, resourcePath={}", resourcePath);
     }
     File resourceFile = resourceManager.getResource(resourcePath);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/scheduler/AbstractLinearExecutionJob.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/AbstractLinearExecutionJob.java b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/AbstractLinearExecutionJob.java
index 0e6fc7b..4599dfa 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/AbstractLinearExecutionJob.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/AbstractLinearExecutionJob.java
@@ -42,7 +42,7 @@ import org.slf4j.LoggerFactory;
  * and then it schedules the follow-up job.
  */
 public abstract class AbstractLinearExecutionJob implements ExecutionJob {
-  private static Logger LOG = LoggerFactory.getLogger(AbstractLinearExecutionJob.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AbstractLinearExecutionJob.class);
   protected ExecutionScheduleManager executionScheduleManager;
 
   public AbstractLinearExecutionJob(ExecutionScheduleManager executionScheduleManager) {
@@ -69,7 +69,7 @@ public abstract class AbstractLinearExecutionJob implements ExecutionJob {
   @Override
   public void execute(JobExecutionContext context) throws JobExecutionException {
     JobKey jobKey = context.getJobDetail().getKey();
-    LOG.debug("Executing linear job: " + jobKey);
+    LOG.debug("Executing linear job: {}", jobKey);
     JobDataMap jobDataMap = context.getMergedJobDataMap();
 
     if (!executionScheduleManager.continueOnMisfire(context)) {
@@ -109,7 +109,7 @@ public abstract class AbstractLinearExecutionJob implements ExecutionJob {
       }
     }
 
-    LOG.debug("Finished linear job: " + jobKey);
+    LOG.debug("Finished linear job: {}", jobKey);
 
     String nextJobName = jobDataMap.getString(NEXT_EXECUTION_JOB_NAME_KEY);
     String nextJobGroup = jobDataMap.getString(NEXT_EXECUTION_JOB_GROUP_KEY);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
index 91f642e..f3b3c41 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
@@ -243,7 +243,7 @@ public class ExecutionScheduleManager {
    * @param trigger
    */
   public void scheduleJob(Trigger trigger) {
-    LOG.debug("Scheduling job: " + trigger.getJobKey());
+    LOG.debug("Scheduling job: {}", trigger.getJobKey());
     if (isSchedulerAvailable()) {
       try {
         executionScheduler.scheduleJob(trigger);
@@ -334,7 +334,7 @@ public class ExecutionScheduleManager {
 
       try {
         executionScheduler.scheduleJob(trigger);
-        LOG.debug("Scheduled trigger next fire time: " + trigger.getNextFireTime());
+        LOG.debug("Scheduled trigger next fire time: {}", trigger.getNextFireTime());
       } catch (SchedulerException e) {
         LOG.error("Unable to schedule request execution.", e);
         throw new AmbariException(e.getMessage());
@@ -352,7 +352,7 @@ public class ExecutionScheduleManager {
 
       try {
         executionScheduler.scheduleJob(trigger);
-        LOG.debug("Scheduled trigger next fire time: " + trigger.getNextFireTime());
+        LOG.debug("Scheduled trigger next fire time: {}", trigger.getNextFireTime());
       } catch (SchedulerException e) {
         LOG.error("Unable to schedule request execution.", e);
         throw new AmbariException(e.getMessage());
@@ -412,8 +412,8 @@ public class ExecutionScheduleManager {
   }
 
   protected String getJobName(Long executionId, Long orderId) {
-    return BATCH_REQUEST_JOB_PREFIX + "-" + executionId.toString() + "-" +
-      orderId.toString();
+    return BATCH_REQUEST_JOB_PREFIX + "-" + executionId + "-" +
+      orderId;
   }
 
   /**
@@ -492,7 +492,7 @@ public class ExecutionScheduleManager {
           String jobName = getJobName(requestExecution.getId(),
             batchRequest.getOrderId());
 
-          LOG.debug("Deleting Job, jobName = " + jobName);
+          LOG.debug("Deleting Job, jobName = {}", jobName);
 
           try {
             executionScheduler.deleteJob(JobKey.jobKey(jobName,

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionSchedulerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionSchedulerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionSchedulerImpl.java
index 6dae349..18c7390 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionSchedulerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionSchedulerImpl.java
@@ -69,7 +69,7 @@ public class ExecutionSchedulerImpl implements ExecutionScheduler {
       sf.initialize(properties);
     } catch (SchedulerException e) {
       LOG.warn("Failed to initialize Request Execution Scheduler properties !");
-      LOG.debug("Scheduler properties: \n" + properties);
+      LOG.debug("Scheduler properties: \n{}", properties);
       e.printStackTrace();
       return;
     }
@@ -125,7 +125,7 @@ public class ExecutionSchedulerImpl implements ExecutionScheduler {
     // Skip update check
     properties.setProperty("org.quartz.scheduler.skipUpdateCheck", "true");
 
-    LOG.debug("Using quartz properties: " + properties);
+    LOG.debug("Using quartz properties: {}", properties);
     return properties;
   }
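
The ExecutionSchedulerImpl change above is typical: LOG.debug("...: " + properties) becomes LOG.debug("...: {}", properties). To be precise about what this buys: the argument expression is still evaluated, but its toString() and the final message assembly are deferred until the level check inside debug() passes. A small sketch, assuming a hypothetical object with an expensive toString():

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LazyFormattingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(LazyFormattingSketch.class);

      // Hypothetical object whose toString() is costly.
      static final class BigReport {
        @Override
        public String toString() {
          StringBuilder sb = new StringBuilder();
          for (int i = 0; i < 100_000; i++) {
            sb.append(i).append(',');
          }
          return sb.toString();
        }
      }

      public static void main(String[] args) {
        BigReport report = new BigReport();
        // Concatenation would build the big string even with DEBUG disabled:
        //   LOG.debug("Using quartz properties: " + report);
        // The parameterized call skips report.toString() entirely when DEBUG is off.
        LOG.debug("Using quartz properties: {}", report);
      }
    }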
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/security/CertificateManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/CertificateManager.java b/ambari-server/src/main/java/org/apache/ambari/server/security/CertificateManager.java
index 60d7a52..532c749 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/CertificateManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/CertificateManager.java
@@ -90,9 +90,9 @@ public class CertificateManager {
     String srvrKstrDir = configsMap.get(Configuration.SRVR_KSTR_DIR.getKey());
     String srvrCrtName = configsMap.get(Configuration.SRVR_CRT_NAME.getKey());
     File certFile = new File(srvrKstrDir + File.separator + srvrCrtName);
-    LOG.debug("srvrKstrDir = " + srvrKstrDir);
-    LOG.debug("srvrCrtName = " + srvrCrtName);
-    LOG.debug("certFile = " + certFile.getAbsolutePath());
+    LOG.debug("srvrKstrDir = {}", srvrKstrDir);
+    LOG.debug("srvrCrtName = {}", srvrCrtName);
+    LOG.debug("certFile = {}", certFile.getAbsolutePath());
 
     return certFile.exists();
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/security/SecurityFilter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/SecurityFilter.java b/ambari-server/src/main/java/org/apache/ambari/server/security/SecurityFilter.java
index f2f4d4f..a1befa7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/SecurityFilter.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/SecurityFilter.java
@@ -31,8 +31,8 @@ import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 
 import org.apache.ambari.server.configuration.Configuration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class SecurityFilter implements Filter {
 
@@ -40,7 +40,7 @@ public class SecurityFilter implements Filter {
   private static String CA = "/ca";
 
   private static Configuration config;
-  private final static Log LOG = LogFactory.getLog(SecurityFilter.class);
+  private final static Logger LOG = LoggerFactory.getLogger(SecurityFilter.class);
 
   @Override
   public void destroy() {
@@ -53,7 +53,7 @@ public class SecurityFilter implements Filter {
     HttpServletRequest req = (HttpServletRequest) serReq;
     String reqUrl = req.getRequestURL().toString();
 
-    LOG.debug("Filtering " + reqUrl + " for security purposes");
+    LOG.debug("Filtering {} for security purposes", reqUrl);
     if (serReq.getLocalPort() != config.getTwoWayAuthPort()) {
       if (isRequestAllowed(reqUrl)) {
         filtCh.doFilter(serReq, serResp);
@@ -63,7 +63,7 @@ public class SecurityFilter implements Filter {
       }
     }
 	  else {
-      LOG.debug("Request can continue on secure port " + serReq.getLocalPort());
+      LOG.debug("Request can continue on secure port {}", serReq.getLocalPort());
       filtCh.doFilter(serReq, serResp);
     }
   }
@@ -98,7 +98,7 @@ public class SecurityFilter implements Filter {
 
     } catch (Exception e) {
       LOG.warn("Exception while validating if request is secure " +
-        e.toString());
+        e);
     }
     LOG.warn("Request " + reqUrl + " doesn't match any pattern.");
     return false;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapBindAuthenticator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapBindAuthenticator.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapBindAuthenticator.java
index a6ffa81..54cc978 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapBindAuthenticator.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapBindAuthenticator.java
@@ -155,7 +155,7 @@ public class AmbariLdapBindAuthenticator extends AbstractLdapAuthenticator {
       throw new BadCredentialsException("The user search facility has not been set.");
     } else {
       if (LOG.isTraceEnabled()) {
-        LOG.trace("Searching for user with username {}: {}", username, userSearch.toString());
+        LOG.trace("Searching for user with username {}: {}", username, userSearch);
       }
 
       // Find the user data where the supplied username matches the value of the configured LDAP

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProvider.java
index 0fff8c8..373552e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProvider.java
@@ -55,7 +55,7 @@ public class AmbariPamAuthenticationProvider implements AuthenticationProvider {
   @Inject
   protected GroupDAO groupDAO;
 
-  private static Logger LOG = LoggerFactory.getLogger(AmbariPamAuthenticationProvider.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AmbariPamAuthenticationProvider.class);
 
   private final Configuration configuration;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/jwt/JwtAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/jwt/JwtAuthenticationFilter.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/jwt/JwtAuthenticationFilter.java
index 258e9da..e27afdb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/jwt/JwtAuthenticationFilter.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/jwt/JwtAuthenticationFilter.java
@@ -265,7 +265,7 @@ public class JwtAuthenticationFilter implements AmbariAuthenticationFilter {
     }
     String loginURL = authenticationProviderUrl + delimiter
         + originalUrlQueryParam + "="
-        + request.getRequestURL().toString();
+        + request.getRequestURL();
     return loginURL;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/security/encryption/MasterKeyServiceImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/encryption/MasterKeyServiceImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/security/encryption/MasterKeyServiceImpl.java
index 4763cc2..3b38856 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/encryption/MasterKeyServiceImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/encryption/MasterKeyServiceImpl.java
@@ -34,12 +34,12 @@ import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.utils.AmbariPath;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.commons.net.ntp.TimeStamp;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class MasterKeyServiceImpl implements MasterKeyService {
-  private static final Log LOG = LogFactory.getLog(MasterKeyServiceImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MasterKeyServiceImpl.class);
   private static final String MASTER_PASSPHRASE = "masterpassphrase";
   private static final String MASTER_PERSISTENCE_TAG_PREFIX = "#1.0# ";
   private static final AESEncryptor aes = new AESEncryptor(MASTER_PASSPHRASE);
@@ -138,7 +138,7 @@ public class MasterKeyServiceImpl implements MasterKeyService {
    * @return true if the master key was written to the specified file; otherwise false
    */
   public static boolean initializeMasterKeyFile(File masterKeyFile, String masterKey) {
-    LOG.debug(String.format("Persisting master key into %s", masterKeyFile.getAbsolutePath()));
+    LOG.debug("Persisting master key into {}", masterKeyFile.getAbsolutePath());
 
     EncryptionResult atom = null;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/CertificateDownload.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/CertificateDownload.java b/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/CertificateDownload.java
index b1ad05f..eddc6bb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/CertificateDownload.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/CertificateDownload.java
@@ -24,14 +24,14 @@ import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
 
 import org.apache.ambari.server.security.CertificateManager;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
 
 @Path("/cert/ca")
 public class CertificateDownload {
-  private static Log LOG = LogFactory.getLog(CertificateDownload.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CertificateDownload.class);
   private static CertificateManager certMan;
 
   @Inject

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/CertificateSign.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/CertificateSign.java b/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/CertificateSign.java
index 9f4cdd5..5e2d45a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/CertificateSign.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/CertificateSign.java
@@ -30,13 +30,13 @@ import javax.ws.rs.core.MediaType;
 import org.apache.ambari.server.security.CertificateManager;
 import org.apache.ambari.server.security.SignCertResponse;
 import org.apache.ambari.server.security.SignMessage;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
 @Path("/certs")
 public class CertificateSign {
-  private static Log LOG = LogFactory.getLog(CertificateSign.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CertificateSign.class);
   private static CertificateManager certMan;
 
   @Inject

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/ConnectionInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/ConnectionInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/ConnectionInfo.java
index 6bce0b9..ac2e536 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/ConnectionInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/unsecured/rest/ConnectionInfo.java
@@ -26,15 +26,15 @@ import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
 
 import org.apache.ambari.server.configuration.Configuration;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
 
 
 @Path("/connection_info")
 public class ConnectionInfo {
-    private static Log LOG = LogFactory.getLog(ConnectionInfo.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ConnectionInfo.class);
     private static HashMap<String,String> response= new HashMap<>();
     private static Configuration conf;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
index 8903fa1..f7d6060 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
@@ -54,13 +54,13 @@ import org.apache.ambari.server.security.InternalSSLSocketFactoryTrusting;
 import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.velocity.VelocityContext;
 import org.apache.velocity.app.Velocity;
 import org.apache.velocity.exception.MethodInvocationException;
 import org.apache.velocity.exception.ParseErrorException;
 import org.apache.velocity.exception.ResourceNotFoundException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.reflect.TypeToken;
 import com.google.gson.Gson;
@@ -71,7 +71,7 @@ import com.google.inject.Inject;
  */
 public class ADKerberosOperationHandler extends KerberosOperationHandler {
 
-  private static Log LOG = LogFactory.getLog(ADKerberosOperationHandler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ADKerberosOperationHandler.class);
 
   private static final String LDAP_CONTEXT_FACTORY_CLASS = "com.sun.jndi.ldap.LdapCtxFactory";
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
index 9ee5f11..a23ab5d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
@@ -288,7 +288,7 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
                 commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
               }
             } else {
-              LOG.debug(String.format("Skipping previously processed keytab for %s on host %s", evaluatedPrincipal, hostName));
+              LOG.debug("Skipping previously processed keytab for {} on host {}", evaluatedPrincipal, hostName);
             }
           }
         }
@@ -320,7 +320,7 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
   public Keytab createKeytab(String principal, String password, Integer keyNumber,
                              KerberosOperationHandler operationHandler, boolean checkCache,
                              boolean canCache, ActionLog actionLog) throws AmbariException {
-    LOG.debug("Creating keytab for " + principal + " with kvno " + keyNumber);
+    LOG.debug("Creating keytab for {} with kvno {}", principal, keyNumber);
     Keytab keytab = null;
 
     // Possibly get the keytab from the cache
@@ -366,7 +366,7 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
 
             if (previousCachedFilePath != null) {
               if (!new File(previousCachedFilePath).delete()) {
-                LOG.debug(String.format("Failed to remove orphaned cache file %s", previousCachedFilePath));
+                LOG.debug("Failed to remove orphaned cache file {}", previousCachedFilePath);
               }
             }
           }
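
The CreateKeytabFilesServerAction hunk shows the other common shape in this commit: LOG.debug(String.format(...)) replaced by a placeholder call. The distinction matters because String.format runs unconditionally, before debug() can check the level. A hedged sketch of the two forms side by side; the values are illustrative only.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class FormatVsPlaceholderSketch {
      private static final Logger LOG = LoggerFactory.getLogger(FormatVsPlaceholderSketch.class);

      public static void main(String[] args) {
        String principal = "hdfs/host1@EXAMPLE.COM";
        String host = "host1.example.com";

        // Always pays the String.format cost, even when DEBUG is off:
        LOG.debug(String.format("Skipping previously processed keytab for %s on host %s", principal, host));

        // Formats only when DEBUG is enabled:
        LOG.debug("Skipping previously processed keytab for {} on host {}", principal, host);
      }
    }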

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
index fc2de99..2b3a0ca 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
@@ -133,7 +133,7 @@ public class DestroyPrincipalsServerAction extends KerberosServerAction {
             // If a cached  keytabs file exists for this principal, delete it.
             if (cachedKeytabPath != null) {
               if (!new File(cachedKeytabPath).delete()) {
-                LOG.debug(String.format("Failed to remove cached keytab for %s", evaluatedPrincipal));
+                LOG.debug("Failed to remove cached keytab for {}", evaluatedPrincipal);
               }
             }
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/IPAKerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/IPAKerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/IPAKerberosOperationHandler.java
index 54b4cd3..9a6a07e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/IPAKerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/IPAKerberosOperationHandler.java
@@ -405,7 +405,7 @@ public class IPAKerberosOperationHandler extends KerberosOperationHandler {
       ShellCommandUtil.Result result = invokeIpa(String.format("service-show %s", principal));
       // ignore the keytab but set the password for this principal
       if (result.isSuccessful() && result.getStdout().contains("Keytab: False")) {
-        LOG.debug("Found service principal " + principal + " without password/keytab. Setting one");
+        LOG.debug("Found service principal {} without password/keytab. Setting one", principal);
         createKeytab(principal, password, 0);
       }
     }
@@ -505,7 +505,7 @@ public class IPAKerberosOperationHandler extends KerberosOperationHandler {
           stderr.read(err_data);
           err.append(err_data);
         }
-        throw new KerberosOperationException("No answer data available from stdin stream. STDERR: " + err.toString());
+        throw new KerberosOperationException("No answer data available from stdin stream. STDERR: " + err);
       }
       count++;
     }
@@ -534,12 +534,12 @@ public class IPAKerberosOperationHandler extends KerberosOperationHandler {
     BufferedReader stderr = null;
     OutputStreamWriter out = null;
 
-    LOG.debug("Updating password for: " + principal);
+    LOG.debug("Updating password for: {}", principal);
 
     UUID uuid = UUID.randomUUID();
     String fileName = System.getProperty("java.io.tmpdir") +
             File.pathSeparator +
-            "krb5cc_" + uuid.toString();
+            "krb5cc_" + uuid;
 
     try {
       ShellCommandUtil.Result result = invokeIpa(String.format("user-mod %s --random", principal));
@@ -665,7 +665,7 @@ public class IPAKerberosOperationHandler extends KerberosOperationHandler {
     command.add(query);
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug(String.format("Executing: %s", createCleanCommand(command)));
+      LOG.debug("Executing: {}", createCleanCommand(command));
     }
 
     List<String> fixedCommand = fixCommandList(command);
@@ -962,7 +962,7 @@ public class IPAKerberosOperationHandler extends KerberosOperationHandler {
     UUID uuid = UUID.randomUUID();
     String fileName = System.getProperty("java.io.tmpdir") +
             File.pathSeparator +
-            "ambari." + uuid.toString();
+            "ambari." + uuid;
 
     // TODO: add ciphers
     List<String> command = new ArrayList<>();
@@ -1007,7 +1007,7 @@ public class IPAKerberosOperationHandler extends KerberosOperationHandler {
       UUID uuid = UUID.randomUUID();
       fileName = System.getProperty("java.io.tmpdir") +
               File.pathSeparator +
-              "krb5cc_" + uuid.toString();
+              "krb5cc_" + uuid;
       env.put("KRB5CCNAME", String.format("FILE:%s", fileName));
 
       init(credentials, fileName);
@@ -1049,7 +1049,7 @@ public class IPAKerberosOperationHandler extends KerberosOperationHandler {
       try {
         String credentialsCache = String.format("FILE:%s", fileName);
 
-        LOG.debug("start subprocess " + executableKinit + " " + credentials.getPrincipal());
+        LOG.debug("start subprocess {} {}", executableKinit, credentials.getPrincipal());
         process = Runtime.getRuntime().exec(new String[]{executableKinit, "-c", credentialsCache, credentials.getPrincipal()});
         reader = new BufferedReader(new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8));
         osw = new OutputStreamWriter(process.getOutputStream());
@@ -1073,7 +1073,7 @@ public class IPAKerberosOperationHandler extends KerberosOperationHandler {
         }
 
         String line = sb.toString();
-        LOG.debug("Reading a line: " + line);
+        LOG.debug("Reading a line: {}", line);
         if (!line.startsWith("Password")) {
           throw new KerberosOperationException("Unexpected response from kinit while trying to get ticket for "
                   + credentials.getPrincipal() + " got: " + line);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
index 55d7366..0997f65 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
@@ -454,7 +454,7 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
     command.add(query);
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug(String.format("Executing: %s", command));
+      LOG.debug("Executing: {}", command);
     }
 
     int retryCount = configuration.getKerberosOperationRetries();

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
index 17bb3f8..ec4d383 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
@@ -86,7 +86,7 @@ import com.google.inject.Provider;
  */
 public class ConfigureAction extends AbstractUpgradeServerAction {
 
-  private static Logger LOG = LoggerFactory.getLogger(ConfigureAction.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ConfigureAction.class);
 
   /**
    * Used to lookup the cluster.

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
index db0da9c..3c2dbd6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
@@ -319,7 +319,7 @@ public class FinalizeUpgradeAction extends AbstractUpgradeServerAction {
     } catch (Exception e) {
       StringWriter sw = new StringWriter();
       e.printStackTrace(new PrintWriter(sw));
-      errSB.append(sw.toString());
+      errSB.append(sw);
 
       return createCommandReport(-1, HostRoleStatus.FAILED, "{}", outSB.toString(), errSB.toString());
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpdateDesiredStackAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpdateDesiredStackAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpdateDesiredStackAction.java
index f55ffd0..8a4820d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpdateDesiredStackAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpdateDesiredStackAction.java
@@ -181,7 +181,7 @@ public class UpdateDesiredStackAction extends AbstractUpgradeServerAction {
     } catch (Exception e) {
       StringWriter sw = new StringWriter();
       e.printStackTrace(new PrintWriter(sw));
-      err.append(sw.toString());
+      err.append(sw);
 
       return createCommandReport(-1, HostRoleStatus.FAILED, "{}", out.toString(), err.toString());
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionDirectory.java
index 1f6ce2c..131318b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionDirectory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionDirectory.java
@@ -133,7 +133,7 @@ public class ExtensionDirectory extends StackDefinitionDirectory {
     //todo: is it ok for this file not to exist?
     if (extensionMetaInfoFile.exists()) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Reading extension version metainfo from file " + extensionMetaInfoFile.getAbsolutePath());
+        LOG.debug("Reading extension version metainfo from file {}", extensionMetaInfoFile.getAbsolutePath());
       }
 
       try {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionModule.java
index ee4245a..409cfe6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionModule.java
@@ -379,9 +379,7 @@ public class ExtensionModule extends BaseModule<ExtensionModule, ExtensionInfo>
 
     id = String.format("%s:%s", extensionInfo.getName(), extensionInfo.getVersion());
 
-    LOG.debug("Adding new extension to known extensions"
-        + ", extensionName = " + extensionInfo.getName()
-        + ", extensionVersion = " + extensionInfo.getVersion());
+    LOG.debug("Adding new extension to known extensions, extensionName = {}, extensionVersion = {}", extensionInfo.getName(), extensionInfo.getVersion());
 
 
     //todo: give additional thought on handling missing metainfo.xml

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/stack/MasterHostResolver.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/MasterHostResolver.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/MasterHostResolver.java
index d38e4a3..427a5f5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/MasterHostResolver.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/MasterHostResolver.java
@@ -52,7 +52,7 @@ import com.google.common.reflect.TypeToken;
 
 public class MasterHostResolver {
 
-  private static Logger LOG = LoggerFactory.getLogger(MasterHostResolver.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MasterHostResolver.class);
 
   private final UpgradeContext m_upgradeContext;
   private final Cluster m_cluster;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java
index e4dd853..abad7ed 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java
@@ -421,7 +421,7 @@ public abstract class ServiceDirectory extends StackDefinitionDirectory {
       roleCommandOrder = new StackRoleCommandOrder(result);
 
       if (LOG.isDebugEnabled() && rcoFile != null) {
-        LOG.debug("Role Command Order for " + rcoFile.getAbsolutePath());
+        LOG.debug("Role Command Order for {}", rcoFile.getAbsolutePath());
         roleCommandOrder.printRoleCommandOrder(LOG);
       }
     } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
index 7dbc639..e41d3b2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
@@ -629,9 +629,9 @@ public class ServiceModule extends BaseModule<ServiceModule, ServiceInfo> implem
    * Ensure that all default type attributes are set.
    */
   private void finalizeConfiguration() {
-    LOG.debug(String.format("Finalize config, number of configuration modules %s", configurationModules.size()));
+    LOG.debug("Finalize config, number of configuration modules {}", configurationModules.size());
     hasConfigs = !(configurationModules.isEmpty());
-    LOG.debug(String.format("Finalize config, hasConfigs %s", hasConfigs));
+    LOG.debug("Finalize config, hasConfigs {}", hasConfigs);
 
     for (ConfigurationModule config : configurationModules.values()) {
       ConfigurationInfo configInfo = config.getModuleInfo();

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java
index 16c8fd6..23fd0a9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java
@@ -286,8 +286,7 @@ public class StackDirectory extends StackDefinitionDirectory {
       hooksDir = getStackDirName() + File.separator + getName() +
           File.separator + HOOKS_FOLDER_NAME;
     } else {
-      LOG.debug("Hooks folder " + getAbsolutePath() + File.separator +
-          HOOKS_FOLDER_NAME + " does not exist");
+      LOG.debug("Hooks folder {}{}" + HOOKS_FOLDER_NAME + " does not exist", getAbsolutePath(), File.separator);
     }
 
     if (subDirs.contains(AmbariMetaInfo.RCO_FILE_NAME)) {
@@ -342,7 +341,7 @@ public class StackDirectory extends StackDefinitionDirectory {
     //todo: is it ok for this file not to exist?
     if (stackMetaInfoFile.exists()) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Reading stack version metainfo from file " + stackMetaInfoFile.getAbsolutePath());
+        LOG.debug("Reading stack version metainfo from file {}", stackMetaInfoFile.getAbsolutePath());
       }
 
       try {
@@ -492,7 +491,7 @@ public class StackDirectory extends StackDefinitionDirectory {
       }
       roleCommandOrder = new StackRoleCommandOrder(result);
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Role Command Order for " + rcoFilePath);
+        LOG.debug("Role Command Order for {}", rcoFilePath);
         roleCommandOrder.printRoleCommandOrder(LOG);
       }
     } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java
index d9649b9..57f7cb4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java
@@ -465,8 +465,7 @@ public class StackManager {
 
       String commonServicesRootAbsolutePath = commonServicesRoot.getAbsolutePath();
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Loading common services information"
-            + ", commonServicesRoot = " + commonServicesRootAbsolutePath);
+        LOG.debug("Loading common services information, commonServicesRoot = {}", commonServicesRootAbsolutePath);
       }
 
       if (!commonServicesRoot.isDirectory() && !commonServicesRoot.exists()) {
@@ -488,8 +487,7 @@ public class StackManager {
 
     String stackRootAbsPath = stackRoot.getAbsolutePath();
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Loading stack information"
-          + ", stackRoot = " + stackRootAbsPath);
+      LOG.debug("Loading stack information, stackRoot = {}", stackRootAbsPath);
     }
 
     if (!stackRoot.isDirectory() && !stackRoot.exists()) {
@@ -544,8 +542,7 @@ public class StackManager {
 
     String extensionRootAbsPath = extensionRoot.getAbsolutePath();
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Loading extension information"
-          + ", extensionRoot = " + extensionRootAbsPath);
+      LOG.debug("Loading extension information, extensionRoot = {}", extensionRootAbsPath);
     }
 
     //For backwards compatibility extension directory may not exist

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
index 5dd7656..c4fab99 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
@@ -554,9 +554,7 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
 
     id = String.format("%s:%s", stackInfo.getName(), stackInfo.getVersion());
 
-    LOG.debug("Adding new stack to known stacks"
-        + ", stackName = " + stackInfo.getName()
-        + ", stackVersion = " + stackInfo.getVersion());
+    LOG.debug("Adding new stack to known stacks, stackName = {}, stackVersion = {}", stackInfo.getName(), stackInfo.getVersion());
 
     //todo: give additional thought on handling missing metainfo.xml
     StackMetainfoXml smx = stackDirectory.getMetaInfoFile();
@@ -1014,7 +1012,7 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
         String name = groups.get(index).name;
         if (name.equals(group.addAfterGroup)) {
           groups.add(index + 1, group);
-          LOG.debug("Added group/after: " + group.name + "/" + group.addAfterGroup);
+          LOG.debug("Added group/after: {}/{}", group.name, group.addAfterGroup);
           return true;
         }
       }
@@ -1126,10 +1124,8 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
     if (null != rxml) {
       stackInfo.setRepositoryXml(rxml);
 
-      LOG.debug("Adding repositories to stack" +
-          ", stackName=" + stackInfo.getName() +
-          ", stackVersion=" + stackInfo.getVersion() +
-          ", repoFolder=" + stackDirectory.getRepoDir());
+      LOG.debug("Adding repositories to stack, stackName={}, stackVersion={}, repoFolder={}",
+        stackInfo.getName(), stackInfo.getVersion(), stackDirectory.getRepoDir());
 
       stackRepos = rxml.getRepositories();
 
@@ -1265,8 +1261,7 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
     }
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Adding repo to stack"
-          + ", repoInfo=" + ri.toString());
+      LOG.debug("Adding repo to stack, repoInfo={}", ri);
     }
     return ri;
   }
@@ -1293,8 +1288,7 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
 
     stackInfo.getRoleCommandOrder().merge(service.getModuleInfo().getRoleCommandOrder(), true);
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Role Command Order for " + stackInfo.getName() + "-" + stackInfo.getVersion() +
-        " service " + service.getModuleInfo().getName());
+      LOG.debug("Role Command Order for {}-{} service {}", stackInfo.getName(), stackInfo.getVersion(), service.getModuleInfo().getName());
       stackInfo.getRoleCommandOrder().printRoleCommandOrder(LOG);
     }
   }
@@ -1306,7 +1300,7 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
   private void validateBulkCommandComponents(Map<String, StackModule> allStacks){
     if (null != stackInfo) {
       String currentStackId = stackInfo.getName() + StackManager.PATH_DELIMITER + stackInfo.getVersion();
-      LOG.debug("Validate bulk command components for: " + currentStackId);
+      LOG.debug("Validate bulk command components for: {}", currentStackId);
       StackModule currentStack = allStacks.get(currentStackId);
       if (null != currentStack){
         for (ServiceModule serviceModule : currentStack.getServiceModules().values()) {
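
Note that several StackDirectory and StackModule hunks keep the if (LOG.isDebugEnabled()) guard even after switching to placeholders. That is deliberate where the guarded block does more than log a single line, for example when it also calls printRoleCommandOrder(LOG). A short sketch of when the guard still earns its keep; the dumpOrdering helper below is a hypothetical stand-in, not an Ambari API.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class GuardSketch {
      private static final Logger LOG = LoggerFactory.getLogger(GuardSketch.class);

      // Hypothetical stand-in for something like StackRoleCommandOrder.printRoleCommandOrder(LOG).
      static void dumpOrdering(Logger log) {
        for (int i = 0; i < 3; i++) {
          log.debug("stage {} -> stage {}", i, i + 1);
        }
      }

      public static void main(String[] args) {
        // A plain parameterized call needs no guard:
        LOG.debug("Validate bulk command components for: {}", "HDP:2.6");

        // But the guard still pays off when the block does real work beyond one log call:
        if (LOG.isDebugEnabled()) {
          LOG.debug("Role Command Order for {}-{}", "HDP", "2.6");
          dumpOrdering(LOG);
        }
      }
    }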

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java b/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
index d6d26c9..b508cd7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
@@ -42,7 +42,7 @@ import com.google.inject.assistedinject.AssistedInject;
 
 public class RoleGraph {
 
-  private static Logger LOG = LoggerFactory.getLogger(RoleGraph.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RoleGraph.class);
 
   Map<String, RoleGraphNode> graph = null;
   private RoleCommandOrder roleDependencies;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/CheckHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/CheckHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/CheckHelper.java
index f3fbc63..8cf3afd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/CheckHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/CheckHelper.java
@@ -50,7 +50,7 @@ public class CheckHelper {
   /**
    * Log.
    */
-  private static Logger LOG = LoggerFactory.getLogger(CheckHelper.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CheckHelper.class);
 
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
index a2af9ec..5f85e38 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
@@ -225,11 +225,8 @@ public class ServiceComponentImpl implements ServiceComponent {
   @Override
   public void setRecoveryEnabled(boolean recoveryEnabled) {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Setting RecoveryEnabled of Component" + ", clusterName="
-          + service.getCluster().getClusterName() + ", clusterId="
-          + service.getCluster().getClusterId() + ", serviceName=" + service.getName()
-          + ", componentName=" + getName() + ", oldRecoveryEnabled=" + isRecoveryEnabled()
-          + ", newRecoveryEnabled=" + recoveryEnabled);
+      LOG.debug("Setting RecoveryEnabled of Component, clusterName={}, clusterId={}, serviceName={}, componentName={}, oldRecoveryEnabled={}, newRecoveryEnabled={}",
+        service.getCluster().getClusterName(), service.getCluster().getClusterId(), service.getName(), getName(), isRecoveryEnabled(), recoveryEnabled);
     }
 
     ServiceComponentDesiredStateEntity desiredStateEntity = serviceComponentDesiredStateDAO.findById(
@@ -289,11 +286,8 @@ public class ServiceComponentImpl implements ServiceComponent {
       // TODO validation
       // TODO ensure host belongs to cluster
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Adding a ServiceComponentHost to ServiceComponent" + ", clusterName="
-            + service.getCluster().getClusterName() + ", clusterId="
-            + service.getCluster().getClusterId() + ", serviceName=" + service.getName()
-            + ", serviceComponentName=" + getName() + ", hostname=" + hostComponent.getHostName()
-            + ", recoveryEnabled=" + isRecoveryEnabled());
+        LOG.debug("Adding a ServiceComponentHost to ServiceComponent, clusterName={}, clusterId={}, serviceName={}, serviceComponentName={}, hostname={}, recoveryEnabled={}",
+          service.getCluster().getClusterName(), service.getCluster().getClusterId(), service.getName(), getName(), hostComponent.getHostName(), isRecoveryEnabled());
       }
 
       if (hostComponents.containsKey(hostComponent.getHostName())) {
@@ -350,11 +344,8 @@ public class ServiceComponentImpl implements ServiceComponent {
   @Override
   public void setDesiredState(State state) {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Setting DesiredState of Service" + ", clusterName="
-          + service.getCluster().getClusterName() + ", clusterId="
-          + service.getCluster().getClusterId() + ", serviceName=" + service.getName()
-          + ", serviceComponentName=" + getName() + ", oldDesiredState=" + getDesiredState()
-          + ", newDesiredState=" + state);
+      LOG.debug("Setting DesiredState of Service, clusterName={}, clusterId={}, serviceName={}, serviceComponentName={}, oldDesiredState={}, newDesiredState={}",
+        service.getCluster().getClusterName(), service.getCluster().getClusterId(), service.getName(), getName(), getDesiredState(), state);
     }
 
     ServiceComponentDesiredStateEntity desiredStateEntity = serviceComponentDesiredStateDAO.findById(

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
index fe37a93..da1702d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
@@ -266,11 +266,8 @@ public class ServiceImpl implements Service {
   @Override
   public void setDesiredState(State state) {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Setting DesiredState of Service" + ", clusterName="
-          + cluster.getClusterName() + ", clusterId="
-          + cluster.getClusterId() + ", serviceName=" + getName()
-          + ", oldDesiredState=" + getDesiredState() + ", newDesiredState="
-          + state);
+      LOG.debug("Setting DesiredState of Service, clusterName={}, clusterId={}, serviceName={}, oldDesiredState={}, newDesiredState={}",
+        cluster.getClusterName(), cluster.getClusterId(), getName(), getDesiredState(), state);
     }
 
     ServiceDesiredStateEntity serviceDesiredStateEntity = getServiceDesiredStateEntity();
@@ -291,11 +288,8 @@ public class ServiceImpl implements Service {
     }
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Setting DesiredSecurityState of Service" + ", clusterName="
-          + cluster.getClusterName() + ", clusterId="
-          + cluster.getClusterId() + ", serviceName=" + getName()
-          + ", oldDesiredSecurityState=" + getSecurityState()
-          + ", newDesiredSecurityState=" + securityState);
+      LOG.debug("Setting DesiredSecurityState of Service, clusterName={}, clusterId={}, serviceName={}, oldDesiredSecurityState={}, newDesiredSecurityState={}",
+        cluster.getClusterName(), cluster.getClusterId(), getName(), getSecurityState(), securityState);
     }
     ServiceDesiredStateEntity serviceDesiredStateEntity = getServiceDesiredStateEntity();
     serviceDesiredStateEntity.setSecurityState(securityState);
@@ -429,11 +423,8 @@ public class ServiceImpl implements Service {
   @Override
   public void setCredentialStoreEnabled(boolean credentialStoreEnabled) {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Setting CredentialStoreEnabled of Service" + ", clusterName="
-              + cluster.getClusterName() + ", clusterId="
-              + cluster.getClusterId() + ", serviceName=" + getName()
-              + ", oldCredentialStoreEnabled=" + isCredentialStoreEnabled()
-              + ", newCredentialStoreEnabled=" + credentialStoreEnabled);
+      LOG.debug("Setting CredentialStoreEnabled of Service, clusterName={}, clusterId={}, serviceName={}, oldCredentialStoreEnabled={}, newCredentialStoreEnabled={}",
+        cluster.getClusterName(), cluster.getClusterId(), getName(), isCredentialStoreEnabled(), credentialStoreEnabled);
     }
 
     ServiceDesiredStateEntity desiredStateEntity = getServiceDesiredStateEntity();

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
index ca7794e..1494488 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
@@ -295,7 +295,7 @@ public class StackInfo implements Comparable<StackInfo>, Validable {
       sb.append("\n\t\tRepositories:");
       for (RepositoryInfo repository : repositories) {
         sb.append("\t\t");
-        sb.append(repository.toString());
+        sb.append(repository);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/action/ActionImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/action/ActionImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/action/ActionImpl.java
index b42a0b6..35b559e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/action/ActionImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/action/ActionImpl.java
@@ -26,12 +26,12 @@ import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
 import org.apache.ambari.server.state.fsm.SingleArcTransition;
 import org.apache.ambari.server.state.fsm.StateMachine;
 import org.apache.ambari.server.state.fsm.StateMachineFactory;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ActionImpl implements Action {
 
-  private static final Log LOG = LogFactory.getLog(ActionImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ActionImpl.class);
 
   private final Lock readLock;
   private final Lock writeLock;
@@ -125,10 +125,8 @@ public class ActionImpl implements Action {
       ActionProgressUpdateEvent e = (ActionProgressUpdateEvent) event;
       action.setLastUpdateTime(e.getProgressUpdateTime());
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Progress update for Action"
-            + ", actionId=" + action.getId()
-            + ", startTime=" + action.getStartTime()
-            + ", lastUpdateTime=" + action.getLastUpdateTime());
+        LOG.debug("Progress update for Action, actionId={}, startTime={}, lastUpdateTime={}",
+          action.getId(), action.getStartTime(), action.getLastUpdateTime());
       }
     }
   }
@@ -193,8 +191,7 @@ public class ActionImpl implements Action {
   public void handleEvent(ActionEvent event)
       throws InvalidStateTransitionException {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Handling Action event, eventType=" + event.getType().name()
-          + ", event=" + event.toString());
+      LOG.debug("Handling Action event, eventType={}, event={}", event.getType().name(), event);
     }
     ActionState oldState = getState();
     try {
@@ -215,12 +212,8 @@ public class ActionImpl implements Action {
     }
     if (oldState != getState()) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Action transitioned to a new state"
-            + ", actionId=" + this.getId()
-            + ", oldState=" + oldState
-            + ", currentState=" + getState()
-            + ", eventType=" + event.getType().name()
-            + ", event=" + event);
+        LOG.debug("Action transitioned to a new state, actionId={}, oldState={}, currentState={}, eventType={}, event={}",
+          getId(), oldState, getState(), event.getType().name(), event);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 8f33f1a..a4bf815 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -465,16 +465,13 @@ public class ClusterImpl implements Cluster {
       }
     }
 
-    LOG.debug("Adding a new Config group" + ", clusterName = "
-      + getClusterName() + ", groupName = " + configGroup.getName()
-      + ", tag = " + configGroup.getTag() + " with hosts " + hostList);
+    LOG.debug("Adding a new Config group, clusterName = {}, groupName = {}, tag = {} with hosts {}",
+      getClusterName(), configGroup.getName(), configGroup.getTag(), hostList);
 
     if (clusterConfigGroups.containsKey(configGroup.getId())) {
       // The loadConfigGroups will load all groups to memory
-      LOG.debug("Config group already exists" + ", clusterName = "
-          + getClusterName() + ", groupName = " + configGroup.getName()
-          + ", groupId = " + configGroup.getId() + ", tag = "
-          + configGroup.getTag());
+      LOG.debug("Config group already exists, clusterName = {}, groupName = {}, groupId = {}, tag = {}",
+        getClusterName(), configGroup.getName(), configGroup.getId(), configGroup.getTag());
     } else {
       clusterConfigGroups.put(configGroup.getId(), configGroup);
     }
@@ -509,9 +506,8 @@ public class ClusterImpl implements Cluster {
         + requestExecution.getId() + ", description = " + requestExecution.getDescription());
 
     if (requestExecutions.containsKey(requestExecution.getId())) {
-      LOG.debug(
-          "Request schedule already exists" + ", clusterName = " + getClusterName() + ", id = "
-              + requestExecution.getId() + ", description = " + requestExecution.getDescription());
+      LOG.debug("Request schedule already exists, clusterName = {}, id = {}, description = {}",
+        getClusterName(), requestExecution.getId(), requestExecution.getDescription());
     } else {
       requestExecutions.put(requestExecution.getId(), requestExecution);
     }
@@ -542,9 +538,8 @@ public class ClusterImpl implements Cluster {
       throw new ConfigGroupNotFoundException(getClusterName(), id.toString());
     }
 
-    LOG.debug("Deleting Config group" + ", clusterName = " + getClusterName()
-        + ", groupName = " + configGroup.getName() + ", groupId = "
-        + configGroup.getId() + ", tag = " + configGroup.getTag());
+    LOG.debug("Deleting Config group, clusterName = {}, groupName = {}, groupId = {}, tag = {}",
+      getClusterName(), configGroup.getName(), configGroup.getId(), configGroup.getTag());
 
     configGroup.delete();
     clusterConfigGroups.remove(id);
@@ -666,10 +661,8 @@ public class ClusterImpl implements Cluster {
     }
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Adding a new ServiceComponentHost" + ", clusterName="
-          + getClusterName() + ", clusterId=" + getClusterId()
-          + ", serviceName=" + serviceName + ", serviceComponentName"
-          + componentName + ", hostname= " + hostname);
+      LOG.debug("Adding a new ServiceComponentHost, clusterName={}, clusterId={}, serviceName={}, serviceComponentName{}, hostname= {}",
+        getClusterName(), getClusterId(), serviceName, componentName, hostname);
     }
 
     serviceComponentHosts.get(serviceName).get(componentName).put(hostname,
@@ -742,10 +735,8 @@ public class ClusterImpl implements Cluster {
     }
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Removing a ServiceComponentHost" + ", clusterName="
-          + getClusterName() + ", clusterId=" + getClusterId()
-          + ", serviceName=" + serviceName + ", serviceComponentName"
-          + componentName + ", hostname= " + hostname);
+      LOG.debug("Removing a ServiceComponentHost, clusterName={}, clusterId={}, serviceName={}, serviceComponentName{}, hostname= {}",
+        getClusterName(), getClusterId(), serviceName, componentName, hostname);
     }
 
     serviceComponentHosts.get(serviceName).get(componentName).remove(hostname);
@@ -830,8 +821,7 @@ public class ClusterImpl implements Cluster {
   @Override
   public void addService(Service service) {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Adding a new Service" + ", clusterName=" + getClusterName() + ", clusterId="
-          + getClusterId() + ", serviceName=" + service.getName());
+      LOG.debug("Adding a new Service, clusterName={}, clusterId={}, serviceName={}", getClusterName(), getClusterId(), service.getName());
     }
     services.put(service.getName(), service);
   }
@@ -894,10 +884,8 @@ public class ClusterImpl implements Cluster {
     clusterGlobalLock.writeLock().lock();
     try {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Changing DesiredStackVersion of Cluster" + ", clusterName="
-            + getClusterName() + ", clusterId=" + getClusterId()
-            + ", currentDesiredStackVersion=" + desiredStackVersion
-            + ", newDesiredStackVersion=" + stackId);
+        LOG.debug("Changing DesiredStackVersion of Cluster, clusterName={}, clusterId={}, currentDesiredStackVersion={}, newDesiredStackVersion={}",
+          getClusterName(), getClusterId(), desiredStackVersion, stackId);
       }
 
       desiredStackVersion = stackId;
@@ -2376,7 +2364,8 @@ public class ClusterImpl implements Cluster {
 
         LOG.info("Setting {} with version tag {} created on {} to selected for stack {}",
             entity.getType(), entity.getTag(), new Date(entity.getTimestamp()),
-            stackId.toString());
+          stackId
+        );
       }
 
       // since the entities which were modified came from the cluster entity's

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
index 5f1e934..595ce4a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
@@ -305,9 +305,7 @@ public class ClustersImpl implements Clusters {
       throw new HostNotFoundException(hostname);
     }
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Looking up clusters for hostname"
-          + ", hostname=" + hostname
-          + ", mappedClusters=" + clusters.size());
+      LOG.debug("Looking up clusters for hostname, hostname={}, mappedClusters={}", hostname, clusters.size());
     }
     return Collections.unmodifiableSet(clusters);
 
@@ -397,7 +395,7 @@ public class ClustersImpl implements Clusters {
         Collections.newSetFromMap(new ConcurrentHashMap<Cluster, Boolean>()));
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Adding a host to Clusters" + ", hostname=" + hostname);
+      LOG.debug("Adding a host to Clusters, hostname={}", hostname);
     }
 
     // publish the event

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
index 04aba31..aa87f7d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
@@ -327,7 +327,7 @@ public class HostImpl implements Host {
         agentVersion = e.agentVersion.getVersion();
       }
       LOG.info("Received host registration, host="
-        + e.hostInfo.toString()
+        + e.hostInfo
         + ", registrationTime=" + e.registrationTime
         + ", agentVersion=" + agentVersion);
 
@@ -355,9 +355,7 @@ public class HostImpl implements Host {
     public void transition(HostImpl host, HostEvent event) {
       HostStatusUpdatesReceivedEvent e = (HostStatusUpdatesReceivedEvent)event;
       // TODO Audit logs
-      LOG.debug("Host transition to host status updates received state"
-          + ", host=" + e.getHostName()
-          + ", heartbeatTime=" + e.getTimestamp());
+      LOG.debug("Host transition to host status updates received state, host={}, heartbeatTime={}", e.getHostName(), e.getTimestamp());
       host.setHealthStatus(new HostHealthStatus(HealthStatus.HEALTHY,
         host.getHealthStatus().getHealthReport()));
     }
@@ -404,9 +402,7 @@ public class HostImpl implements Host {
       HostHealthyHeartbeatEvent e = (HostHealthyHeartbeatEvent) event;
       host.setLastHeartbeatTime(e.getHeartbeatTime());
       // TODO Audit logs
-      LOG.debug("Host transitioned to a healthy state"
-              + ", host=" + e.getHostName()
-              + ", heartbeatTime=" + e.getHeartbeatTime());
+      LOG.debug("Host transitioned to a healthy state, host={}, heartbeatTime={}", e.getHostName(), e.getHeartbeatTime());
       host.setHealthStatus(new HostHealthStatus(HealthStatus.HEALTHY, host.getHealthStatus().getHealthReport()));
     }
   }
@@ -419,10 +415,8 @@ public class HostImpl implements Host {
       HostUnhealthyHeartbeatEvent e = (HostUnhealthyHeartbeatEvent) event;
       host.setLastHeartbeatTime(e.getHeartbeatTime());
       // TODO Audit logs
-      LOG.debug("Host transitioned to an unhealthy state"
-          + ", host=" + e.getHostName()
-          + ", heartbeatTime=" + e.getHeartbeatTime()
-          + ", healthStatus=" + e.getHealthStatus());
+      LOG.debug("Host transitioned to an unhealthy state, host={}, heartbeatTime={}, healthStatus={}",
+        e.getHostName(), e.getHeartbeatTime(), e.getHealthStatus());
       host.setHealthStatus(e.getHealthStatus());
     }
   }
@@ -434,9 +428,7 @@ public class HostImpl implements Host {
     public void transition(HostImpl host, HostEvent event) {
       HostHeartbeatLostEvent e = (HostHeartbeatLostEvent) event;
       // TODO Audit logs
-      LOG.debug("Host transitioned to heartbeat lost state"
-          + ", host=" + e.getHostName()
-          + ", lastHeartbeatTime=" + host.getLastHeartbeatTime());
+      LOG.debug("Host transitioned to heartbeat lost state, host={}, lastHeartbeatTime={}", e.getHostName(), host.getLastHeartbeatTime());
       host.setHealthStatus(new HostHealthStatus(HealthStatus.UNKNOWN, host.getHealthStatus().getHealthReport()));
 
       host.topologyManager.onHostHeartBeatLost(host);
@@ -568,8 +560,7 @@ public class HostImpl implements Host {
   public void handleEvent(HostEvent event)
       throws InvalidStateTransitionException {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Handling Host event, eventType=" + event.getType().name()
-          + ", event=" + event.toString());
+      LOG.debug("Handling Host event, eventType={}, event={}", event.getType().name(), event);
     }
     HostState oldState = getState();
     try {
@@ -590,12 +581,8 @@ public class HostImpl implements Host {
     }
     if (oldState != getState()) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Host transitioned to a new state"
-            + ", host=" + getHostName()
-            + ", oldState=" + oldState
-            + ", currentState=" + getState()
-            + ", eventType=" + event.getType().name()
-            + ", event=" + event);
+        LOG.debug("Host transitioned to a new state, host={}, oldState={}, currentState={}, eventType={}, event={}",
+          getHostName(), oldState, getState(), event.getType().name(), event);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/state/stack/ConfigUpgradePack.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/ConfigUpgradePack.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/ConfigUpgradePack.java
index c5a7e18..db02e95 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/ConfigUpgradePack.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/ConfigUpgradePack.java
@@ -58,7 +58,7 @@ public class ConfigUpgradePack {
    */
   private Map<String, ConfigUpgradeChangeDefinition> changesById;
 
-  private static Logger LOG = LoggerFactory.getLogger(ConfigUpgradePack.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ConfigUpgradePack.class);
 
   /**
    * no-arg default constructor for JAXB

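The change above applies a single pattern across all of these files: string concatenation inside LOG.debug() calls is replaced by SLF4J "{}" placeholders, and several LOG fields become static final. With placeholders, the message is only formatted when debug logging is actually enabled. A minimal, self-contained sketch of the idiom (the class and method below are illustrative, not taken from the Ambari sources):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class PlaceholderLoggingExample {

      // "static final" mirrors the convention this commit also enforces for LOG fields.
      private static final Logger LOG = LoggerFactory.getLogger(PlaceholderLoggingExample.class);

      void setDesiredState(String clusterName, long clusterId, String oldState, String newState) {
        // Before: "..." + clusterName + ", clusterId=" + clusterId + ... built the full
        // string even when debug logging was disabled.
        // After: the {} placeholders defer formatting until the logger knows the
        // message will actually be written.
        LOG.debug("Setting DesiredState, clusterName={}, clusterId={}, oldState={}, newState={}",
            clusterName, clusterId, oldState, newState);
      }
    }

Note that the diff still keeps the explicit LOG.isDebugEnabled() guard wherever extra work happens inside the block (for example the printRoleCommandOrder(LOG) call), since placeholders only avoid string formatting, not the cost of computing the arguments.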

[06/50] [abbrv] ambari git commit: AMBARI-21070. Race condition: webhdfs call mkdir /tmp/druid-indexing before /tmp making tmp not writable. (aonishuk)

Posted by nc...@apache.org.
AMBARI-21070. Race condition: webhdfs call mkdir /tmp/druid-indexing before /tmp making tmp not writable. (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0dd9fbf3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0dd9fbf3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0dd9fbf3

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 0dd9fbf34764de407d9605f4472da03bf466cad6
Parents: bc90de2
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Jun 6 13:59:28 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Jun 6 13:59:28 2017 +0300

----------------------------------------------------------------------
 .../DRUID/0.9.2/package/scripts/druid.py        |  7 ++++++
 .../DRUID/0.9.2/package/scripts/params.py       |  1 +
 .../test/python/stacks/2.6/DRUID/test_druid.py  | 23 +++++++++++++++++++-
 .../test/python/stacks/2.6/configs/default.json |  3 ++-
 4 files changed, 32 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0dd9fbf3/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
index 18febeb..cc02519 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
@@ -159,6 +159,13 @@ def ensure_hadoop_directories():
   # Create HadoopIndexTask hadoopWorkingPath
   hadoop_working_path = druid_middlemanager_config['druid.indexer.task.hadoopWorkingPath']
   if hadoop_working_path is not None:
+    if hadoop_working_path.startswith(params.hdfs_tmp_dir):
+      params.HdfsResource(params.hdfs_tmp_dir,
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.hdfs_user,
+                           mode=0777,
+      )
     create_hadoop_directory(hadoop_working_path)
 
   # If HDFS is used for storing logs, create Index Task log directory

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dd9fbf3/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
index b1d9472..d47e604 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
@@ -125,6 +125,7 @@ hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name',
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 dfs_type = default("/commandParams/dfs_type", "")
+hdfs_tmp_dir = config['configurations']['hadoop-env']['hdfs_tmp_dir']
 
 # Kerberos
 druid_principal_name = default('/configurations/druid-common/druid.hadoop.security.kerberos.principal',

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dd9fbf3/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py b/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
index f35fd8e..41b0bf5 100644
--- a/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
+++ b/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
@@ -622,7 +622,28 @@ class TestDruid(RMFTestCase):
                               dfs_type = '',
                               mode=0755
                               )
-
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        dfs_type = '',
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = 'missing_principal',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        immutable_paths = [u'/apps/hive/warehouse',
+           u'/apps/falcon',
+           u'/mr-history/done',
+           u'/app-logs',
+           u'/tmp'],
+        mode = 0777,
+    )
     self.assertResourceCalled('HdfsResource', '/tmp/druid-indexing',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dd9fbf3/ambari-server/src/test/python/stacks/2.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/configs/default.json b/ambari-server/src/test/python/stacks/2.6/configs/default.json
index 54000e6..2570657 100644
--- a/ambari-server/src/test/python/stacks/2.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.6/configs/default.json
@@ -54,7 +54,8 @@
       "content": "\n            #\n            #\n            # Licensed to the Apache Software Foundation (ASF) under one\n            # or more contributor license agreements.  See the NOTICE file\n            # distributed with this work for additional information\n            # regarding copyright ownership.  The ASF licenses this file\n            # to you under the Apache License, Version 2.0 (the\n            # \"License\"); you may not use this file except in compliance\n            # with the License.  You may obtain a copy of the License at\n            #\n            #   http://www.apache.org/licenses/LICENSE-2.0\n            #\n            # Unless required by applicable law or agreed to in writing,\n            # software distributed under the License is distributed on an\n            # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n            # KIND, either express or implied.  See the License for the\n            # specific language governing permissions and limitations\n            # under the License.\n            #\n            #\n            #\n\n            # Set everything to be logged to the console\n            log4j.rootCategory=WARN, console\n            log4j.appender.console=org.apache.log4j.ConsoleAppender\n            log4j.appender.console.target=System.err\n            log4j.appender.console.layout=org.apache.log4j.PatternLayout\n            log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n            # Settings to quiet third party logs that are too verbose\n            log4j.logger.org.eclipse.jetty=WARN\n            log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=WARN\n            log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=WARN"
     },
     "hadoop-env": {
-      "hdfs_user": "hdfs"
+      "hdfs_user": "hdfs",
+      "hdfs_tmp_dir": "/tmp"
     },
     "core-site": {
       "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020"

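The fix makes the parent directory explicit: when the Hadoop working path lives under hdfs_tmp_dir, Ambari now creates hdfs_tmp_dir itself (owner hdfs, mode 0777) before creating /tmp/druid-indexing, so /tmp is never created implicitly with a restrictive default mode as a side effect of creating the child. A rough sketch of the same ordering using the plain Hadoop FileSystem API instead of Ambari's HdfsResource wrapper (the class below is illustrative only):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;

    final class DruidTmpDirOrderingExample {

      // Create /tmp explicitly with a permissive mode before creating the druid
      // working directory underneath it; creating the child first would implicitly
      // create /tmp with whatever default permissions apply, which is the race the
      // commit message describes.
      static void ensureDirectories(Configuration conf) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path tmp = new Path("/tmp");
        if (!fs.exists(tmp)) {
          fs.mkdirs(tmp, new FsPermission((short) 0777));
        }
        fs.mkdirs(new Path("/tmp/druid-indexing"));
      }
    }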

[27/50] [abbrv] ambari git commit: AMBARI-21184. When trying to Add Hiveserver2 service on a node, we just get a pop-up dialog box, and then a spinning wheel. Unable to click "Confirm Add" (alexantonenko)

Posted by nc...@apache.org.
AMBARI-21184. When trying to Add Hiveserver2 service on a node, we just get a pop-up dialog box, and then a spinning wheel. Unable to click "Confirm Add" (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7e3641eb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7e3641eb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7e3641eb

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 7e3641eba8ce974c7768a85f65b5910bd5ff55ca
Parents: 0b6d0dc
Author: Alex Antonenko <hi...@gmail.com>
Authored: Thu Jun 8 17:19:00 2017 +0300
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Thu Jun 8 19:03:18 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/utils/config.js       | 2 +-
 ambari-web/test/utils/config_test.js | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7e3641eb/ambari-web/app/utils/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js
index 8e6e2fd..00cc2a3 100644
--- a/ambari-web/app/utils/config.js
+++ b/ambari-web/app/utils/config.js
@@ -1251,7 +1251,7 @@ App.config = Em.Object.create({
   getTempletonHiveHosts: function (value) {
     var pattern = /thrift:\/\/.+:\d+/,
       patternMatch = value.match(pattern);
-    return patternMatch ? patternMatch[0].split('\\,') : value;
+    return patternMatch ? patternMatch[0].split('\\,') : [];
   },
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/7e3641eb/ambari-web/test/utils/config_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/config_test.js b/ambari-web/test/utils/config_test.js
index 7ad8828..2c7baf9 100644
--- a/ambari-web/test/utils/config_test.js
+++ b/ambari-web/test/utils/config_test.js
@@ -1469,7 +1469,7 @@ describe('App.config', function() {
       },
       {
         value: 'hive.metastore.local=false',
-        result: 'hive.metastore.local=false',
+        result: [],
         message: 'no hosts list'
       }
     ];
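The heart of the fix is the fallback: when the value contains no thrift://host:port URI, getTempletonHiveHosts now returns an empty array instead of echoing the raw string, so callers can always treat the result as a hosts list. The same logic, transcribed as a hedged Java sketch (class and constant names below are illustrative; the real code is the Ember helper in config.js above):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    final class TempletonHiveHostsExample {

      private static final Pattern THRIFT_URI = Pattern.compile("thrift://.+:\\d+");

      // Returns the matched URI split on the literal "\," separator, or an empty
      // list when no thrift URI is present (the "no hosts list" case the updated
      // unit test now expects).
      static List<String> templetonHiveHosts(String value) {
        Matcher m = THRIFT_URI.matcher(value);
        return m.find()
            ? Arrays.asList(m.group().split(Pattern.quote("\\,")))
            : Collections.<String>emptyList();
      }
    }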


[42/50] [abbrv] ambari git commit: AMBARI-21198. Hive2. Query result improvements (pallavkul)

Posted by nc...@apache.org.
AMBARI-21198. Hive2. Query result improvements (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d5105226
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d5105226
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d5105226

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d5105226bcfcb97d938e92d40914ef3a083c2d77
Parents: 13798c2
Author: pallavkul <pa...@gmail.com>
Authored: Mon Jun 12 14:47:02 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Mon Jun 12 14:47:02 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/models/worksheet.js   |  1 +
 .../resources/ui/app/routes/queries/query.js    | 15 +---
 .../ui/app/routes/queries/query/results.js      |  6 +-
 .../templates/components/query-result-table.hbs | 92 ++++++++++----------
 .../ui/app/templates/queries/query.hbs          | 10 +--
 .../ui/app/templates/queries/query/log.hbs      | 12 ++-
 .../ui/app/templates/queries/query/results.hbs  |  9 +-
 7 files changed, 74 insertions(+), 71 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d5105226/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js b/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
index 12a681c..e3c4d07 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
@@ -29,6 +29,7 @@ export default DS.Model.extend({
   nextPage: DS.attr('number', {defaultValue: 1}),
   selected: DS.attr('boolean', {transient: true, defaultValue: false}),
   jobData: DS.attr({defaultValue: []}),
+  currentJobId: DS.attr({defaultValue: null}),
   currentJobData: DS.attr({defaultValue: null}),
   hasNext: DS.attr('boolean', { defaultValue: false}),
   hasPrevious: DS.attr('boolean', { defaultValue: false}),

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5105226/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index 6b395ac..f6256898 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -128,30 +128,23 @@ export default Ember.Route.extend(UILoggerMixin, {
     controller.set('worksheet', model);
     controller.set('selectedTablesModels',model.get('selectedTablesModels') || selectedTablesModels );
     controller.set('selectedMultiDb', model.get('selectedMultiDb') || selectedMultiDb);
-
     controller.set('isQueryRunning', model.get('isQueryRunning'));
     controller.set('currentQuery', model.get('query'));
-    controller.set('currentJobId', null);
+    controller.set('currentJobId', model.get('currentJobId'));
     controller.set('queryResult', model.get('queryResult'));
     controller.set('isJobSuccess', model.get('isJobSuccess'));
     controller.set('isJobCancelled', model.get('isJobCancelled'));
     controller.set('isJobCreated', model.get('isJobCreated'));
-
     controller.set('isExportResultSuccessMessege', false);
     controller.set('isExportResultFailureMessege', false);
     controller.set('showSaveHdfsModal', false);
-
     controller.set('logResults', model.get('logResults') || '');
-
     controller.set('isVisualExplainQuery', false);
     controller.set('visualExplainJson', model.get('visualExplainJson'));
-
     controller.set('showWorksheetModal',false);
     controller.set('worksheetModalSuccess',false);
     controller.set('worksheetModalFail',false);
-
     controller.set('tabs', tabs);
-
   },
   checkIfDeafultDatabaseExists(alldatabases){
     if(this.get('controller.model').get('selectedDb')) {
@@ -248,7 +241,7 @@ export default Ember.Route.extend(UILoggerMixin, {
     executeQuery(isVisualExplainQuery){
 
       let self = this, ctrlr = self.get('controller'), ctrlrModel = self.get('controller.model');
-      this.get('controller').set('currentJobId', null);
+      this.get('controller.model').set('currentJobId', null);
       if(!Ember.isEmpty(isVisualExplainQuery)){
         isVisualExplainQuery = true;
         this.get('controller').set('isVisualExplainQuery', true);
@@ -325,7 +318,7 @@ export default Ember.Route.extend(UILoggerMixin, {
         self.get('controller.model').set('currentJobData', data);
         self.get('controller.model').set('queryFile', data.job.queryFile);
         self.get('controller.model').set('logFile', data.job.logFile);
-        self.get('controller').set('currentJobId', data.job.id);
+        self.get('controller.model').set('currentJobId', data.job.id);
         ctrlrModel.set('isJobCreated',true);
         ctrlr.set('isJobCreated',true);
 
@@ -362,7 +355,7 @@ export default Ember.Route.extend(UILoggerMixin, {
     },
 
     stopQuery(){
-      let jobId = this.get('controller').get('currentJobId');
+      let jobId = this.get('controller.model').get('currentJobId');
       this.get('jobs').stopJob(jobId)
         .then( data => this.get('controller').set('isJobCancelled', true));
     },

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5105226/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query/results.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query/results.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query/results.js
index df970fb..66c6112 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query/results.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query/results.js
@@ -37,27 +37,23 @@ export default Ember.Route.extend(UILoggerMixin, {
     model.set('lastResultRoute', ".results");
 
     if(!Ember.isEmpty(model.get('currentJobData'))){
-
       let jobId = model.get('currentJobData').job.id;
+      this.controller.set('model', model);
       this.controller.set('jobId', jobId);
       this.controller.set('payloadTitle',  model.get('currentJobData').job.title);
       this.controller.set('isQueryRunning', model.get('isQueryRunning'));
       this.controller.set('previousPage', model.get('previousPage'));
       this.controller.set('hasNext', model.get('hasNext'));
       this.controller.set('hasPrevious', model.get('hasPrevious'));
-
       this.controller.set('queryResult', model.get('queryResult'));
       this.controller.set('isExportResultSuccessMessege', false);
       this.controller.set('isExportResultFailureMessege', false);
       this.controller.set('showSaveHdfsModal', false);
       this.controller.set('showDownloadCsvModal', false);
-
-
       this.controller.set('hasJobAssociated', true);
     } else {
       this.controller.set('hasJobAssociated', false);
     }
-
   },
 
   actions:{

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5105226/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs
index 395987f..c75ef48 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs
@@ -16,54 +16,58 @@
 * limitations under the License.
 }}
 
-  <div class="clearfix">
-    <div class="col-md-2">
-      <div class="row input-group">
-        {{input type="text" value=columnFilterText placeholder="Filter columns" class="form-control"}}
-        <span class="input-group-btn">
-          <button class="btn btn-default" {{action "clearColumnsFilter"}}>{{fa-icon "times"}} </button>
-        </span>
-      </div>
-    </div>
-    <div class="pull-right">
-      <span class="dropdown">
-      <button class="btn btn-default dropdown-toggle" title="Actions" data-toggle="dropdown">{{fa-icon "bars"}} </button>
-      <ul class="dropdown-menu dropdown-menu-right">
-        <li><a href="#" {{action "openSaveHdfsModal" }} class="text-uppercase">{{fa-icon "save"}} Save To HDFS</a></li>
-        <li><a href="#" {{action "openDownloadCsvModal" }} class="text-uppercase">{{fa-icon "download"}} Download As CSV</a></li>
-      </ul>
-    </span>&nbsp;
+{{#unless isQueryRunning}}
+    {{#if currentJobData}}
+      <div class="clearfix">
+        <div class="col-md-2">
+          <div class="row input-group">
+            {{input type="text" value=columnFilterText placeholder="Filter columns" class="form-control"}}
+            <span class="input-group-btn">
+              <button class="btn btn-default" {{action "clearColumnsFilter"}}>{{fa-icon "times"}} </button>
+            </span>
+          </div>
+        </div>
+        <div class="pull-right">
+          <span class="dropdown">
+          <button class="btn btn-default dropdown-toggle" title="Actions" data-toggle="dropdown">{{fa-icon "bars"}} </button>
+          <ul class="dropdown-menu dropdown-menu-right">
+            <li><a href="#" {{action "openSaveHdfsModal" }} class="text-uppercase">{{fa-icon "save"}} Save To HDFS</a></li>
+            <li><a href="#" {{action "openDownloadCsvModal" }} class="text-uppercase">{{fa-icon "download"}} Download As CSV</a></li>
+          </ul>
+        </span>&nbsp;
 
-      <button class="btn btn-default" title="Previous Page" {{action "goPrevPage" payloadTitle }} disabled={{not hasPrevious}} >{{fa-icon "arrow-left"}} </button>
-      <button class="btn btn-default" title="Next Page" {{action "goNextPage" payloadTitle}} disabled={{not hasNext}} >{{fa-icon "arrow-right"}} </button> &nbsp;
-      <button class="btn btn-default" title="Expand/Collspse" {{action "expandQueryResultPanel" }}>{{fa-icon "expand"}}</button>
-    </div>
-  </div>
+          <button class="btn btn-default" title="Previous Page" {{action "goPrevPage" payloadTitle }} disabled={{not hasPrevious}} >{{fa-icon "arrow-left"}} </button>
+          <button class="btn btn-default" title="Next Page" {{action "goNextPage" payloadTitle}} disabled={{not hasNext}} >{{fa-icon "arrow-right"}} </button> &nbsp;
+          <button class="btn btn-default" title="Expand/Collspse" {{action "expandQueryResultPanel" }}>{{fa-icon "expand"}}</button>
+        </div>
+      </div>
 
-  <div class="col-md-12">
-    <div class="row">
-      <div class="query-result">
-        <table class="table table-striped">
-          <thead>
-            <tr>
-              {{#each filteredColumns as |column|}}
-                <th>{{column.label}}</th>
-              {{/each}}
-            </tr>
-          </thead>
-          <tbody>
-          {{#each rows as |row|}}
-            <tr>
-              {{#each filteredColumns as |column|}}
-                <td>{{extract-value row column.valuePath}}</td>
+      <div class="col-md-12">
+        <div class="row">
+          <div class="query-result">
+            <table class="table table-striped">
+              <thead>
+                <tr>
+                  {{#each filteredColumns as |column|}}
+                    <th>{{column.label}}</th>
+                  {{/each}}
+                </tr>
+              </thead>
+              <tbody>
+              {{#each rows as |row|}}
+                <tr>
+                  {{#each filteredColumns as |column|}}
+                    <td>{{extract-value row column.valuePath}}</td>
+                  {{/each}}
+                </tr>
               {{/each}}
-            </tr>
-          {{/each}}
-          </tbody>
-        </table>
+              </tbody>
+            </table>
+          </div>
+        </div>
       </div>
-    </div>
-  </div>
+    {{/if}}
+{{/unless}}
 
 {{#if showSaveHdfsModal}}
   {{export-result

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5105226/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
index 7b27ba7..16fb466 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
@@ -56,11 +56,11 @@
 
       </div>
     </div>
-
-    {{#tabs-pane tabs=tabs inverse=true as |tab|}}
-      {{tabs-item tab=tab tabs=tabs}}
-    {{/tabs-pane}}
-
+    {{#if worksheet.currentJobId}}
+        {{#tabs-pane tabs=tabs inverse=true as |tab|}}
+          {{tabs-item tab=tab tabs=tabs}}
+        {{/tabs-pane}}
+    {{/if}}
     {{outlet}}
   </div>
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5105226/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/log.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/log.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/log.hbs
index e9f1a50..6dc7633 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/log.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/log.hbs
@@ -23,7 +23,13 @@
 {{/unless}}
 
 {{#if hasJobAssociated}}
-  <div>
-    <pre class="prettyprint">{{logResults}}</pre>
-  </div>
+  {{#if logResults}}
+    <div>
+      <pre class="prettyprint">{{logResults}}</pre>
+    </div>
+  {{else}}
+    <div class="explain--error">
+      <div>No Log available.</div>
+    </div>
+  {{/if}}
 {{/if}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5105226/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/results.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/results.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/results.hbs
index 966312c..db0a79c 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/results.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/results.hbs
@@ -16,8 +16,6 @@
 * limitations under the License.
 }}
 
-{{outlet}}
-
 {{#unless hasJobAssociated}}
   <div class="explain--error">
     <div>No Results available.</div>
@@ -27,7 +25,9 @@
 {{#if hasJobAssociated}}
   {{query-result-table
   queryResult=queryResult
-  isQueryRunning=isQueryRunning
+  isQueryRunning=model.isQueryRunning
+  currentJobData=model.currentJobData
+  isJobCreated=model.isJobCreated
   jobId=jobId
   payloadTitle=payloadTitle
   previousPage=previousPage
@@ -45,6 +45,9 @@
   showDownloadCsvModal=showDownloadCsvModal
   }}
 {{/if}}
+{{outlet}}
+
+
 
 
 


[03/50] [abbrv] ambari git commit: AMBARI-21113. hdfs_user_nofile_limit is not picking as expected for datanode process in a secure cluster (aonishuk)

Posted by nc...@apache.org.
AMBARI-21113. hdfs_user_nofile_limit is not picking as expected for datanode process in a secure cluster (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4dba161a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4dba161a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4dba161a

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 4dba161a6fbeab2ab5507c9ff50f524242b7f450
Parents: b3425c9
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Jun 6 13:57:00 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Jun 6 13:57:00 2017 +0300

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog250.java       | 29 +++++++++++++++-----
 1 file changed, 22 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4dba161a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
index e911a21..1f3a99d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
@@ -801,18 +801,20 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
       Map<String, Cluster> clusterMap = clusters.getClusters();
       Map<String, String> prop = new HashMap<>();
 
+
       if (clusterMap != null && !clusterMap.isEmpty()) {
         for (final Cluster cluster : clusterMap.values()) {
-          /*
-           * Append "ulimit -l" from hadoop-env.sh
-           */
+
           String content = null;
+          Boolean contentUpdated = false;
+
           if (cluster.getDesiredConfigByType(HADOOP_ENV) != null) {
             content = cluster.getDesiredConfigByType(HADOOP_ENV).getProperties().get("content");
           }
 
-          if (content != null && !content.contains("ulimit")) {
-            content += "\n" +
+          if (content != null) {
+            if (!content.contains("ulimit -l")) {  // Append "ulimit -l" to hadoop-env.sh
+              content += "\n" +
                 "{% if is_datanode_max_locked_memory_set %}\n" +
                 "# Fix temporary bug, when ulimit from conf files is not picked up, without full relogin. \n" +
                 "# Makes sense to fix only when runing DN as root \n" +
@@ -821,9 +823,22 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
                 "fi\n" +
                 "{% endif %}";
 
-            prop.put("content", content);
-            updateConfigurationPropertiesForCluster(cluster, "hadoop-env",
+              contentUpdated = true;
+            }
+
+            if (!content.contains("ulimit -n")){  // Append "ulimit -n" to hadoop-env.sh
+              content += "\n" +
+                "if [ \"$command\" == \"datanode\" ] && [ \"$EUID\" -eq 0 ] && [ -n \"$HADOOP_SECURE_DN_USER\" ]; then \n" +
+                "  ulimit -n {{hdfs_user_nofile_limit}}\n" +
+                "fi";
+              contentUpdated = true;
+            }
+
+            if (contentUpdated){
+              prop.put("content", content);
+              updateConfigurationPropertiesForCluster(cluster, "hadoop-env",
                 prop, true, false);
+            }
           }
         }
       }
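The reworked upgrade logic above treats each block independently and tracks whether anything changed: the "ulimit -l" snippet and the new "ulimit -n" snippet are each appended only when the hadoop-env content does not already contain that marker, and the configuration is written back only when contentUpdated is true, so re-running the catalog never duplicates the blocks. A stripped-down sketch of that idempotent-append idea, assuming a plain content string instead of the real cluster and config objects:

    final class HadoopEnvContentUpdaterExample {

      // Appends the snippet only when its marker is absent, so repeated runs
      // leave the content unchanged.
      static String appendIfMissing(String content, String marker, String snippet) {
        if (content == null || content.contains(marker)) {
          return content;
        }
        return content + "\n" + snippet;
      }

      static void example() {
        String content = "export HADOOP_HEAPSIZE=1024";
        String ulimitN =
            "if [ \"$command\" == \"datanode\" ] && [ \"$EUID\" -eq 0 ] && [ -n \"$HADOOP_SECURE_DN_USER\" ]; then \n"
                + "  ulimit -n {{hdfs_user_nofile_limit}}\n"
                + "fi";
        String updated = appendIfMissing(content, "ulimit -n", ulimitN);
        // Mirrors the contentUpdated flag: persist the config only when the text changed.
        boolean contentUpdated = !updated.equals(content);
      }
    }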


[17/50] [abbrv] ambari git commit: AMBARI-21195. Enforce Maven version 3.3.9+

Posted by nc...@apache.org.
AMBARI-21195. Enforce Maven version 3.3.9+


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9b44b62f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9b44b62f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9b44b62f

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 9b44b62f638c56c913b3977683a19ea89e39b4ec
Parents: bdadb70
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Wed Jun 7 18:43:17 2017 +0200
Committer: Attila Doroszlai <ad...@hortonworks.com>
Committed: Thu Jun 8 11:09:29 2017 +0200

----------------------------------------------------------------------
 ambari-project/pom.xml |  5 +++++
 ambari-server/pom.xml  | 20 ++++++++++++++++++++
 2 files changed, 25 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9b44b62f/ambari-project/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-project/pom.xml b/ambari-project/pom.xml
index 98da9f4..64d1416 100644
--- a/ambari-project/pom.xml
+++ b/ambari-project/pom.xml
@@ -497,6 +497,11 @@
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-enforcer-plugin</artifactId>
+          <version>1.4.1</version>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-checkstyle-plugin</artifactId>
           <version>2.17</version>
           <configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9b44b62f/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index c0aed7b..669f90f 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -391,6 +391,26 @@
         </executions>
       </plugin>
       <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>enforce-maven</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <requireMavenVersion>
+                  <version>[3.3.9,)</version>
+                </requireMavenVersion>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
         <groupId>com.github.kongchen</groupId>
         <artifactId>swagger-maven-plugin</artifactId>
         <version>3.1.4</version>


[40/50] [abbrv] ambari git commit: AMBARI-21123 - Part Two: Specify the script directly in alert target for script-based alert dispatchers (Lei Yao via rzang)

Posted by nc...@apache.org.
AMBARI-21123 - Part Two: Specify the script directly in alert target for script-based alert dispatchers (Lei Yao via rzang)

Change-Id: I56e01562f11f389d36ae87b8caa3841517f04812


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/62f4432c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/62f4432c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/62f4432c

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 62f4432cc35e39a8fdd7540e6cfc4f5edbac19cb
Parents: 2bea120
Author: Richard Zang <rz...@apache.org>
Authored: Sun Jun 11 04:25:11 2017 -0700
Committer: Richard Zang <rz...@apache.org>
Committed: Sun Jun 11 04:25:11 2017 -0700

----------------------------------------------------------------------
 .../manage_alert_notifications_controller.js    | 35 ++++++++++++++++--
 ambari-web/app/messages.js                      |  2 ++
 .../main/alerts/create_alert_notification.hbs   | 14 ++++++++
 ambari-web/app/utils/validator.js               |  8 +++++
 ...anage_alert_notifications_controller_test.js | 38 +++++++++++++++++++-
 5 files changed, 93 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/62f4432c/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js b/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
index df15513..a417a73 100644
--- a/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
+++ b/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
@@ -154,6 +154,11 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
       value: '',
       defaultValue: ''
     },
+    scriptFileName: {
+      label: Em.I18n.t('alerts.actions.manage_alert_notifications_popup.scriptFileName'),
+      value: '',
+      defaultValue: ''
+    },
     customProperties: Em.A([])
   }),
 
@@ -289,7 +294,8 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
     'mail.smtp.host',
     'mail.smtp.port',
     'mail.smtp.starttls.enable',
-    'ambari.dispatch-property.script'
+    'ambari.dispatch-property.script',
+    'ambari.dispatch-property.script.filename'
   ],
 
   validationMap: {
@@ -339,7 +345,12 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
         validator: 'hostsValidation'
       }
     ],
-    AlertScript:[]
+    AlertScript:[
+     {
+       errorKey: 'scriptFileNameError',
+       validator: 'scriptFileNameValidation',
+     }
+    ]
   },
 
   /**
@@ -435,6 +446,7 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
     inputFields.set('global.value', selectedAlertNotification.get('global'));
     inputFields.set('allGroups.value', selectedAlertNotification.get('global') ? 'all' : 'custom');
     inputFields.set('scriptDispatchProperty.value', properties['ambari.dispatch-property.script'] || '');
+    inputFields.set('scriptFileName.value', properties['ambari.dispatch-property.script.filename'] || '');
     // not allow to edit global field
     inputFields.set('global.disabled', true);
     inputFields.set('description.value', selectedAlertNotification.get('description'));
@@ -478,6 +490,7 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
           this.smtpUsernameValidation();
           this.smtpPasswordValidation();
           this.retypePasswordValidation();
+          this.scriptFileNameValidation();
         },
 
         isEmailMethodSelected: Em.computed.equal('controller.inputFields.method.value', 'EMAIL'),
@@ -631,7 +644,19 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
           }
         }.observes('controller.inputFields.retypeSMTPPassword.value', 'controller.inputFields.SMTPPassword.value'),
 
-        someErrorExists: Em.computed.or('nameError', 'emailToError', 'emailFromError', 'smtpPortError', 'hostError', 'portError', 'smtpUsernameError', 'smtpPasswordError', 'passwordError'),
+
+        scriptFileNameValidation:function(){
+          var scriptFileNameValue = this.get('controller.inputFields.scriptFileName.value').trim();
+          if(!Em.isBlank(scriptFileNameValue) && !validator.isValidFileName(scriptFileNameValue)){
+             this.set('scriptFileNameError',true);
+             this.set('controller.inputFields.scriptFileName.errorMsg',Em.I18n.t('alerts.actions.manage_alert_notifications_popup.error.scriptFileName.invalid'));
+          }else{
+             this.set('scriptFileNameError',false);
+             this.set('controller.inputFields.scriptFileName.errorMsg',null);
+          }
+        }.observes('controller.inputFields.scriptFileName.value'),
+
+        someErrorExists: Em.computed.or('nameError', 'emailToError', 'emailFromError', 'smtpPortError', 'hostError', 'portError', 'smtpUsernameError', 'smtpPasswordError', 'passwordError','scriptFileNameError'),
 
         setParentErrors: function () {
           this.set('parentView.hasErrors', this.get('someErrorExists'));
@@ -779,6 +804,10 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
       var scriptDispatchProperty = inputFields.get('scriptDispatchProperty.value').trim();
       if( scriptDispatchProperty != '')
           properties['ambari.dispatch-property.script'] = scriptDispatchProperty;
+
+      var scriptFileName = inputFields.get('scriptFileName.value').trim();
+      if( scriptFileName != '')
+          properties['ambari.dispatch-property.script.filename'] = scriptFileName;
     }
     inputFields.get('customProperties').forEach(function (customProperty) {
       properties[customProperty.name] = customProperty.value;

http://git-wip-us.apache.org/repos/asf/ambari/blob/62f4432c/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index faad84c..4efd75f 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -2547,6 +2547,8 @@ Em.I18n.translations = {
   'alerts.actions.manage_alert_notifications_popup.error.name.empty': 'Notification name is required',
   'alerts.actions.manage_alert_notifications_popup.error.name.existed': 'Notification name already exists',
   'alerts.actions.manage_alert_notifications_popup.scriptDispatchProperty':'Script Dispatch Property',
+  'alerts.actions.manage_alert_notifications_popup.scriptFileName':'Script Filename',
+  'alerts.actions.manage_alert_notifications_popup.error.scriptFileName.invalid': 'Invalid script filename',
 
   'hosts.host.add':'Add New Hosts',
   'hosts.table.noHosts':'No hosts to display',

http://git-wip-us.apache.org/repos/asf/ambari/blob/62f4432c/ambari-web/app/templates/main/alerts/create_alert_notification.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/alerts/create_alert_notification.hbs b/ambari-web/app/templates/main/alerts/create_alert_notification.hbs
index 7ec5b1e..afe00d5 100644
--- a/ambari-web/app/templates/main/alerts/create_alert_notification.hbs
+++ b/ambari-web/app/templates/main/alerts/create_alert_notification.hbs
@@ -317,6 +317,20 @@
 
     {{! alert-notification Alert Script }}
     {{#if view.isAlertScriptMethodSelected}}
+    <div {{bindAttr class=":form-group controller.inputFields.scriptFileName.errorMsg:has-error"}}>
+       <label class="control-label col-md-2">{{controller.inputFields.scriptFileName.label}}</label>
+
+       <div class="col-md-10">
+          {{view Em.TextField valueBinding="controller.inputFields.scriptFileName.value" class="form-control"}}
+       </div>
+
+       {{#if controller.inputFields.scriptFileName.errorMsg}}
+        <div class="col-md-10 col-md-offset-2 help-block validation-block error-msg">
+           {{controller.inputFields.scriptFileName.errorMsg}}
+        </div>
+       {{/if}}
+    </div>
+
     <div class="form-group">
       <label class="control-label col-md-2">{{controller.inputFields.scriptDispatchProperty.label}}</label>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/62f4432c/ambari-web/app/utils/validator.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/validator.js b/ambari-web/app/utils/validator.js
index c069724..4c7255d 100644
--- a/ambari-web/app/utils/validator.js
+++ b/ambari-web/app/utils/validator.js
@@ -62,6 +62,14 @@ module.exports = {
   },
 
   /**
+   * validate filename
+   */
+  isValidFileName: function(value){
+    var filenameRegex = /^[0-9a-zA-Z_-]+\.[a-zA-Z]+$/;
+    return filenameRegex.test(value);
+  },
+
+  /**
    * defines if config value looks like link to other config
    * @param value
    * @returns {boolean}

http://git-wip-us.apache.org/repos/asf/ambari/blob/62f4432c/ambari-web/test/controllers/main/alerts/manage_alert_notifications_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/alerts/manage_alert_notifications_controller_test.js b/ambari-web/test/controllers/main/alerts/manage_alert_notifications_controller_test.js
index 0d58afa..bbf4b98 100644
--- a/ambari-web/test/controllers/main/alerts/manage_alert_notifications_controller_test.js
+++ b/ambari-web/test/controllers/main/alerts/manage_alert_notifications_controller_test.js
@@ -90,6 +90,12 @@ function getInputFields() {
     },
     port: {
       value: ''
+    },
+    scriptDispatchProperty:{
+      value: ''
+    },
+    scriptFileName:{
+      value: ''
     }
   });
 }
@@ -339,6 +345,9 @@ describe('App.ManageAlertNotificationsController', function () {
         scriptDispatchProperty:{
           value: ''
         },
+        scriptFileName:{
+          value: ''
+        },
         customProperties: [
           {name: 'customName', value: 'customValue1', defaultValue: 'customValue1'},
           {name: 'customName2', value: 'customValue1', defaultValue: 'customValue1'}
@@ -407,6 +416,9 @@ describe('App.ManageAlertNotificationsController', function () {
         scriptDispatchProperty:{
           value: ''
         },
+        scriptFileName:{
+          value: ''
+        },
         customProperties: [
           {name: 'customName', value: 'customValue', defaultValue: 'customValue'}
         ]
@@ -504,6 +516,9 @@ describe('App.ManageAlertNotificationsController', function () {
         scriptDispatchProperty:{
           value: ''
         },
+        scriptFileName:{
+          value: ''
+        },
         customProperties: [
           {name: 'customName', value: 'customValue1', defaultValue: 'customValue1'},
           {name: 'customName2', value: 'customValue1', defaultValue: 'customValue1'}
@@ -568,6 +583,9 @@ describe('App.ManageAlertNotificationsController', function () {
         scriptDispatchProperty:{
           value: ''
         },
+        scriptFileName:{
+          value: ''
+        },
         customProperties: [
           {name: 'customName', value: 'customValue', defaultValue: 'customValue'}
         ]
@@ -664,6 +682,9 @@ describe('App.ManageAlertNotificationsController', function () {
         scriptDispatchProperty:{
           value: ''
         },
+        scriptFileName:{
+          value: ''
+        },
         customProperties: [
           {name: 'customName', value: 'customValue1', defaultValue: 'customValue1'},
           {name: 'customName2', value: 'customValue1', defaultValue: 'customValue1'}
@@ -726,6 +747,9 @@ describe('App.ManageAlertNotificationsController', function () {
         scriptDispatchProperty:{
           value: ''
         },
+        scriptFileName:{
+          value: ''
+        },
         customProperties: [
           {name: 'customName', value: 'customValue', defaultValue: 'customValue'}
         ]
@@ -744,6 +768,7 @@ describe('App.ManageAlertNotificationsController', function () {
             alertStates: ['OK', 'UNKNOWN'],
             properties: {
               'ambari.dispatch-property.script': "com.mycompany.dispatch.syslog.script",
+              'ambari.dispatch-property.script.filename': 'a.py',
               'customName': 'customValue'
             }
           }));
@@ -815,6 +840,9 @@ describe('App.ManageAlertNotificationsController', function () {
             scriptDispatchProperty: {
               value: ''
             },
+            scriptFileName: {
+              value: ''
+            },
             customProperties: [
               {name: 'customName', value: 'customValue1', defaultValue: 'customValue1'},
               {name: 'customName2', value: 'customValue1', defaultValue: 'customValue1'}
@@ -871,6 +899,9 @@ describe('App.ManageAlertNotificationsController', function () {
             scriptDispatchProperty: {
                value: 'com.mycompany.dispatch.syslog.script'
             },
+            scriptFileName:{
+               value: 'a.py'
+            },
             customProperties: [
               {name: 'customName', value: 'customValue', defaultValue: 'customValue'}
             ]
@@ -929,7 +960,7 @@ describe('App.ManageAlertNotificationsController', function () {
         view = getBodyClass();
       });
 
-      App.TestAliases.testAsComputedOr(getBodyClass(), 'someErrorExists', ['nameError', 'emailToError', 'emailFromError', 'smtpPortError', 'hostError', 'portError', 'smtpUsernameError', 'smtpPasswordError', 'passwordError']);
+      App.TestAliases.testAsComputedOr(getBodyClass(), 'someErrorExists', ['nameError', 'emailToError', 'emailFromError', 'smtpPortError', 'hostError', 'portError', 'smtpUsernameError', 'smtpPasswordError', 'passwordError','scriptFileNameError']);
 
       describe('#selectAllGroups', function () {
 
@@ -1140,6 +1171,11 @@ describe('App.ManageAlertNotificationsController', function () {
               method: 'SNMP',
               errors: ['emailToError', 'emailFromError', 'smtpPortError', 'smtpUsernameError', 'smtpPasswordError', 'passwordError'],
               validators: ['portValidation', 'hostsValidation']
+            },
+            {
+              method: 'ALERT_SCRIPT',
+              errors: ['scriptFileNameError'],
+              validators: ['scriptFileNameValidation']
             }
           ],
           validators = [];
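
The new scriptFileName field is checked against the regular expression ^[0-9a-zA-Z_-]+\.[a-zA-Z]+$ added to validator.js above: a base name of letters, digits, underscores or hyphens, one dot, and an alphabetic extension, so "a.py" is accepted while "my script.sh" or "a.tar.gz" are rejected. Below is a hedged Java transcription of the same check, included only to illustrate which filenames the popup will accept; the authoritative implementation is the JavaScript validator shown in the diff.

    import java.util.regex.Pattern;

    // Same pattern as validator.isValidFileName above, transcribed to Java for illustration.
    public class ScriptFileNameCheck {
      private static final Pattern FILENAME = Pattern.compile("^[0-9a-zA-Z_-]+\\.[a-zA-Z]+$");

      static boolean isValidFileName(String value) {
        return FILENAME.matcher(value).matches();
      }

      public static void main(String[] args) {
        for (String name : new String[] {"a.py", "alert_dispatch-1.sh", "my script.sh", "a.tar.gz"}) {
          System.out.println(name + " -> " + isValidFileName(name));
        }
      }
    }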


[48/50] [abbrv] ambari git commit: AMBARI-21211. Missing os_family causes a warning during host check (Attila Magyar via adoroszlai)

Posted by nc...@apache.org.
AMBARI-21211. Missing os_family causes a warning during host check (Attila Magyar via adoroszlai)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d0eaaae8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d0eaaae8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d0eaaae8

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d0eaaae8e4bbfe3c1ae0e692aeeb64e04ca29bfe
Parents: c7b7743
Author: Attila Magyar <am...@hortonworks.com>
Authored: Mon Jun 12 13:40:29 2017 +0200
Committer: Attila Doroszlai <ad...@hortonworks.com>
Committed: Mon Jun 12 13:40:29 2017 +0200

----------------------------------------------------------------------
 .../src/main/java/org/apache/ambari/server/state/host/HostImpl.java | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d0eaaae8/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
index aa87f7d..3ceaa48 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
@@ -937,6 +937,7 @@ public class HostImpl implements Host {
     r.setLastRegistrationTime(getLastRegistrationTime());
     r.setOsArch(getOsArch());
     r.setOsType(getOsType());
+    r.setOsFamily(getOsFamily());
     r.setRackInfo(getRackInfo());
     r.setTotalMemBytes(getTotalMemBytes());
     r.setPublicHostName(getPublicHostName());


[31/50] [abbrv] ambari git commit: AMBARI-19369. Add Kerberos HTTP SPNEGO authentication support to Hadoop/hbase/kafka/storm sinks (Qin Liu via rlevas)

Posted by nc...@apache.org.
AMBARI-19369. Add Kerberos HTTP SPNEGO authentication support to Hadoop/hbase/kafka/storm sinks (Qin Liu via rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4aaf259e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4aaf259e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4aaf259e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 4aaf259e191344076a88391f5853da4bf85b8a80
Parents: b98f07f
Author: Qin Liu <qi...@gmail.com>
Authored: Thu Jun 8 16:23:34 2017 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Thu Jun 8 16:23:34 2017 -0400

----------------------------------------------------------------------
 ambari-metrics/ambari-metrics-common/pom.xml    |   5 +
 .../timeline/AbstractTimelineMetricsSink.java   |  60 +++++
 .../sink/timeline/AppCookieManager.java         | 219 +++++++++++++++++++
 .../sink/timeline/AppCookieManagerTest.java     |  52 +++++
 .../0.1.0/configuration/ams-hbase-env.xml       |   4 +-
 .../package/templates/hbase_master_jaas.conf.j2 |  10 +
 .../templates/hbase_regionserver_jaas.conf.j2   |  10 +
 .../package/templates/hbase_master_jaas.conf.j2 |  10 +
 .../templates/hbase_regionserver_jaas.conf.j2   |  10 +
 .../HBASE/2.0.0.3.0/configuration/hbase-env.xml |   4 +-
 .../package/templates/hbase_master_jaas.conf.j2 |  10 +
 .../templates/hbase_regionserver_jaas.conf.j2   |  10 +
 .../HDFS/2.1.0.2.0/package/scripts/hdfs.py      |  17 ++
 .../package/templates/hdfs_dn_jaas.conf.j2      |  27 +++
 .../package/templates/hdfs_jn_jaas.conf.j2      |  27 +++
 .../package/templates/hdfs_nn_jaas.conf.j2      |  27 +++
 .../HDFS/3.0.0.3.0/package/scripts/hdfs.py      |  17 ++
 .../package/templates/hdfs_dn_jaas.conf.j2      |  27 +++
 .../package/templates/hdfs_jn_jaas.conf.j2      |  27 +++
 .../package/templates/hdfs_nn_jaas.conf.j2      |  27 +++
 .../KAFKA/0.8.1/configuration/kafka-env.xml     |   4 +
 .../0.8.1/configuration/kafka_jaas_conf.xml     |  11 +
 .../0.8.1/package/templates/kafka_jaas.conf.j2  |  11 +
 .../0.9.1/package/scripts/storm_yaml_utils.py   |   5 +-
 .../0.9.1/package/templates/storm_jaas.conf.j2  |  10 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |  32 ++-
 .../YARN/2.1.0.2.0/package/scripts/yarn.py      |  17 ++
 .../package/templates/mapred_jaas.conf.j2       |  28 +++
 .../package/templates/yarn_ats_jaas.conf.j2     |  27 +++
 .../package/templates/yarn_jaas.conf.j2         |  12 +-
 .../package/templates/yarn_nm_jaas.conf.j2      |  27 +++
 .../configuration-mapred/mapred-env.xml         |   4 +-
 .../YARN/3.0.0.3.0/configuration/yarn-env.xml   |  15 +-
 .../3.0.0.3.0/package/scripts/params_linux.py   |  32 ++-
 .../YARN/3.0.0.3.0/package/scripts/yarn.py      |  19 +-
 .../package/templates/mapred_jaas.conf.j2       |  28 +++
 .../package/templates/yarn_ats_jaas.conf.j2     |  27 +++
 .../package/templates/yarn_jaas.conf.j2         |  12 +-
 .../package/templates/yarn_nm_jaas.conf.j2      |  27 +++
 .../YARN/configuration-mapred/mapred-env.xml    |   4 +-
 .../services/HBASE/configuration/hbase-env.xml  |   4 +-
 .../services/HDFS/configuration/hadoop-env.xml  |   7 +
 .../services/YARN/configuration/yarn-env.xml    |  16 +-
 .../services/HDFS/configuration/hadoop-env.xml  |   7 +
 .../services/HDFS/configuration/hadoop-env.xml  |   7 +
 .../YARN/configuration-mapred/mapred-env.xml    |   4 +-
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |  10 +
 .../stacks/2.0.6/HDFS/test_journalnode.py       |  11 +-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |  24 +-
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |  10 +
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  |  12 +-
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  |  17 +-
 .../stacks/2.0.6/YARN/test_historyserver.py     |  10 +
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |  10 +
 .../stacks/2.0.6/YARN/test_nodemanager.py       |  10 +
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |  10 +
 .../stacks/2.0.6/YARN/test_yarn_client.py       |  10 +
 57 files changed, 1084 insertions(+), 47 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-metrics/ambari-metrics-common/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/pom.xml b/ambari-metrics/ambari-metrics-common/pom.xml
index 62ae75f..f0d3963 100644
--- a/ambari-metrics/ambari-metrics-common/pom.xml
+++ b/ambari-metrics/ambari-metrics-common/pom.xml
@@ -189,5 +189,10 @@
       <artifactId>powermock-module-junit4</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpclient</artifactId>
+      <version>4.2.5</version>
+    </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
index a8dc571..fddf4b3 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.metrics2.sink.timeline.availability.MetricCollectorHAHe
 import org.apache.hadoop.metrics2.sink.timeline.availability.MetricCollectorUnavailableException;
 import org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardHostnameHashingStrategy;
 import org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardStrategy;
+import org.apache.http.HttpStatus;
 import org.codehaus.jackson.map.AnnotationIntrospector;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
@@ -83,6 +84,9 @@ public abstract class AbstractTimelineMetricsSink {
   public static final String COLLECTOR_LIVE_NODES_PATH = "/ws/v1/timeline/metrics/livenodes";
   public static final String INSTANCE_ID_PROPERTY = "instanceId";
   public static final String SET_INSTANCE_ID_PROPERTY = "set.instanceId";
+  public static final String COOKIE = "Cookie";
+  private static final String WWW_AUTHENTICATE = "WWW-Authenticate";
+  private static final String NEGOTIATE = "Negotiate";
 
   protected static final AtomicInteger failedCollectorConnectionsCounter = new AtomicInteger(0);
   public static int NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS = 100;
@@ -97,6 +101,7 @@ public abstract class AbstractTimelineMetricsSink {
   private long lastFailedZkRequestTime = 0l;
 
   private SSLSocketFactory sslSocketFactory;
+  private AppCookieManager appCookieManager = null;
 
   protected final Log LOG;
 
@@ -157,6 +162,18 @@ public abstract class AbstractTimelineMetricsSink {
       connection = connectUrl.startsWith("https") ?
           getSSLConnection(connectUrl) : getConnection(connectUrl);
 
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("emitMetricsJson to " + connectUrl + ", " + jsonData);
+      }
+      AppCookieManager appCookieManager = getAppCookieManager();
+      String appCookie = appCookieManager.getCachedAppCookie(connectUrl);
+      if (appCookie != null) {
+        if (LOG.isInfoEnabled()) {
+          LOG.info("Using cached app cookie for URL:" + connectUrl);
+        }
+        connection.setRequestProperty(COOKIE, appCookie);
+      }
+
       connection.setRequestMethod("POST");
       connection.setRequestProperty("Content-Type", "application/json");
       connection.setRequestProperty("Connection", "Keep-Alive");
@@ -171,6 +188,37 @@ public abstract class AbstractTimelineMetricsSink {
       }
 
       int statusCode = connection.getResponseCode();
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("emitMetricsJson: statusCode = " + statusCode);
+      }
+
+      if (statusCode == HttpStatus.SC_UNAUTHORIZED ) {
+        String wwwAuthHeader = connection.getHeaderField(WWW_AUTHENTICATE);
+        if (LOG.isInfoEnabled()) {
+          LOG.info("Received WWW-Authentication header:" + wwwAuthHeader + ", for URL:" + connectUrl);
+        }
+        if (wwwAuthHeader != null && wwwAuthHeader.trim().startsWith(NEGOTIATE)) {
+          appCookie = appCookieManager.getAppCookie(connectUrl, true);
+          if (appCookie != null) {
+            connection.setRequestProperty(COOKIE, appCookie);
+
+            if (jsonData != null) {
+              try (OutputStream os = connection.getOutputStream()) {
+                os.write(jsonData.getBytes("UTF-8"));
+              }
+            }
+
+            statusCode = connection.getResponseCode();
+            if (LOG.isDebugEnabled()) {
+              LOG.debug("emitMetricsJson: statusCode2 = " + statusCode);
+            }
+          }
+        } else {
+          // no supported authentication type found
+          // we would let the original response propagate
+          LOG.error("Unsupported WWW-Authentication header:" + wwwAuthHeader+ ", for URL:" + connectUrl);
+        }
+      }
 
       if (statusCode != 200) {
         LOG.info("Unable to POST metrics to collector, " + connectUrl + ", " +
@@ -265,6 +313,18 @@ public abstract class AbstractTimelineMetricsSink {
   }
 
   /**
+   * Get the associated app cookie manager.
+   *
+   * @return the app cookie manager
+   */
+  public synchronized AppCookieManager getAppCookieManager() {
+    if (appCookieManager == null) {
+      appCookieManager = new AppCookieManager();
+    }
+    return appCookieManager;
+  }
+
+  /**
    * Cleans up and closes an input stream
    * see http://docs.oracle.com/javase/6/docs/technotes/guides/net/http-keepalive.html
    * @param is the InputStream to clean up

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManager.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManager.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManager.java
new file mode 100644
index 0000000..bcba238
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManager.java
@@ -0,0 +1,219 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import java.io.IOException;
+import java.net.URI;
+import java.security.Principal;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.http.Header;
+import org.apache.http.HeaderElement;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpHost;
+import org.apache.http.HttpRequest;
+import org.apache.http.HttpResponse;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.Credentials;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpOptions;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.client.params.AuthPolicy;
+import org.apache.http.impl.auth.SPNegoSchemeFactory;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.util.EntityUtils;
+
+/**
+ * Handles SPNego authentication as a client of hadoop service, caches
+ * hadoop.auth cookie returned by hadoop service on successful SPNego
+ * authentication. Refreshes hadoop.auth cookie on demand if the cookie has
+ * expired.
+ *
+ */
+public class AppCookieManager {
+
+  static final String HADOOP_AUTH = "hadoop.auth";
+  private static final String HADOOP_AUTH_EQ = "hadoop.auth=";
+  private static final String SET_COOKIE = "Set-Cookie";
+
+  private static final EmptyJaasCredentials EMPTY_JAAS_CREDENTIALS = new EmptyJaasCredentials();
+
+  private Map<String, String> endpointCookieMap = new ConcurrentHashMap<String, String>();
+  private static Log LOG = LogFactory.getLog(AppCookieManager.class);
+
+  /**
+   * Utility method to exercise AppCookieManager directly
+   * @param args element 0 of args should be a URL to hadoop service protected by SPNego
+   * @throws IOException in case of errors
+   */
+  public static void main(String[] args) throws IOException {
+    new AppCookieManager().getAppCookie(args[0], false);
+  }
+
+  public AppCookieManager() {
+  }
+
+  /**
+   * Returns hadoop.auth cookie, doing needed SPNego authentication
+   *
+   * @param endpoint
+   *          the URL of the Hadoop service
+   * @param refresh
+   *          flag indicating whether to refresh the cookie, if
+   *          <code>true</code>, we do a new SPNego authentication and refresh
+   *          the cookie even if the cookie already exists in local cache
+   * @return hadoop.auth cookie value
+   * @throws IOException
+   *           in case of problem getting hadoop.auth cookie
+   */
+  public String getAppCookie(String endpoint, boolean refresh)
+      throws IOException {
+
+    HttpUriRequest outboundRequest = new HttpGet(endpoint);
+    URI uri = outboundRequest.getURI();
+    String scheme = uri.getScheme();
+    String host = uri.getHost();
+    int port = uri.getPort();
+    String path = uri.getPath();
+    if (!refresh) {
+      String appCookie = endpointCookieMap.get(endpoint);
+      if (appCookie != null) {
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("got cached cookie");
+        }
+        return appCookie;
+      }
+    }
+
+    clearAppCookie(endpoint);
+
+    DefaultHttpClient client = new DefaultHttpClient();
+    SPNegoSchemeFactory spNegoSF = new SPNegoSchemeFactory(/* stripPort */true);
+    client.getAuthSchemes().register(AuthPolicy.SPNEGO, spNegoSF);
+    client.getCredentialsProvider().setCredentials(
+        new AuthScope(/* host */null, /* port */-1, /* realm */null),
+        EMPTY_JAAS_CREDENTIALS);
+
+    String hadoopAuthCookie = null;
+    HttpResponse httpResponse = null;
+    try {
+      HttpHost httpHost = new HttpHost(host, port, scheme);
+      HttpRequest httpRequest = new HttpOptions(path);
+      httpResponse = client.execute(httpHost, httpRequest);
+      Header[] headers = httpResponse.getHeaders(SET_COOKIE);
+      if (LOG.isDebugEnabled()) {
+        for (Header header : headers) {
+          LOG.debug(header.getName() + " : " + header.getValue());
+        }
+      }
+      hadoopAuthCookie = getHadoopAuthCookieValue(headers);
+      if (hadoopAuthCookie == null) {
+        int statusCode = httpResponse.getStatusLine().getStatusCode();
+        HttpEntity entity = httpResponse.getEntity();
+        String responseBody = entity != null ? EntityUtils.toString(entity) : null;
+        LOG.error("SPNego authentication failed with statusCode = " + statusCode + ", responseBody = " + responseBody + ", can not get hadoop.auth cookie for URL: " + endpoint);
+        return null;
+      }
+    } finally {
+      if (httpResponse != null) {
+        HttpEntity entity = httpResponse.getEntity();
+        if (entity != null) {
+          entity.getContent().close();
+        }
+      }
+
+    }
+
+    hadoopAuthCookie = HADOOP_AUTH_EQ + quote(hadoopAuthCookie);
+    setAppCookie(endpoint, hadoopAuthCookie);
+    if (LOG.isInfoEnabled()) {
+      LOG.info("Successful SPNego authentication to URL:" + uri.toString());
+    }
+    return hadoopAuthCookie;
+  }
+
+
+  /**
+   * Returns the cached app cookie
+   *  @param endpoint the hadoop end point we authenticate to
+   * @return the cached app cookie, can be null
+   */
+  public String getCachedAppCookie(String endpoint) {
+    return endpointCookieMap.get(endpoint);
+  }
+
+  /**
+   *  Sets the cached app cookie cache
+   *  @param endpoint the hadoop end point we authenticate to
+   *  @param appCookie the app cookie
+   */
+  private void setAppCookie(String endpoint, String appCookie) {
+    endpointCookieMap.put(endpoint, appCookie);
+  }
+
+  /**
+   *  Clears the cached app cookie
+   *  @param endpoint the hadoop end point we authenticate to
+   */
+  private void clearAppCookie(String endpoint) {
+    endpointCookieMap.remove(endpoint);
+  }
+
+  static String quote(String s) {
+    return s == null ? s : "\"" + s + "\"";
+  }
+
+  static String getHadoopAuthCookieValue(Header[] headers) {
+    if (headers == null) {
+      return null;
+    }
+    for (Header header : headers) {
+      HeaderElement[] elements = header.getElements();
+      for (HeaderElement element : elements) {
+        String cookieName = element.getName();
+        if (cookieName.equals(HADOOP_AUTH)) {
+          if (element.getValue() != null) {
+            String trimmedVal = element.getValue().trim();
+            if (!trimmedVal.isEmpty()) {
+              return trimmedVal;
+            }
+          }
+        }
+      }
+    }
+    return null;
+  }
+
+
+  private static class EmptyJaasCredentials implements Credentials {
+
+    public String getPassword() {
+      return null;
+    }
+
+    public Principal getUserPrincipal() {
+      return null;
+    }
+
+  }
+
+}
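
Taken together, the sink change and this class give every sink a cached, refresh-on-demand hadoop.auth cookie: emitMetricsJson attaches getCachedAppCookie(url) when one exists, and only a 401 response carrying a "Negotiate" WWW-Authenticate challenge triggers getAppCookie(url, true) to redo the SPNego handshake and retry the POST. The sketch below shows that calling pattern against an arbitrary SPNEGO-protected collector URL; it is a simplified illustration (it opens a fresh connection for the retry), not the sink's actual code, which is in the AbstractTimelineMetricsSink diff above.

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    import org.apache.hadoop.metrics2.sink.timeline.AppCookieManager;

    // Hedged sketch: try the cached hadoop.auth cookie first, refresh it via a new SPNego
    // handshake only when the collector answers 401 with a "Negotiate" challenge.
    public class SpnegoPostSketch {
      private static final AppCookieManager COOKIES = new AppCookieManager();

      static int post(String collectorUrl, String json) throws Exception {
        HttpURLConnection conn = open(collectorUrl, COOKIES.getCachedAppCookie(collectorUrl));
        write(conn, json);
        int status = conn.getResponseCode();
        String challenge = conn.getHeaderField("WWW-Authenticate");
        if (status == 401 && challenge != null && challenge.trim().startsWith("Negotiate")) {
          String refreshed = COOKIES.getAppCookie(collectorUrl, true);  // new SPNego handshake
          conn = open(collectorUrl, refreshed);
          write(conn, json);
          status = conn.getResponseCode();
        }
        return status;
      }

      private static HttpURLConnection open(String url, String cookie) throws Exception {
        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "application/json");
        if (cookie != null) {
          conn.setRequestProperty("Cookie", cookie);
        }
        conn.setDoOutput(true);
        return conn;
      }

      private static void write(HttpURLConnection conn, String json) throws Exception {
        try (OutputStream os = conn.getOutputStream()) {
          os.write(json.getBytes("UTF-8"));
        }
      }
    }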

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManagerTest.java b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManagerTest.java
new file mode 100644
index 0000000..8355288
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManagerTest.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import org.apache.http.Header;
+import org.apache.http.message.BasicHeader;
+import org.junit.Test;
+
+public class AppCookieManagerTest {
+
+  @Test
+  public void getCachedAppCookie() {
+    assertNull(new AppCookieManager().getCachedAppCookie("http://dummy"));
+  }
+
+  @Test
+  public void getHadoopAuthCookieValueWithNullHeaders() {
+    assertNull(AppCookieManager.getHadoopAuthCookieValue(null));
+  }
+
+  @Test
+  public void getHadoopAuthCookieValueWitEmptylHeaders() {
+    assertNull(AppCookieManager.getHadoopAuthCookieValue(new Header[0]));
+  }
+
+  @Test
+  public void getHadoopAuthCookieValueWithValidlHeaders() {
+    Header[] headers = new Header[1];
+    headers[0] = new BasicHeader("Set-Cookie", AppCookieManager.HADOOP_AUTH + "=dummyvalue");
+    assertNotNull(AppCookieManager.getHadoopAuthCookieValue(headers));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
index db36db8..9c4fc02 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
@@ -255,8 +255,8 @@ export HBASE_MANAGES_ZK=false
 
 {% if security_enabled %}
 export HBASE_OPTS="$HBASE_OPTS -Djava.security.auth.login.config={{client_jaas_config_file}}"
-export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Djava.security.auth.login.config={{master_jaas_config_file}}"
-export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config={{regionserver_jaas_config_file}}"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Djava.security.auth.login.config={{master_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config={{regionserver_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false"
 export HBASE_ZOOKEEPER_OPTS="$HBASE_ZOOKEEPER_OPTS -Djava.security.auth.login.config={{ams_zookeeper_jaas_config_file}}"
 {% endif %}
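
The extra -Djavax.security.auth.useSubjectCredsOnly=false flag is what lets the master and regionserver sinks do outbound SPNego: with it, the JRE's JGSS layer may obtain Kerberos credentials through the com.sun.security.jgss.krb5.initiate login entry added to the jaas.conf templates below, instead of requiring credentials already attached to the current Subject. A hedged sketch of the equivalent programmatic setup follows; real deployments pass these as JVM flags, and the JAAS config path shown is illustrative only.

    // Hedged sketch: the programmatic equivalent of the JVM flags added above.
    public class SpnegoJvmSetup {
      public static void main(String[] args) {
        // Path is illustrative; the stack templates render the real jaas.conf location.
        System.setProperty("java.security.auth.login.config", "/etc/ams-hbase/conf/hbase_master_jaas.conf");
        // Allow JGSS to fetch a TGT via the com.sun.security.jgss.krb5.initiate entry (keytab login).
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
      }
    }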
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_master_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_master_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_master_jaas.conf.j2
index a93c36c..4bb0fc1 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_master_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_master_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
 keyTab="{{master_keytab_path}}"
 principal="{{master_jaas_princ}}";
 };
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{master_keytab_path}}"
+principal="{{master_jaas_princ}}";
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_regionserver_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_regionserver_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_regionserver_jaas.conf.j2
index 7097481..c9973ca 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_regionserver_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_regionserver_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
 keyTab="{{regionserver_keytab_path}}"
 principal="{{regionserver_jaas_princ}}";
 };
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{regionserver_keytab_path}}"
+principal="{{regionserver_jaas_princ}}";
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_master_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_master_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_master_jaas.conf.j2
index a93c36c..4bb0fc1 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_master_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_master_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
 keyTab="{{master_keytab_path}}"
 principal="{{master_jaas_princ}}";
 };
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{master_keytab_path}}"
+principal="{{master_jaas_princ}}";
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_regionserver_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_regionserver_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_regionserver_jaas.conf.j2
index 7097481..c9973ca 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_regionserver_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_regionserver_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
 keyTab="{{regionserver_keytab_path}}"
 principal="{{regionserver_jaas_princ}}";
 };
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{regionserver_keytab_path}}"
+principal="{{regionserver_jaas_princ}}";
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/configuration/hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/configuration/hbase-env.xml b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/configuration/hbase-env.xml
index da12706..cb30b63 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/configuration/hbase-env.xml
+++ b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/configuration/hbase-env.xml
@@ -225,8 +225,8 @@ JDK_DEPENDED_OPTS="-XX:PermSize=128m -XX:MaxPermSize=128m"
 
 {% if security_enabled %}
 export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.security.auth.login.config={{client_jaas_config_file}} -Djava.io.tmpdir={{java_io_tmpdir}}"
-export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}} $JDK_DEPENDED_OPTS"
-export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70  -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}} $JDK_DEPENDED_OPTS"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false $JDK_DEPENDED_OPTS"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70  -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false $JDK_DEPENDED_OPTS"
 export PHOENIX_QUERYSERVER_OPTS="$PHOENIX_QUERYSERVER_OPTS -Djava.security.auth.login.config={{queryserver_jaas_config_file}}"
 {% else %}
 export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.io.tmpdir={{java_io_tmpdir}}"

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_master_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_master_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_master_jaas.conf.j2
index a93c36c..4bb0fc1 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_master_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_master_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
 keyTab="{{master_keytab_path}}"
 principal="{{master_jaas_princ}}";
 };
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{master_keytab_path}}"
+principal="{{master_jaas_princ}}";
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_regionserver_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_regionserver_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_regionserver_jaas.conf.j2
index 7097481..c9973ca 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_regionserver_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_regionserver_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
 keyTab="{{regionserver_keytab_path}}"
 principal="{{regionserver_jaas_princ}}";
 };
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{regionserver_keytab_path}}"
+principal="{{regionserver_jaas_princ}}";
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
index d9b62e2..15fda67 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
@@ -51,6 +51,23 @@ def hdfs(name=None):
   )
 
   if params.security_enabled:
+    File(os.path.join(params.hadoop_conf_dir, 'hdfs_dn_jaas.conf'),
+         owner=params.hdfs_user,
+         group=params.user_group,
+         content=Template("hdfs_dn_jaas.conf.j2")
+    )
+    File(os.path.join(params.hadoop_conf_dir, 'hdfs_nn_jaas.conf'),
+         owner=params.hdfs_user,
+         group=params.user_group,
+         content=Template("hdfs_nn_jaas.conf.j2")
+    )
+    if params.dfs_ha_enabled:
+      File(os.path.join(params.hadoop_conf_dir, 'hdfs_jn_jaas.conf'),
+           owner=params.hdfs_user,
+           group=params.user_group,
+           content=Template("hdfs_jn_jaas.conf.j2")
+      )
+
     tc_mode = 0644
     tc_owner = "root"
   else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_dn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_dn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_dn_jaas.conf.j2
new file mode 100644
index 0000000..53583b4
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_dn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+    com.sun.security.auth.module.Krb5LoginModule required
+    renewTGT=false
+    doNotPrompt=true
+    useKeyTab=true
+    keyTab="{{dn_keytab}}"
+    principal="{{dn_principal_name}}"
+    storeKey=true
+    useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_jn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_jn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_jn_jaas.conf.j2
new file mode 100644
index 0000000..9769a6b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_jn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+    com.sun.security.auth.module.Krb5LoginModule required
+    renewTGT=false
+    doNotPrompt=true
+    useKeyTab=true
+    keyTab="{{jn_keytab}}"
+    principal="{{jn_principal_name}}"
+    storeKey=true
+    useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_nn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_nn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_nn_jaas.conf.j2
new file mode 100644
index 0000000..985a477
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_nn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+    com.sun.security.auth.module.Krb5LoginModule required
+    renewTGT=false
+    doNotPrompt=true
+    useKeyTab=true
+    keyTab="{{nn_keytab}}"
+    principal="{{nn_principal_name}}"
+    storeKey=true
+    useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
index d9b62e2..15fda67 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
@@ -51,6 +51,23 @@ def hdfs(name=None):
   )
 
   if params.security_enabled:
+    File(os.path.join(params.hadoop_conf_dir, 'hdfs_dn_jaas.conf'),
+         owner=params.hdfs_user,
+         group=params.user_group,
+         content=Template("hdfs_dn_jaas.conf.j2")
+    )
+    File(os.path.join(params.hadoop_conf_dir, 'hdfs_nn_jaas.conf'),
+         owner=params.hdfs_user,
+         group=params.user_group,
+         content=Template("hdfs_nn_jaas.conf.j2")
+    )
+    if params.dfs_ha_enabled:
+      File(os.path.join(params.hadoop_conf_dir, 'hdfs_jn_jaas.conf'),
+           owner=params.hdfs_user,
+           group=params.user_group,
+           content=Template("hdfs_jn_jaas.conf.j2")
+      )
+
     tc_mode = 0644
     tc_owner = "root"
   else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_dn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_dn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_dn_jaas.conf.j2
new file mode 100644
index 0000000..53583b4
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_dn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+    com.sun.security.auth.module.Krb5LoginModule required
+    renewTGT=false
+    doNotPrompt=true
+    useKeyTab=true
+    keyTab="{{dn_keytab}}"
+    principal="{{dn_principal_name}}"
+    storeKey=true
+    useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_jn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_jn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_jn_jaas.conf.j2
new file mode 100644
index 0000000..9769a6b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_jn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+    com.sun.security.auth.module.Krb5LoginModule required
+    renewTGT=false
+    doNotPrompt=true
+    useKeyTab=true
+    keyTab="{{jn_keytab}}"
+    principal="{{jn_principal_name}}"
+    storeKey=true
+    useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_nn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_nn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_nn_jaas.conf.j2
new file mode 100644
index 0000000..985a477
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_nn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+    com.sun.security.auth.module.Krb5LoginModule required
+    renewTGT=false
+    doNotPrompt=true
+    useKeyTab=true
+    keyTab="{{nn_keytab}}"
+    principal="{{nn_principal_name}}"
+    storeKey=true
+    useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-env.xml b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-env.xml
index 91af58e..ad81d66 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-env.xml
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-env.xml
@@ -88,7 +88,11 @@ export JAVA_HOME={{java64_home}}
 export PATH=$PATH:$JAVA_HOME/bin
 export PID_DIR={{kafka_pid_dir}}
 export LOG_DIR={{kafka_log_dir}}
+{% if security_enabled %}
+export KAFKA_KERBEROS_PARAMS="-Djavax.security.auth.useSubjectCredsOnly=false {{kafka_kerberos_params}}"
+{% else %}
 export KAFKA_KERBEROS_PARAMS={{kafka_kerberos_params}}
+{% endif %}
 # Add kafka sink to classpath and related depenencies
 if [ -e "/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar" ]; then
   export CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka_jaas_conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka_jaas_conf.xml b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka_jaas_conf.xml
index fdde8f2..8ceb891 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka_jaas_conf.xml
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka_jaas_conf.xml
@@ -49,6 +49,17 @@ useTicketCache=false
 serviceName="zookeeper"
 principal="{{kafka_jaas_principal}}";
 };
+com.sun.security.jgss.krb5.initiate {
+   com.sun.security.auth.module.Krb5LoginModule required
+   renewTGT=false
+   doNotPrompt=true
+   useKeyTab=true
+   keyTab="{{kafka_keytab_path}}"
+   storeKey=true
+   useTicketCache=false
+   serviceName="{{kafka_bare_jaas_principal}}"
+   principal="{{kafka_jaas_principal}}";
+};
    </value>
     <value-attributes>
       <type>content</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/kafka_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/kafka_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/kafka_jaas.conf.j2
index 56c558d..1d9e61d 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/kafka_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/kafka_jaas.conf.j2
@@ -39,3 +39,14 @@ Client {
    serviceName="zookeeper"
    principal="{{kafka_jaas_principal}}";
 };
+com.sun.security.jgss.krb5.initiate {
+   com.sun.security.auth.module.Krb5LoginModule required
+   renewTGT=false
+   doNotPrompt=true
+   useKeyTab=true
+   keyTab="{{kafka_keytab_path}}"
+   storeKey=true
+   useTicketCache=false
+   serviceName="{{kafka_bare_jaas_principal}}"
+   principal="{{kafka_jaas_principal}}";
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/storm_yaml_utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/storm_yaml_utils.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/storm_yaml_utils.py
index 9d78e71..557c9dc 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/storm_yaml_utils.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/storm_yaml_utils.py
@@ -27,7 +27,10 @@ from resource_management.core.resources.system import File
 def replace_jaas_placeholder(name, security_enabled, conf_dir):
   if name.find('_JAAS_PLACEHOLDER') > -1:
     if security_enabled:
-      return name.replace('_JAAS_PLACEHOLDER', '-Djava.security.auth.login.config=' + conf_dir + '/storm_jaas.conf')
+      if name.find('Nimbus_JVM') > -1:
+        return name.replace('_JAAS_PLACEHOLDER', '-Djava.security.auth.login.config=' + conf_dir + '/storm_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false')
+      else:
+        return name.replace('_JAAS_PLACEHOLDER', '-Djava.security.auth.login.config=' + conf_dir + '/storm_jaas.conf')
     else:
       return name.replace('_JAAS_PLACEHOLDER', '')
   else:
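
For reference, the substitution above can be exercised on its own: the sketch below copies the logic from storm_yaml_utils.py and feeds it hypothetical option names and a hypothetical conf_dir (plain Python, no resource_management imports), showing that only the Nimbus string picks up the extra useSubjectCredsOnly flag.

def replace_jaas_placeholder(name, security_enabled, conf_dir):
  # Only strings carrying the marker are rewritten.
  if name.find('_JAAS_PLACEHOLDER') > -1:
    if security_enabled:
      # Nimbus additionally gets useSubjectCredsOnly=false, which lets JGSS
      # obtain Kerberos credentials through the JAAS login module instead of
      # requiring them on the current Subject.
      if name.find('Nimbus_JVM') > -1:
        return name.replace('_JAAS_PLACEHOLDER',
                            '-Djava.security.auth.login.config=' + conf_dir +
                            '/storm_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false')
      return name.replace('_JAAS_PLACEHOLDER',
                          '-Djava.security.auth.login.config=' + conf_dir + '/storm_jaas.conf')
    return name.replace('_JAAS_PLACEHOLDER', '')
  return name

# Hypothetical inputs, purely for illustration:
print(replace_jaas_placeholder('Nimbus_JVM_Opts _JAAS_PLACEHOLDER', True, '/etc/storm/conf'))
print(replace_jaas_placeholder('Supervisor_JVM_Opts _JAAS_PLACEHOLDER', True, '/etc/storm/conf'))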

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/storm_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/storm_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/storm_jaas.conf.j2
index c22cb51..d131e62 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/storm_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/storm_jaas.conf.j2
@@ -41,6 +41,16 @@ RegistryClient {
    useTicketCache=false
    principal="{{storm_jaas_principal}}";
 };
+com.sun.security.jgss.krb5.initiate {
+    com.sun.security.auth.module.Krb5LoginModule required
+    renewTGT=false
+    doNotPrompt=true
+    useKeyTab=true
+    keyTab="{{nimbus_keytab_path}}"
+    principal="{{nimbus_jaas_principal}}"
+    storeKey=true
+    useTicketCache=false;
+};
 {% endif %}
 Client {
    com.sun.security.auth.module.Krb5LoginModule required

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 3579fcb..f474a89 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -249,6 +249,9 @@ nm_hosts = default("/clusterHostInfo/nm_hosts", [])
 # don't using len(nm_hosts) here, because check can take too much time on large clusters
 number_of_nm = 1
 
+hs_host = default("/clusterHostInfo/hs_host", [])
+has_hs = not len(hs_host) == 0
+
 # default kinit commands
 rm_kinit_cmd = ""
 yarn_timelineservice_kinit_cmd = ""
@@ -272,19 +275,26 @@ if security_enabled:
 
   # YARN timeline security options
   if has_ats:
-    _yarn_timelineservice_principal_name = config['configurations']['yarn-site']['yarn.timeline-service.principal']
-    _yarn_timelineservice_principal_name = _yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
-    _yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
-    yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {_yarn_timelineservice_keytab} {_yarn_timelineservice_principal_name};")
+    yarn_timelineservice_principal_name = config['configurations']['yarn-site']['yarn.timeline-service.principal']
+    yarn_timelineservice_principal_name = yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
+    yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
+    yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {yarn_timelineservice_keytab} {yarn_timelineservice_principal_name};")
+    yarn_ats_jaas_file = os.path.join(config_dir, 'yarn_ats_jaas.conf')
 
   if 'yarn.nodemanager.principal' in config['configurations']['yarn-site']:
-    _nodemanager_principal_name = default('/configurations/yarn-site/yarn.nodemanager.principal', None)
-    if _nodemanager_principal_name:
-      _nodemanager_principal_name = _nodemanager_principal_name.replace('_HOST', hostname.lower())
-
-    _nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
-    nodemanager_kinit_cmd = format("{kinit_path_local} -kt {_nodemanager_keytab} {_nodemanager_principal_name};")
-
+    nodemanager_principal_name = default('/configurations/yarn-site/yarn.nodemanager.principal', None)
+    if nodemanager_principal_name:
+      nodemanager_principal_name = nodemanager_principal_name.replace('_HOST', hostname.lower())
+
+    nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
+    nodemanager_kinit_cmd = format("{kinit_path_local} -kt {nodemanager_keytab} {nodemanager_principal_name};")
+    yarn_nm_jaas_file = os.path.join(config_dir, 'yarn_nm_jaas.conf')
+
+  if has_hs:
+    mapred_jhs_principal_name = config['configurations']['mapred-site']['mapreduce.jobhistory.principal']
+    mapred_jhs_principal_name = mapred_jhs_principal_name.replace('_HOST', hostname.lower())
+    mapred_jhs_keytab = config['configurations']['mapred-site']['mapreduce.jobhistory.keytab']
+    mapred_jaas_file = os.path.join(config_dir, 'mapred_jaas.conf')
 
 yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']
 yarn_nm_app_log_dir =  config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']
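
The variables above lose their leading underscore, and the same names are what the new yarn_ats_jaas.conf.j2, yarn_nm_jaas.conf.j2 and mapred_jaas.conf.j2 templates further down reference. The kinit command itself is assembled with Ambari's resource_management format(), which (roughly speaking) interpolates {names} from the surrounding scope; a plain-Python approximation with hypothetical values:

# These values are hypothetical; Ambari fills them from yarn-site and the
# detected kinit path.
kinit_path_local = '/usr/bin/kinit'
yarn_timelineservice_keytab = '/etc/security/keytabs/yarn.service.keytab'
yarn_timelineservice_principal_name = 'yarn/ats-host.example.com@EXAMPLE.COM'

# **globals() stands in here for format()'s scope lookup.
yarn_timelineservice_kinit_cmd = (
    '{kinit_path_local} -kt {yarn_timelineservice_keytab} '
    '{yarn_timelineservice_principal_name};'
).format(**globals())

print(yarn_timelineservice_kinit_cmd)
# /usr/bin/kinit -kt /etc/security/keytabs/yarn.service.keytab yarn/ats-host.example.com@EXAMPLE.COM;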

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
index 5ef08ad..28d14fe 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
@@ -192,6 +192,23 @@ def yarn(name=None, config_dir=None):
          group=params.user_group,
          content=Template("yarn_jaas.conf.j2")
     )
+    if params.has_ats:
+      File(os.path.join(config_dir, 'yarn_ats_jaas.conf'),
+           owner=params.yarn_user,
+           group=params.user_group,
+           content=Template("yarn_ats_jaas.conf.j2")
+      )
+    File(os.path.join(config_dir, 'yarn_nm_jaas.conf'),
+         owner=params.yarn_user,
+         group=params.user_group,
+         content=Template("yarn_nm_jaas.conf.j2")
+    )
+    if params.has_hs:
+      File(os.path.join(config_dir, 'mapred_jaas.conf'),
+           owner=params.mapred_user,
+           group=params.user_group,
+           content=Template("mapred_jaas.conf.j2")
+      )
   else:
     File(os.path.join(config_dir, 'taskcontroller.cfg'),
          owner=params.tc_owner,

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapred_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapred_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapred_jaas.conf.j2
new file mode 100644
index 0000000..67f4bcb
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapred_jaas.conf.j2
@@ -0,0 +1,28 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+com.sun.security.jgss.krb5.initiate {
+  com.sun.security.auth.module.Krb5LoginModule required
+  renewTGT=false
+  doNotPrompt=true
+  useKeyTab=true
+  keyTab="{{mapred_jhs_keytab}}"
+  principal="{{mapred_jhs_principal_name}}"
+  storeKey=true
+  useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_ats_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_ats_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_ats_jaas.conf.j2
new file mode 100644
index 0000000..55308e8
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_ats_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+    com.sun.security.auth.module.Krb5LoginModule required
+    renewTGT=false
+    doNotPrompt=true
+    useKeyTab=true
+    keyTab="{{yarn_timelineservice_keytab}}"
+    principal="{{yarn_timelineservice_principal_name}}"
+    storeKey=true
+    useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_jaas.conf.j2
index 483c815..99f0a1b 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_jaas.conf.j2
@@ -23,4 +23,14 @@ Client {
   useTicketCache=false
   keyTab="{{rm_keytab}}"
   principal="{{rm_principal_name}}";
-};
\ No newline at end of file
+};
+com.sun.security.jgss.krb5.initiate {
+  com.sun.security.auth.module.Krb5LoginModule required
+  renewTGT=false
+  doNotPrompt=true
+  useKeyTab=true
+  keyTab="{{rm_keytab}}"
+  principal="{{rm_principal_name}}"
+  storeKey=true
+  useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_nm_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_nm_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_nm_jaas.conf.j2
new file mode 100644
index 0000000..b501c82
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_nm_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+    com.sun.security.auth.module.Krb5LoginModule required
+    renewTGT=false
+    doNotPrompt=true
+    useKeyTab=true
+    keyTab="{{nodemanager_keytab}}"
+    principal="{{nodemanager_principal_name}}"
+    storeKey=true
+    useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration-mapred/mapred-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration-mapred/mapred-env.xml b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration-mapred/mapred-env.xml
index 07cfafe..93e5234 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration-mapred/mapred-env.xml
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration-mapred/mapred-env.xml
@@ -89,7 +89,9 @@
 
       export HADOOP_MAPRED_ROOT_LOGGER=INFO,RFA
 
-      #export HADOOP_JOB_HISTORYSERVER_OPTS=
+      {% if security_enabled %}
+      export HADOOP_JOB_HISTORYSERVER_OPTS="-Djava.security.auth.login.config={{mapred_jaas_file}}  -Djavax.security.auth.useSubjectCredsOnly=false"
+      {% endif %}
       #export HADOOP_MAPRED_LOG_DIR="" # Where log files are stored.  $HADOOP_MAPRED_HOME/logs by default.
       #export HADOOP_JHS_LOGGER=INFO,RFA # Hadoop JobSummary logger.
       #export HADOOP_MAPRED_PID_DIR= # The pid files are stored. /tmp by default.

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-env.xml b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-env.xml
index 6a52865..aaa72d1 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-env.xml
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-env.xml
@@ -220,7 +220,9 @@ export YARN_RESOURCEMANAGER_HEAPSIZE={{resourcemanager_heapsize}}
 # Specify the JVM options to be used when starting the ResourceManager.
 # These options will be appended to the options specified as YARN_OPTS
 # and therefore may override any similar flags set in YARN_OPTS
-#export YARN_RESOURCEMANAGER_OPTS=
+{% if security_enabled %}
+export YARN_RESOURCEMANAGER_OPTS="-Djava.security.auth.login.config={{yarn_jaas_file}}"
+{% endif %}
 
 # Node Manager specific parameters
 
@@ -242,10 +244,16 @@ export YARN_NODEMANAGER_HEAPSIZE={{nodemanager_heapsize}}
 # or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
 export YARN_TIMELINESERVER_HEAPSIZE={{apptimelineserver_heapsize}}
 
+{% if security_enabled %}
+export YARN_TIMELINESERVER_OPTS="-Djava.security.auth.login.config={{yarn_ats_jaas_file}}"
+{% endif %}
+
 # Specify the JVM options to be used when starting the NodeManager.
 # These options will be appended to the options specified as YARN_OPTS
 # and therefore may override any similar flags set in YARN_OPTS
-#export YARN_NODEMANAGER_OPTS=
+{% if security_enabled %}
+export YARN_NODEMANAGER_OPTS="-Djava.security.auth.login.config={{yarn_nm_jaas_file}}"
+{% endif %}
 
 # so that filenames w/ spaces are handled correctly in loops below
 IFS=
@@ -286,6 +294,9 @@ YARN_OPTS="$YARN_OPTS -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"
 {% if rm_security_opts is defined %}
 YARN_OPTS="{{rm_security_opts}} $YARN_OPTS"
 {% endif %}
+{% if security_enabled %}
+YARN_OPTS="$YARN_OPTS -Djavax.security.auth.useSubjectCredsOnly=false"
+{% endif %}
     </value>
     <value-attributes>
       <type>content</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
index 66194ed..a05d259 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
@@ -247,6 +247,9 @@ nm_hosts = default("/clusterHostInfo/nm_hosts", [])
 # don't using len(nm_hosts) here, because check can take too much time on large clusters
 number_of_nm = 1
 
+hs_host = default("/clusterHostInfo/hs_host", [])
+has_hs = not len(hs_host) == 0
+
 # default kinit commands
 rm_kinit_cmd = ""
 yarn_timelineservice_kinit_cmd = ""
@@ -268,19 +271,26 @@ if security_enabled:
 
   # YARN timeline security options
   if has_ats:
-    _yarn_timelineservice_principal_name = config['configurations']['yarn-site']['yarn.timeline-service.principal']
-    _yarn_timelineservice_principal_name = _yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
-    _yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
-    yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {_yarn_timelineservice_keytab} {_yarn_timelineservice_principal_name};")
+    yarn_timelineservice_principal_name = config['configurations']['yarn-site']['yarn.timeline-service.principal']
+    yarn_timelineservice_principal_name = yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
+    yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
+    yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {yarn_timelineservice_keytab} {yarn_timelineservice_principal_name};")
+    yarn_ats_jaas_file = os.path.join(config_dir, 'yarn_ats_jaas.conf')
 
   if 'yarn.nodemanager.principal' in config['configurations']['yarn-site']:
-    _nodemanager_principal_name = default('/configurations/yarn-site/yarn.nodemanager.principal', None)
-    if _nodemanager_principal_name:
-      _nodemanager_principal_name = _nodemanager_principal_name.replace('_HOST', hostname.lower())
-
-    _nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
-    nodemanager_kinit_cmd = format("{kinit_path_local} -kt {_nodemanager_keytab} {_nodemanager_principal_name};")
-
+    nodemanager_principal_name = default('/configurations/yarn-site/yarn.nodemanager.principal', None)
+    if nodemanager_principal_name:
+      nodemanager_principal_name = nodemanager_principal_name.replace('_HOST', hostname.lower())
+
+    nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
+    nodemanager_kinit_cmd = format("{kinit_path_local} -kt {nodemanager_keytab} {nodemanager_principal_name};")
+    yarn_nm_jaas_file = os.path.join(config_dir, 'yarn_nm_jaas.conf')
+
+  if has_hs:
+    mapred_jhs_principal_name = config['configurations']['mapred-site']['mapreduce.jobhistory.principal']
+    mapred_jhs_principal_name = mapred_jhs_principal_name.replace('_HOST', hostname.lower())
+    mapred_jhs_keytab = config['configurations']['mapred-site']['mapreduce.jobhistory.keytab']
+    mapred_jaas_file = os.path.join(config_dir, 'mapred_jaas.conf')
 
 yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']
 yarn_nm_app_log_dir =  config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn.py
index 768411c..0591511 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn.py
@@ -192,7 +192,24 @@ def yarn(name=None, config_dir=None):
          owner=params.yarn_user,
          group=params.user_group,
          content=Template("yarn_jaas.conf.j2")
-     )
+    )
+    if params.has_ats:
+      File(os.path.join(config_dir, 'yarn_ats_jaas.conf'),
+           owner=params.yarn_user,
+           group=params.user_group,
+           content=Template("yarn_ats_jaas.conf.j2")
+      )
+    File(os.path.join(config_dir, 'yarn_nm_jaas.conf'),
+         owner=params.yarn_user,
+         group=params.user_group,
+         content=Template("yarn_nm_jaas.conf.j2")
+    )
+    if params.has_hs:
+      File(os.path.join(config_dir, 'mapred_jaas.conf'),
+           owner=params.mapred_user,
+           group=params.user_group,
+           content=Template("mapred_jaas.conf.j2")
+      )
   else:
     File(os.path.join(config_dir, 'taskcontroller.cfg'),
          owner=params.tc_owner,

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/mapred_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/mapred_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/mapred_jaas.conf.j2
new file mode 100644
index 0000000..67f4bcb
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/mapred_jaas.conf.j2
@@ -0,0 +1,28 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+com.sun.security.jgss.krb5.initiate {
+  com.sun.security.auth.module.Krb5LoginModule required
+  renewTGT=false
+  doNotPrompt=true
+  useKeyTab=true
+  keyTab="{{mapred_jhs_keytab}}"
+  principal="{{mapred_jhs_principal_name}}"
+  storeKey=true
+  useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_ats_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_ats_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_ats_jaas.conf.j2
new file mode 100644
index 0000000..55308e8
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_ats_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+    com.sun.security.auth.module.Krb5LoginModule required
+    renewTGT=false
+    doNotPrompt=true
+    useKeyTab=true
+    keyTab="{{yarn_timelineservice_keytab}}"
+    principal="{{yarn_timelineservice_principal_name}}"
+    storeKey=true
+    useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_jaas.conf.j2
index 483c815..99f0a1b 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_jaas.conf.j2
@@ -23,4 +23,14 @@ Client {
   useTicketCache=false
   keyTab="{{rm_keytab}}"
   principal="{{rm_principal_name}}";
-};
\ No newline at end of file
+};
+com.sun.security.jgss.krb5.initiate {
+  com.sun.security.auth.module.Krb5LoginModule required
+  renewTGT=false
+  doNotPrompt=true
+  useKeyTab=true
+  keyTab="{{rm_keytab}}"
+  principal="{{rm_principal_name}}"
+  storeKey=true
+  useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_nm_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_nm_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_nm_jaas.conf.j2
new file mode 100644
index 0000000..b501c82
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_nm_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+    com.sun.security.auth.module.Krb5LoginModule required
+    renewTGT=false
+    doNotPrompt=true
+    useKeyTab=true
+    keyTab="{{nodemanager_keytab}}"
+    principal="{{nodemanager_principal_name}}"
+    storeKey=true
+    useTicketCache=false;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-env.xml
index 869f44a..67d33db 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-env.xml
@@ -32,7 +32,9 @@ export HADOOP_JOB_HISTORYSERVER_HEAPSIZE={{jobhistory_heapsize}}
 
 export HADOOP_MAPRED_ROOT_LOGGER=INFO,RFA
 
-#export HADOOP_JOB_HISTORYSERVER_OPTS=
+{% if security_enabled %}
+export HADOOP_JOB_HISTORYSERVER_OPTS="-Djava.security.auth.login.config={{mapred_jaas_file}}  -Djavax.security.auth.useSubjectCredsOnly=false"
+{% endif %}
 #export HADOOP_MAPRED_LOG_DIR="" # Where log files are stored.  $HADOOP_MAPRED_HOME/logs by default.
 #export HADOOP_JHS_LOGGER=INFO,RFA # Hadoop JobSummary logger.
 #export HADOOP_MAPRED_PID_DIR= # The pid files are stored. /tmp by default.

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/configuration/hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/configuration/hbase-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/configuration/hbase-env.xml
index d2b3671..45e137c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/configuration/hbase-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/configuration/hbase-env.xml
@@ -90,8 +90,8 @@ JDK_DEPENDED_OPTS="-XX:PermSize=128m -XX:MaxPermSize=128m"
       
 {% if security_enabled %}
 export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.security.auth.login.config={{client_jaas_config_file}} -Djava.io.tmpdir={{java_io_tmpdir}}"
-export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}} $JDK_DEPENDED_OPTS"
-export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70  -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}} $JDK_DEPENDED_OPTS"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false $JDK_DEPENDED_OPTS"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70  -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false $JDK_DEPENDED_OPTS"
 export PHOENIX_QUERYSERVER_OPTS="$PHOENIX_QUERYSERVER_OPTS -Djava.security.auth.login.config={{queryserver_jaas_config_file}}"
 {% else %}
 export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.io.tmpdir={{java_io_tmpdir}}"

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
index 1bfd2fe..eb04aa4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
@@ -81,6 +81,13 @@ export HADOOP_SECONDARYNAMENODE_OPTS="${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOf
 export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
 {% endif %}
 
+{% if security_enabled %}
+export HADOOP_NAMENODE_OPTS="$HADOOP_NAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_SECONDARYNAMENODE_OPTS="$HADOOP_SECONDARYNAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_DATANODE_OPTS="$HADOOP_DATANODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_dn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_JOURNALNODE_OPTS="$HADOOP_JOURNALNODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_jn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+{% endif %}
+
 HADOOP_NFS3_OPTS="-Xmx{{nfsgateway_heapsize}}m -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_NFS3_OPTS}"
 HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
index 190684c..9bfa2fe 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
@@ -90,8 +90,9 @@
       # Specify the JVM options to be used when starting the ResourceManager.
       # These options will be appended to the options specified as YARN_OPTS
       # and therefore may override any similar flags set in YARN_OPTS
-      #export YARN_RESOURCEMANAGER_OPTS=
-
+      {% if security_enabled %}
+      export YARN_RESOURCEMANAGER_OPTS="-Djava.security.auth.login.config={{yarn_jaas_file}}"
+      {% endif %}
       # Node Manager specific parameters
 
       # Specify the max Heapsize for the NodeManager using a numerical value
@@ -112,10 +113,16 @@
       # or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
       export YARN_TIMELINESERVER_HEAPSIZE={{apptimelineserver_heapsize}}
 
+      {% if security_enabled %}
+      export YARN_TIMELINESERVER_OPTS="-Djava.security.auth.login.config={{yarn_ats_jaas_file}}"
+      {% endif %}
+
       # Specify the JVM options to be used when starting the NodeManager.
       # These options will be appended to the options specified as YARN_OPTS
       # and therefore may override any similar flags set in YARN_OPTS
-      #export YARN_NODEMANAGER_OPTS=
+      {% if security_enabled %}
+      export YARN_NODEMANAGER_OPTS="-Djava.security.auth.login.config={{yarn_nm_jaas_file}}"
+      {% endif %}
 
       # so that filenames w/ spaces are handled correctly in loops below
       IFS=
@@ -153,6 +160,9 @@
       fi
       YARN_OPTS="$YARN_OPTS -Dyarn.policy.file=$YARN_POLICYFILE"
       YARN_OPTS="$YARN_OPTS -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"
+      {% if security_enabled %}
+      YARN_OPTS="$YARN_OPTS -Djavax.security.auth.useSubjectCredsOnly=false"
+      {% endif %}
     </value>
     <value-attributes>
       <type>content</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
index 1bfd2fe..eb04aa4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
@@ -81,6 +81,13 @@ export HADOOP_SECONDARYNAMENODE_OPTS="${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOf
 export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
 {% endif %}
 
+{% if security_enabled %}
+export HADOOP_NAMENODE_OPTS="$HADOOP_NAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_SECONDARYNAMENODE_OPTS="$HADOOP_SECONDARYNAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_DATANODE_OPTS="$HADOOP_DATANODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_dn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_JOURNALNODE_OPTS="$HADOOP_JOURNALNODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_jn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+{% endif %}
+
 HADOOP_NFS3_OPTS="-Xmx{{nfsgateway_heapsize}}m -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_NFS3_OPTS}"
 HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
index 9d504db..4814efe 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
@@ -82,6 +82,13 @@
       export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
       {% endif %}
 
+      {% if security_enabled %}
+      export HADOOP_NAMENODE_OPTS="$HADOOP_NAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+      export HADOOP_SECONDARYNAMENODE_OPTS="$HADOOP_SECONDARYNAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+      export HADOOP_DATANODE_OPTS="$HADOOP_DATANODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_dn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+      export HADOOP_JOURNALNODE_OPTS="$HADOOP_JOURNALNODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_jn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+      {% endif %}
+
       HADOOP_NFS3_OPTS="-Xmx{{nfsgateway_heapsize}}m -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_NFS3_OPTS}"
       HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-env.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-env.xml
index a143660..b044cb6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-env.xml
@@ -31,7 +31,9 @@
 
       export HADOOP_MAPRED_ROOT_LOGGER=INFO,RFA
 
-      #export HADOOP_JOB_HISTORYSERVER_OPTS=
+      {% if security_enabled %}
+      export HADOOP_JOB_HISTORYSERVER_OPTS="-Djava.security.auth.login.config={{mapred_jaas_file}}  -Djavax.security.auth.useSubjectCredsOnly=false"
+      {% endif %}
       #export HADOOP_MAPRED_LOG_DIR="" # Where log files are stored.  $HADOOP_MAPRED_HOME/logs by default.
       #export HADOOP_JHS_LOGGER=INFO,RFA # Hadoop JobSummary logger.
       #export HADOOP_MAPRED_PID_DIR= # The pid files are stored. /tmp by default.
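
Taken together, each new *_jaas.conf.j2 template above is materialized by a File(..., content=Template(...)) resource, and the matching *_OPTS export then points java.security.auth.login.config at the rendered file. As a rough illustration of what ends up on disk, the sketch below renders the NameNode template with plain jinja2 rather than Ambari's resource_management Template wrapper; the keytab path and principal are made-up examples.

from jinja2 import Template

# Body of hdfs_nn_jaas.conf.j2 as added above (license header omitted).
HDFS_NN_JAAS_J2 = """\
com.sun.security.jgss.krb5.initiate {
    com.sun.security.auth.module.Krb5LoginModule required
    renewTGT=false
    doNotPrompt=true
    useKeyTab=true
    keyTab="{{nn_keytab}}"
    principal="{{nn_principal_name}}"
    storeKey=true
    useTicketCache=false;
};
"""

print(Template(HDFS_NN_JAAS_J2).render(
    nn_keytab='/etc/security/keytabs/nn.service.keytab',    # hypothetical
    nn_principal_name='nn/nn-host.example.com@EXAMPLE.COM'  # hypothetical
))

The other JAAS sections in this commit (DataNode, JournalNode, timeline server, NodeManager, history server, ResourceManager, Kafka, Storm Nimbus) differ mainly in which keytab/principal pair (and, for Kafka, serviceName) they plug into the same com.sun.security.jgss.krb5.initiate block.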


[32/50] [abbrv] ambari git commit: Updated team page. (yusaku)

Posted by nc...@apache.org.
Updated team page. (yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/eb7fbbdc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/eb7fbbdc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/eb7fbbdc

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: eb7fbbdc6ecdc6a0b1af26ddfd66d4b30d528a4f
Parents: 4aaf259
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Thu Jun 8 15:22:11 2017 -0700
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Jun 8 15:22:11 2017 -0700

----------------------------------------------------------------------
 docs/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/eb7fbbdc/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index a9c48d6..95478e4 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -239,7 +239,7 @@
             <email>avijayan@apache.org</email>
             <timezone>-8</timezone>
             <roles>
-                <role>Committer</role>
+                <role>PMC</role>
             </roles>
             <organization>
                 Hortonworks


[33/50] [abbrv] ambari git commit: AMBARI-21205 Make ToggleKerberos and AddDeleteService experimental features (Duc Le via rzang)

Posted by nc...@apache.org.
AMBARI-21205 Make ToggleKerberos and AddDeleteService experimental features (Duc Le via rzang)

Change-Id: I578ddcebbad34eefd40abef4b6524fc69b4cb8fc


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/57bb1365
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/57bb1365
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/57bb1365

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 57bb1365e414c1f110d2d142fa198fb8e043af95
Parents: eb7fbbd
Author: Richard Zang <rz...@apache.org>
Authored: Thu Jun 8 15:38:38 2017 -0700
Committer: Richard Zang <rz...@apache.org>
Committed: Thu Jun 8 16:16:02 2017 -0700

----------------------------------------------------------------------
 ambari-web/app/config.js                        |  6 ++--
 ambari-web/app/routes/add_service_routes.js     |  2 +-
 ambari-web/app/routes/main.js                   |  2 +-
 .../app/templates/main/admin/kerberos.hbs       | 34 +++++++++++---------
 .../main/service/all_services_actions.hbs       |  6 ++--
 ambari-web/app/views/main/admin.js              | 14 ++++----
 .../main/admin/stack_upgrade/services_view.js   |  2 +-
 ambari-web/app/views/main/menu.js               | 16 +++++----
 ambari-web/app/views/main/service/item.js       |  2 +-
 .../admin/stack_upgrade/services_view_test.js   |  1 +
 10 files changed, 49 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/config.js b/ambari-web/app/config.js
index e7190be..b87b36f 100644
--- a/ambari-web/app/config.js
+++ b/ambari-web/app/config.js
@@ -87,9 +87,11 @@ App.supports = {
   addingNewRepository: false,
   kerberosStackAdvisor: true,
   logCountVizualization: false,
-  manageJournalNode: true,
   createAlerts: false,
-  enabledWizardForHostOrderedUpgrade: true
+  enabledWizardForHostOrderedUpgrade: true,
+  manageJournalNode: true,
+  enableToggleKerberos: true,
+  enableAddDeleteServices: true
 };
 
 if (App.enableExperimental) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/routes/add_service_routes.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/add_service_routes.js b/ambari-web/app/routes/add_service_routes.js
index 1615f0d..75b3586 100644
--- a/ambari-web/app/routes/add_service_routes.js
+++ b/ambari-web/app/routes/add_service_routes.js
@@ -24,7 +24,7 @@ module.exports = App.WizardRoute.extend({
   route: '/service/add',
 
   enter: function (router) {
-    if (App.isAuthorized('SERVICE.ADD_DELETE_SERVICES')) {
+    if (App.isAuthorized('SERVICE.ADD_DELETE_SERVICES') && App.supports.enableAddDeleteServices) {
       // `getSecurityStatus` call is required to retrieve information related to kerberos type: Manual or automated kerberos
       router.get('mainController').isLoading.call(router.get('clusterController'),'isClusterNameLoaded').done(function () {
         App.router.get('mainAdminKerberosController').getSecurityStatus().always(function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/routes/main.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/main.js b/ambari-web/app/routes/main.js
index 30cc8aa..7ed18de 100644
--- a/ambari-web/app/routes/main.js
+++ b/ambari-web/app/routes/main.js
@@ -460,7 +460,7 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
 
       route: '/kerberos',
       enter: function (router, transition) {
-        if (router.get('loggedIn') && !App.isAuthorized('CLUSTER.TOGGLE_KERBEROS')) {
+        if (router.get('loggedIn') && (!App.isAuthorized('CLUSTER.TOGGLE_KERBEROS') || !App.supports.enableToggleKerberos)) {
           router.transitionTo('main.dashboard.index');
         }
       },

http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/templates/main/admin/kerberos.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/kerberos.hbs b/ambari-web/app/templates/main/admin/kerberos.hbs
index e7bb618..2b41122 100644
--- a/ambari-web/app/templates/main/admin/kerberos.hbs
+++ b/ambari-web/app/templates/main/admin/kerberos.hbs
@@ -20,20 +20,22 @@
     <div>
       <p class="text-success">{{t admin.security.enabled}}
         {{#isAuthorized "CLUSTER.TOGGLE_KERBEROS"}}
-          <button class="btn btn-padding btn-warning admin-disable-security-btn" {{bindAttr disabled="isKerberosButtonsDisabled"}} {{action notifySecurityOffPopup target="controller"}}>{{t admin.kerberos.button.disable}} </button>
-          {{#unless isManualKerberos}}
-            <button class="btn btn-success" id="regenerate-keytabs" {{bindAttr disabled="isKerberosButtonsDisabled"}} {{action regenerateKeytabs target="controller"}}>
-              <i class="glyphicon glyphicon-repeat"></i> {{t admin.kerberos.button.regenerateKeytabs}}</button>
-            {{#if App.isCredentialStorePersistent}}
-              <button class="btn btn-primary" {{action showManageKDCCredentialsPopup target="controller"}}>{{t admin.kerberos.credentials.store.menu.label}}</button>
+            {{#if App.supports.enableToggleKerberos}}
+              <button class="btn btn-padding btn-warning admin-disable-security-btn" {{bindAttr disabled="isKerberosButtonsDisabled"}} {{action notifySecurityOffPopup target="controller"}}>{{t admin.kerberos.button.disable}} </button>
+              {{#unless isManualKerberos}}
+                <button class="btn btn-success" id="regenerate-keytabs" {{bindAttr disabled="isKerberosButtonsDisabled"}} {{action regenerateKeytabs target="controller"}}>
+                  <i class="glyphicon glyphicon-repeat"></i> {{t admin.kerberos.button.regenerateKeytabs}}</button>
+                {{#if App.isCredentialStorePersistent}}
+                  <button class="btn btn-primary" {{action showManageKDCCredentialsPopup target="controller"}}>{{t admin.kerberos.credentials.store.menu.label}}</button>
+                {{/if}}
+              {{/unless}}
+              <br/>
+              {{#unless isEditMode}}
+                <a href="#" {{action makeConfigsEditable target="controller"}} class="pull-right">
+                  {{t common.edit}}
+                </a>
+              {{/unless}}
             {{/if}}
-          {{/unless}}
-          <br/>
-          {{#unless isEditMode}}
-            <a href="#" {{action makeConfigsEditable target="controller"}} class="pull-right">
-              {{t common.edit}}
-            </a>
-          {{/unless}}
         {{/isAuthorized}}
       </p>
     </div>
@@ -51,8 +53,10 @@
     <div>
       <p class="muted background-text">{{t admin.security.disabled}}
       {{#isAuthorized "CLUSTER.TOGGLE_KERBEROS"}}
-        <a class="btn btn-padding btn-success admin-enable-security-btn" {{action checkAndStartKerberosWizard target="controller"}}>{{t admin.kerberos.button.enable}} </a>
-        <br/>
+        {{#if App.supports.enableToggleKerberos}}
+          <a class="btn btn-padding btn-success admin-enable-security-btn" {{action checkAndStartKerberosWizard target="controller"}}>{{t admin.kerberos.button.enable}} </a>
+          <br/>
+        {{/if}}
       {{/isAuthorized}}
       </p>
     </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/templates/main/service/all_services_actions.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/all_services_actions.hbs b/ambari-web/app/templates/main/service/all_services_actions.hbs
index 3e87cb2..a9e122b 100644
--- a/ambari-web/app/templates/main/service/all_services_actions.hbs
+++ b/ambari-web/app/templates/main/service/all_services_actions.hbs
@@ -22,12 +22,14 @@
   </div>
   <ul class="dropdown-menu">
     {{#isAuthorized "SERVICE.ADD_DELETE_SERVICES"}}
-      <li {{bindAttr class="view.serviceController.isAllServicesInstalled:disabled"}}>
+      {{#if App.supports.enableAddDeleteServices}}
+        <li {{bindAttr class="view.serviceController.isAllServicesInstalled:disabled"}}>
         <a href="#"
           {{bindAttr class="view.serviceController.isAllServicesInstalled:disabled"}}
           {{action gotoAddService target="view.serviceController"}}>
           <i class="glyphicon glyphicon-plus"></i> {{t services.service.add}}</a>
-      </li>
+        </li>
+      {{/if}}
     {{/isAuthorized}}
     {{#isAuthorized "SERVICE.START_STOP"}}
       <li class="divider"></li>

http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/views/main/admin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin.js b/ambari-web/app/views/main/admin.js
index 509f380..05d0f56 100644
--- a/ambari-web/app/views/main/admin.js
+++ b/ambari-web/app/views/main/admin.js
@@ -39,12 +39,14 @@ App.MainAdminView = Em.View.extend({
       });
     }
     if (!App.get('isHadoopWindowsStack') && App.isAuthorized('CLUSTER.TOGGLE_KERBEROS') || (App.get('upgradeInProgress') || App.get('upgradeHolding')) ) {
-      items.push({
-        name: 'kerberos',
-        url: 'adminKerberos.index',
-        label: Em.I18n.t('common.kerberos'),
-        disabled: App.get('upgradeInProgress') || App.get('upgradeHolding')
-      });
+      if (App.supports.enableToggleKerberos) {
+        items.push({
+          name: 'kerberos',
+          url: 'adminKerberos.index',
+          label: Em.I18n.t('common.kerberos'),
+          disabled: App.get('upgradeInProgress') || App.get('upgradeHolding')
+        });
+      }
     }
     if ((App.isAuthorized('SERVICE.START_STOP, CLUSTER.MODIFY_CONFIGS') && App.isAuthorized('SERVICE.MANAGE_AUTO_START, CLUSTER.MANAGE_AUTO_START')) || (App.get('upgradeInProgress') || App.get('upgradeHolding'))) {
       if (App.supports.serviceAutoStart) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/views/main/admin/stack_upgrade/services_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/services_view.js b/ambari-web/app/views/main/admin/stack_upgrade/services_view.js
index f566814..25efffe 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/services_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/services_view.js
@@ -56,7 +56,7 @@ App.MainAdminStackServicesView = Em.View.extend({
    * @param event
    */
   goToAddService: function (event) {
-    if (!App.isAuthorized('SERVICE.ADD_DELETE_SERVICES')) {
+    if (!App.isAuthorized('SERVICE.ADD_DELETE_SERVICES') || !App.supports.enableAddDeleteServices) {
       return;
     } else if (event.context == "KERBEROS") {
       App.router.get('mainAdminKerberosController').checkAndStartKerberosWizard();

http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/views/main/menu.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/menu.js b/ambari-web/app/views/main/menu.js
index 4bb53ae..32c4f6f 100644
--- a/ambari-web/app/views/main/menu.js
+++ b/ambari-web/app/views/main/menu.js
@@ -118,13 +118,15 @@ App.MainSideMenuView = Em.CollectionView.extend({
           });
         }
         if (!App.get('isHadoopWindowsStack') && App.isAuthorized('CLUSTER.TOGGLE_KERBEROS') || upg) {
-          categories.push({
-            name: 'kerberos',
-            url: 'kerberos/',
-            label: Em.I18n.t('common.kerberos'),
-            disabled: App.get('upgradeInProgress') || App.get('upgradeHolding'),
-            href: router.urlFor('main.admin.adminKerberos')
-          });
+          if (App.supports.enableToggleKerberos) {
+            categories.push({
+              name: 'kerberos',
+              url: 'kerberos/',
+              label: Em.I18n.t('common.kerberos'),
+              disabled: App.get('upgradeInProgress') || App.get('upgradeHolding'),
+              href: router.urlFor('main.admin.adminKerberos')
+            });
+          }
         }
         if ((App.isAuthorized('SERVICE.START_STOP, CLUSTER.MODIFY_CONFIGS') && App.isAuthorized('SERVICE.MANAGE_AUTO_START, CLUSTER.MANAGE_AUTO_START')) || upg) {
           if (App.supports.serviceAutoStart) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/views/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/item.js b/ambari-web/app/views/main/service/item.js
index e25ade1..43d75e6 100644
--- a/ambari-web/app/views/main/service/item.js
+++ b/ambari-web/app/views/main/service/item.js
@@ -289,7 +289,7 @@ App.MainServiceItemView = Em.View.extend({
       options.push(actionMap.DOWNLOAD_CLIENT_CONFIGS);
     }
 
-    if (App.isAuthorized("SERVICE.ADD_DELETE_SERVICES")) {
+    if (App.isAuthorized("SERVICE.ADD_DELETE_SERVICES") && App.supports.enableAddDeleteServices) {
       options.push(actionMap.DELETE_SERVICE);
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/test/views/main/admin/stack_upgrade/services_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/admin/stack_upgrade/services_view_test.js b/ambari-web/test/views/main/admin/stack_upgrade/services_view_test.js
index 70d182c..da75cf2 100644
--- a/ambari-web/test/views/main/admin/stack_upgrade/services_view_test.js
+++ b/ambari-web/test/views/main/admin/stack_upgrade/services_view_test.js
@@ -34,6 +34,7 @@ describe('App.MainAdminStackServicesView', function () {
       sinon.stub(App.router, 'get').returns(mock);
       sinon.spy(mock, 'checkAndStartKerberosWizard');
       isAccessibleMock = sinon.stub(App, 'isAuthorized');
+      App.set('supports.enableAddDeleteServices', true);
     });
     afterEach(function() {
       App.get('router').transitionTo.restore();


[30/50] [abbrv] ambari git commit: AMBARI-19369. Add Kerberos HTTP SPNEGO authentication support to Hadoop/hbase/kafka/storm sinks (Qin Liu via rlevas)

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 5702b57..d2968f8 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -416,6 +416,16 @@ class TestDatanode(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
+    self.assertResourceCalled('File', conf_dir + '/hdfs_dn_jaas.conf',
+                              content = Template('hdfs_dn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', conf_dir + '/hdfs_nn_jaas.conf',
+                              content = Template('hdfs_nn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index 2202661..ff8f92e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -230,6 +230,16 @@ class TestJournalnode(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_dn_jaas.conf',
+                              content = Template('hdfs_dn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_nn_jaas.conf',
+                              content = Template('hdfs_nn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -251,7 +261,6 @@ class TestJournalnode(RMFTestCase):
                               )
 
 
-
   @patch('time.sleep')
   def test_post_upgrade_restart(self, time_mock):
     # load the NN and JN JMX files so that the urllib2.urlopen mock has data

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 01149fb..a7233c7 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -289,7 +289,7 @@ class TestNamenode(RMFTestCase):
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assert_configure_secured()
+    self.assert_configure_secured(False)
     self.assertNoMoreResources()
 
 
@@ -302,7 +302,7 @@ class TestNamenode(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0,"")],
     )
-    self.assert_configure_secured()
+    self.assert_configure_secured(False)
     self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
                               owner = 'hdfs',
                               content = Template('exclude_hosts_list.j2'),
@@ -622,7 +622,7 @@ class TestNamenode(RMFTestCase):
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assert_configure_secured()
+    self.assert_configure_secured(True)
     self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
                               owner = 'hdfs',
                               content = Template('exclude_hosts_list.j2'),
@@ -1153,7 +1153,7 @@ class TestNamenode(RMFTestCase):
                               cd_access='a'
                               )
 
-  def assert_configure_secured(self):
+  def assert_configure_secured(self, ha_enabled):
     self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
         create_parents = True,
     )
@@ -1177,6 +1177,22 @@ class TestNamenode(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_dn_jaas.conf',
+                              content = Template('hdfs_dn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_nn_jaas.conf',
+                              content = Template('hdfs_nn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    if ha_enabled:
+      self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_jn_jaas.conf',
+                                content = Template('hdfs_jn_jaas.conf.j2'),
+                                owner = 'hdfs',
+                                group = 'hadoop',
+                                )
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
index b8fee12..de425cd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
@@ -243,6 +243,16 @@ class TestNFSGateway(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_dn_jaas.conf',
+                              content = Template('hdfs_dn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_nn_jaas.conf',
+                              content = Template('hdfs_nn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
index 9e9366d..b3d7016 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
@@ -248,6 +248,16 @@ class TestSNamenode(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_dn_jaas.conf',
+                              content = Template('hdfs_dn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_nn_jaas.conf',
+                              content = Template('hdfs_nn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -274,4 +284,4 @@ class TestSNamenode(RMFTestCase):
                               mode = 0755,
                               create_parents = True,
                               cd_access='a'
-                              )
\ No newline at end of file
+                              )

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
index 127a045..0fe200c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
@@ -154,6 +154,21 @@ class TestZkfc(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_dn_jaas.conf',
+                              content = Template('hdfs_dn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_nn_jaas.conf',
+                              content = Template('hdfs_nn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_jn_jaas.conf',
+                              content = Template('hdfs_jn_jaas.conf.j2'),
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -381,4 +396,4 @@ class TestZkfc(RMFTestCase):
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
     )
-    self.assertNoMoreResources()
\ No newline at end of file
+    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 545f0e6..53d16fd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -712,6 +712,16 @@ class TestHistoryServer(RMFTestCase):
                               owner = 'yarn',
                               group = 'hadoop',
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn_nm_jaas.conf',
+                              content = Template('yarn_nm_jaas.conf.j2'),
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred_jaas.conf',
+                              content = Template('mapred_jaas.conf.j2'),
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                               owner = 'mapred',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index b05d9f2..7e06969 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -346,6 +346,16 @@ class TestMapReduce2Client(RMFTestCase):
                               owner = 'yarn',
                               group = 'hadoop',
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn_nm_jaas.conf',
+                              content = Template('yarn_nm_jaas.conf.j2'),
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred_jaas.conf',
+                              content = Template('mapred_jaas.conf.j2'),
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                               owner = 'mapred',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index 6fc5bae..dfbdd23 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -530,6 +530,16 @@ class TestNodeManager(RMFTestCase):
                               owner = 'yarn',
                               group = 'hadoop',
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn_nm_jaas.conf',
+                              content = Template('yarn_nm_jaas.conf.j2'),
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred_jaas.conf',
+                              content = Template('mapred_jaas.conf.j2'),
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                               owner = 'mapred',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index ed5ee2c..5ccd6a4 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -498,6 +498,16 @@ class TestResourceManager(RMFTestCase):
                               owner = 'yarn',
                               group = 'hadoop',
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn_nm_jaas.conf',
+                              content = Template('yarn_nm_jaas.conf.j2'),
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred_jaas.conf',
+                              content = Template('mapred_jaas.conf.j2'),
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                               owner = 'mapred',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
index 8873fbf..f71c93a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
@@ -346,6 +346,16 @@ class TestYarnClient(RMFTestCase):
                               owner = 'yarn',
                               group = 'hadoop',
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn_nm_jaas.conf',
+                              content = Template('yarn_nm_jaas.conf.j2'),
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred_jaas.conf',
+                              content = Template('mapred_jaas.conf.j2'),
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                               owner = 'mapred',
                               group = 'hadoop',
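
A note on the pattern behind all of the test changes in this commit: each secured fixture now expects the HDFS/YARN/MapReduce scripts to lay down extra JAAS client configs (rendered from the *_jaas.conf.j2 templates) so the Hadoop/HBase/Kafka/Storm sinks can authenticate over Kerberos HTTP SPNEGO. Below is a minimal sketch of the kind of resource declaration these RMFTestCase assertions correspond to; the helper name and its arguments are illustrative assumptions, only the File/Template style and the owner/group values come from the diffs above, and it is meant to run inside an Ambari service script where a resource_management Environment is active.

import os
from resource_management.core.resources.system import File
from resource_management.core.source import Template

def setup_jaas_configs(conf_dir, mappings, user_group='hadoop'):
    # mappings: template basename -> owning user, mirroring the assertions above,
    # e.g. {'yarn_nm_jaas.conf': 'yarn', 'mapred_jaas.conf': 'mapred'}
    for jaas_file, owner in mappings.items():
        File(os.path.join(conf_dir, jaas_file),
             content=Template(jaas_file + '.j2'),   # *_jaas.conf.j2 from the service templates
             owner=owner,
             group=user_group)

Declarations of this shape are what make assertions such as assertResourceCalled('File', '/etc/hadoop/conf/yarn_nm_jaas.conf', ...) pass in the secured test cases.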


[19/50] [abbrv] ambari git commit: AMBARI-21185. False positive unused import for nested class referenced only in Javadoc

Posted by nc...@apache.org.
AMBARI-21185. False positive unused import for nested class referenced only in Javadoc


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2ff48a00
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2ff48a00
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2ff48a00

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 2ff48a00b155a65ac0daa55ac8f8be00774b158f
Parents: e71f49e
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Tue Jun 6 17:05:21 2017 +0200
Committer: Attila Doroszlai <ad...@hortonworks.com>
Committed: Thu Jun 8 11:35:49 2017 +0200

----------------------------------------------------------------------
 .../controller/AmbariCustomCommandExecutionHelper.java       | 7 +++----
 .../apache/ambari/server/controller/KerberosHelperImpl.java  | 2 --
 .../server/controller/internal/UpgradeResourceProvider.java  | 5 -----
 .../listeners/upgrade/HostVersionOutOfSyncListener.java      | 2 --
 .../org/apache/ambari/server/orm/dao/HostVersionDAO.java     | 6 ++----
 .../org/apache/ambari/server/orm/dao/ServiceConfigDAO.java   | 1 -
 .../server/serveraction/upgrades/FinalizeUpgradeAction.java  | 2 --
 .../main/java/org/apache/ambari/server/state/Cluster.java    | 6 +-----
 .../java/org/apache/ambari/server/state/ConfigFactory.java   | 8 --------
 .../java/org/apache/ambari/server/state/UpgradeContext.java  | 2 --
 .../java/org/apache/ambari/server/state/alert/AlertUri.java  | 2 +-
 .../server/state/stack/upgrade/RepositoryVersionHelper.java  | 4 +---
 .../internal/StackUpgradeConfigurationMergeTest.java         | 3 +--
 .../controller/internal/UpgradeResourceProviderTest.java     | 2 +-
 .../java/org/apache/ambari/server/orm/OrmTestHelper.java     | 2 --
 .../server/serveraction/upgrades/ConfigureActionTest.java    | 5 -----
 .../server/serveraction/upgrades/UpgradeActionTest.java      | 5 -----
 .../org/apache/ambari/server/state/cluster/ClusterTest.java  | 4 ++--
 .../apache/ambari/server/upgrade/UpgradeCatalogHelper.java   | 7 -------
 19 files changed, 12 insertions(+), 63 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index cabb9d6..554ae1d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -1180,7 +1180,7 @@ public class AmbariCustomCommandExecutionHelper {
    *
    * @return the repo info
    *
-   * @deprecated use {@link #getCommandRepository(Cluster, Host)} instead.
+   * @deprecated use {@link #getCommandRepository(Cluster, ServiceComponent, Host)} instead.
    * @throws AmbariException if the repository information can not be obtained
    */
   @Deprecated
@@ -1305,7 +1305,7 @@ public class AmbariCustomCommandExecutionHelper {
 
   /**
    * Executed by two different representations of repos.  When we are comfortable with the new
-   * implemenation, this may be removed and called inline in {@link #getCommandRepository(Cluster, Host)}
+   * implementation, this may be removed and called inline in {@link #getCommandRepository(Cluster, ServiceComponent, Host)}
    *
    * @param cluster   the cluster to isolate the stack
    * @param component the component
@@ -1390,9 +1390,8 @@ public class AmbariCustomCommandExecutionHelper {
    *
    * @param actionExecContext  the context
    * @param cluster            the cluster for the command
-   * @param stackId            the effective stack id to use.
    *
-   * @return a wrapper of the imporant JSON structures to add to a stage
+   * @return a wrapper of the important JSON structures to add to a stage
    */
   public ExecuteCommandJson getCommandJson(ActionExecutionContext actionExecContext,
       Cluster cluster, RepositoryVersionEntity repositoryVersion) throws AmbariException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index 7b6ac7e..87c826d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -2205,7 +2205,6 @@ public class KerberosHelperImpl implements KerberosHelper {
    * @param cluster         the relevant Cluster
    * @param requestId       the relevant request Id
    * @param requestContext  a String describing the stage
-   * @param clusterHostInfo JSON-encoded clusterHostInfo structure
    * @param commandParams   JSON-encoded command parameters
    * @param hostParams      JSON-encoded host parameters
    * @return a newly created Stage
@@ -2233,7 +2232,6 @@ public class KerberosHelperImpl implements KerberosHelper {
    * @param cluster           the relevant Cluster
    * @param requestId         the relevant request Id
    * @param requestContext    a String describing the stage
-   * @param clusterHostInfo   JSON-encoded clusterHostInfo structure
    * @param commandParams     JSON-encoded command parameters
    * @param hostParams        JSON-encoded host parameters
    * @param actionClass       The ServeAction class that implements the action to invoke

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index f8f8faa..60665f7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -1089,8 +1089,6 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
    * @param request upgrade request
    * @param entity a single of upgrade
    * @param task server-side task (if any)
-   * @param skippable if user can skip stage on failure
-   * @param allowRetry if user can retry running stage on failure
    * @param configUpgradePack a runtime-generated config upgrade pack that
    * contains all config change definitions from all stacks involved into
    * upgrade
@@ -1453,9 +1451,6 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
 
     /**
      * Constructor.
-     *
-     * @param from
-     * @param target
      */
     public RepositoryVersions(RepositoryVersionEntity from, RepositoryVersionEntity to) {
       fromRepositoryId = from.getId();

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListener.java
index 5536ef9..2eb89a2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListener.java
@@ -64,8 +64,6 @@ import com.google.inject.persist.Transactional;
  * handles {@link org.apache.ambari.server.events.ServiceInstalledEvent} and
  * {@link org.apache.ambari.server.events.ServiceComponentInstalledEvent}
  * to update {@link org.apache.ambari.server.state.RepositoryVersionState}
- *
- * @see org.apache.ambari.server.state.Cluster#recalculateClusterVersionState(RepositoryVersionEntity)
  */
 @Singleton
 @EagerSingleton

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostVersionDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostVersionDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostVersionDAO.java
index ad34ec7..c05b06d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostVersionDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostVersionDAO.java
@@ -40,7 +40,7 @@ import com.google.inject.persist.Transactional;
  * The {@link org.apache.ambari.server.orm.dao.HostVersionDAO} class manages the {@link org.apache.ambari.server.orm.entities.HostVersionEntity}
  * instances associated with a host. Each host can have multiple stack versions in {@link org.apache.ambari.server.state.RepositoryVersionState#INSTALLED}
  * which are installed, exactly one stack version that is either {@link org.apache.ambari.server.state.RepositoryVersionState#CURRENT} or
- * {@link org.apache.ambari.server.state.RepositoryVersionState#UPGRADING}.
+ * {@link org.apache.ambari.server.state.RepositoryVersionState#INSTALLING}.
  */
 @Singleton
 public class HostVersionDAO extends CrudDAO<HostVersionEntity, Long> {
@@ -150,9 +150,7 @@ public class HostVersionDAO extends CrudDAO<HostVersionEntity, Long> {
   /**
    * Retrieve the single host version for the given cluster, stack name, stack
    * version, and host name. <br/>
-   * This query is slow and not suitable for frequent use. <br/>
-   * Please, use {@link HostVersionDAO#findByClusterStackVersionAndHost(long, org.apache.ambari.server.state.StackId, java.lang.String, long)} <br/>
-   * It is ~50 times faster
+   * This query is slow and not suitable for frequent use.
    *
    * @param clusterName
    *          Cluster name

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
index 72666e5..6bfad54 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
@@ -153,7 +153,6 @@ public class ServiceConfigDAO {
    *          the cluster (not {@code null}).
    * @param stackId
    *          the stack (not {@code null}).
-   * @param service
    * @return all service configurations for the cluster and stack.
    */
   @RequiresSession

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
index 475a8c9..db0da9c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
@@ -95,8 +95,6 @@ public class FinalizeUpgradeAction extends AbstractUpgradeServerAction {
 
   /**
    * Execution path for upgrade.
-   * @param clusterName the name of the cluster the upgrade is for
-   * @param version     the target version of the upgrade
    * @return the command report
    */
   private CommandReport finalizeUpgrade(UpgradeContext upgradeContext)

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
index 4fddd8d..b4ebcd8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
@@ -179,11 +179,7 @@ public interface Cluster {
    * hosts into the correct state (which may not be
    * {@link RepositoryVersionState#INSTALLING}).
    * <p/>
-   * The difference between this method compared to
-   * {@link Cluster#mapHostVersions} is that it affects all hosts (not only
-   * missing hosts).
-   * <p/>
-   * Hosts that are in maintenance mode will be transititioned directly into
+   * Hosts that are in maintenance mode will be transitioned directly into
    * {@link RepositoryVersionState#OUT_OF_SYNC} instead. Hosts which do not need
    * the version distributed to them will move into the
    * {@link RepositoryVersionState#NOT_REQUIRED} state.

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java
index dda5c9c..2879345 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java
@@ -32,14 +32,6 @@ public interface ConfigFactory {
 
   /**
    * Creates a new {@link Config} object using provided values.
-   *
-   * @param cluster
-   * @param stackId
-   * @param type
-   * @param tag
-   * @param map
-   * @param mapAttributes
-   * @return
    */
   @Experimental(feature = ExperimentalFeature.MULTI_SERVICE,
       comment = "This constructor is only used for test compatibility and should be removed")

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
index 67ffefd..3ecf64d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
@@ -397,8 +397,6 @@ public class UpgradeContext {
    *          the cluster that the upgrade is for
    * @param upgradeEntity
    *          the upgrade entity
-   * @param repoVersionDAO
-   *          the repository version DAO.
    */
   @AssistedInject
   public UpgradeContext(@Assisted Cluster cluster, @Assisted UpgradeEntity upgradeEntity,

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertUri.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertUri.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertUri.java
index 2df1dae..8223db5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertUri.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertUri.java
@@ -83,7 +83,7 @@ public class AlertUri {
   /**
    * If present, then the component supports HA mode and the properties
    * contained within need to be checked to see if an HA URI is required to be
-   * constructed instead of using {@link #m_httpProperty} and
+   * constructed instead of using {@link #m_httpUri}, {@link #m_httpsUri} and
    * {@link #m_httpsProperty}.
    */
   @SerializedName("high_availability")

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
index 802c876..471e30f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
@@ -244,10 +244,8 @@ public class RepositoryVersionHelper {
    * @param amc           the management controller.  Tests don't use the same instance that gets injected.
    * @param repoVersion   the repository version
    * @param osFamily      the os family
-   * @param services      the set of services to check for packages
-   * @param repos         the list of individual repositories
+   * @param servicesOnHost the set of services to check for packages
    * @return a Map<String, String> to use in
-   * @throws SystemException
    */
   public Map<String, String> buildRoleParams(AmbariManagementController amc, RepositoryVersionEntity repoVersion, String osFamily, Set<String> servicesOnHost)
     throws SystemException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
index 97b94c3..a37e4f5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
@@ -69,7 +69,6 @@ import org.apache.ambari.server.state.UpgradeHelper;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
-import org.apache.ambari.server.state.stack.UpgradePack;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.easymock.Capture;
@@ -91,7 +90,7 @@ import com.google.inject.assistedinject.FactoryModuleBuilder;
 
 /**
  * Tests that
- * {@link UpgradeResourceProvider#applyStackAndProcessConfigurations(String, Cluster, String, Direction, UpgradePack, String)}
+ * {@link UpgradeHelper#updateDesiredRepositoriesAndConfigs}
  * works correctly.
  */
 public class StackUpgradeConfigurationMergeTest extends EasyMockSupport {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index 8f59c07..014ab42 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -1658,7 +1658,7 @@ public class UpgradeResourceProviderTest extends EasyMockSupport {
   }
 
   /**
-   * Tests that commands created for {@link StageWrapper.Type#RU_TASKS} set the
+   * Tests that commands created for {@link org.apache.ambari.server.state.stack.upgrade.StageWrapper.Type#RU_TASKS} set the
    * service and component on the {@link ExecutionCommand}.
    * <p/>
    * Without this, commands of this type would not be able to determine which

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
index eff6a9a..550cc9f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
@@ -618,8 +618,6 @@ public class OrmTestHelper {
    * Convenient method to create or to get repository version for given cluster.  The repository
    * version string is based on the cluster's stack version.
    *
-   * @param stackId stack object
-   * @param version stack version
    * @return repository version
    */
   public RepositoryVersionEntity getOrCreateRepositoryVersion(Cluster cluster) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
index 478b126..8ebff2c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
@@ -1793,11 +1793,6 @@ public class ConfigureActionTest {
 
   /**
    * Creates an upgrade and associates it with the cluster.
-   *
-   * @param cluster
-   * @param sourceRepo
-   * @param targetRepo
-   * @throws Exception
    */
   private UpgradeEntity createUpgrade(Cluster cluster, RepositoryVersionEntity repositoryVersion)
       throws Exception {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
index b6fa6d2..115b25f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
@@ -565,11 +565,6 @@ public class UpgradeActionTest {
 
   /**
    * Creates an upgrade and associates it with the cluster.
-   *
-   * @param cluster
-   * @param sourceRepo
-   * @param targetRepo
-   * @throws Exception
    */
   private UpgradeEntity createUpgrade(Cluster cluster, RepositoryVersionEntity repositoryVersion)
       throws Exception {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
index 336db36..6423164 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
@@ -1519,7 +1519,7 @@ public class ClusterTest {
    * Tests that hosts can be correctly transitioned into the "INSTALLING" state.
    * This method also tests that hosts in MM will not be transitioned, as per
    * the contract of
-   * {@link Cluster#transitionHostsToInstalling(ClusterVersionEntity, RepositoryVersionEntity, org.apache.ambari.server.state.repository.VersionDefinitionXml, boolean)}.
+   * {@link Cluster#transitionHostsToInstalling(RepositoryVersionEntity, org.apache.ambari.server.state.repository.VersionDefinitionXml, boolean)}.
    *
    * @throws Exception
    */
@@ -2072,7 +2072,7 @@ public class ClusterTest {
   }
 
   /**
-   * Tests that {@link Cluster#applyLatestConfigurations(StackId)} sets the
+   * Tests that {@link Cluster#applyLatestConfigurations(StackId, String)} sets the
    * right configs to enabled when setting them to a prior stack which has
    * several configs.
    *

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ff48a00/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogHelper.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogHelper.java
index 1a9eed4..58d4c3e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogHelper.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogHelper.java
@@ -168,13 +168,6 @@ public class UpgradeCatalogHelper {
 
   /**
    * Adds a host component for a given service and host.
-   *
-   * @param injector
-   * @param clusterEntity
-   * @param clusterServiceEntity
-   * @param hostEntity
-   * @param componentName
-   * @param repositoryversion
    */
   @Transactional
   protected void addComponent(Injector injector, ClusterEntity clusterEntity,


[05/50] [abbrv] ambari git commit: AMBARI-21054. Add ppc as a new OS for User. (aonishuk)

Posted by nc...@apache.org.
AMBARI-21054. Add ppc as a new OS for User. (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bc90de2e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bc90de2e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bc90de2e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: bc90de2e9843f41229d86f4dad6accbb66163500
Parents: 119d262
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Jun 6 13:58:40 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Jun 6 13:58:40 2017 +0300

----------------------------------------------------------------------
 .../src/main/python/ambari_commons/os_check.py   |  6 ++++++
 .../ambari_commons/resources/os_family.json      | 10 ++++++++++
 .../core/providers/__init__.py                   | 19 +++++++++++++------
 .../libraries/providers/__init__.py              |  3 ---
 .../AmbariManagementControllerImpl.java          | 16 ++++++++++++----
 .../ambari/server/state/stack/OsFamily.java      | 12 ++++++++++++
 .../resources/stacks/HDP/2.6/repos/repoinfo.xml  | 14 ++++++++++++++
 7 files changed, 67 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bc90de2e/ambari-common/src/main/python/ambari_commons/os_check.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/os_check.py b/ambari-common/src/main/python/ambari_commons/os_check.py
index b430c86..0416584 100644
--- a/ambari-common/src/main/python/ambari_commons/os_check.py
+++ b/ambari-common/src/main/python/ambari_commons/os_check.py
@@ -86,6 +86,9 @@ def _is_oracle_linux():
 def _is_redhat_linux():
   return _IS_REDHAT_LINUX
 
+def _is_powerpc():
+  return platform.processor() == 'powerpc' or platform.machine().startswith('ppc')
+
 def advanced_check(distribution):
   distribution = list(distribution)
   if os.path.exists(SYSTEM_RELEASE_FILE):
@@ -255,6 +258,9 @@ class OSCheck:
 
     if operatingSystem == '':
       raise Exception("Cannot detect os type. Exiting...")
+
+    if _is_powerpc():
+      operatingSystem += '-ppc'
     
     return operatingSystem
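
The detection added here is self-contained enough to restate outside the diff. The sketch below approximates the new behaviour: use the platform module to spot a PowerPC machine and suffix the detected OS type with '-ppc', which is what lets it match the new 'redhat-ppc' family in os_family.json. The helper names and the print demo are illustrative, not the exact OSCheck API.

import platform

def is_powerpc():
    # Same test as the _is_powerpc() added above: either the processor string is
    # 'powerpc' or the machine architecture starts with 'ppc' (ppc64, ppc64le, ...).
    return platform.processor() == 'powerpc' or platform.machine().startswith('ppc')

def detected_os_type(base_os):
    # OSCheck.get_os_type() appends the suffix after the usual distro detection.
    return base_os + '-ppc' if is_powerpc() else base_os

print(detected_os_type('centos'))   # 'centos-ppc' on a ppc64le host, 'centos' elsewhere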
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc90de2e/ambari-common/src/main/python/ambari_commons/resources/os_family.json
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/resources/os_family.json b/ambari-common/src/main/python/ambari_commons/resources/os_family.json
index 859ce56..b9cdbf6 100644
--- a/ambari-common/src/main/python/ambari_commons/resources/os_family.json
+++ b/ambari-common/src/main/python/ambari_commons/resources/os_family.json
@@ -22,6 +22,16 @@
           7
         ]
       },
+      "redhat-ppc": {
+        "extends" : "redhat",
+        "distro": [
+          "redhat-ppc",
+          "centos-ppc"
+        ],
+        "versions": [
+          6
+        ]
+      },
       "debian": {
         "extends" : "ubuntu",
         "distro": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc90de2e/ambari-common/src/main/python/resource_management/core/providers/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/providers/__init__.py b/ambari-common/src/main/python/resource_management/core/providers/__init__.py
index 21ae0d5..ac6ee16 100644
--- a/ambari-common/src/main/python/resource_management/core/providers/__init__.py
+++ b/ambari-common/src/main/python/resource_management/core/providers/__init__.py
@@ -24,7 +24,7 @@ __all__ = ["Provider", "find_provider"]
 
 from resource_management.core.exceptions import Fail
 from resource_management.libraries.providers import PROVIDERS as LIBRARY_PROVIDERS
-
+from ambari_commons.os_check import OSCheck
 
 class Provider(object):
   def __init__(self, resource):
@@ -53,9 +53,6 @@ PROVIDERS = dict(
   ubuntu=dict(
     Package="resource_management.core.providers.package.apt.AptProvider",
   ),
-  debian=dict(
-    Package="resource_management.core.providers.package.apt.AptProvider",
-  ),
   winsrv=dict(
     Service="resource_management.core.providers.windows.service.ServiceProvider",
     ServiceConfig="resource_management.core.providers.windows.service.ServiceConfigProvider",
@@ -82,9 +79,19 @@ PROVIDERS = dict(
 def find_provider(env, resource, class_path=None):
   if not class_path:
     providers = [PROVIDERS, LIBRARY_PROVIDERS]
+
     for provider in providers:
-      if resource in provider[env.system.os_family]:
-        class_path = provider[env.system.os_family][resource]
+
+      if env.system.os_family in provider:
+        os_family_provider = provider[env.system.os_family]
+      else:
+        # take care of os extensions
+        for family in provider:
+          if OSCheck.is_in_family(env.system.os_family, family):
+            os_family_provider = provider[family]
+
+      if resource in os_family_provider:
+        class_path = os_family_provider[resource]
         break
       if resource in provider["default"]:
         class_path = provider["default"][resource]
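
A condensed sketch of the lookup rule this hunk introduces (the provider class names and the extends table are made-up placeholders; the real tables live in resource_management and os_family.json):

def is_in_family(current, family, extends_map):
    # Walk the 'extends' chain from os_family.json, e.g. 'redhat-ppc' -> 'redhat'.
    while current is not None:
        if current == family:
            return True
        current = extends_map.get(current)
    return False

def find_provider_class(providers, os_family, resource, extends_map):
    table = providers.get(os_family)
    if table is None:
        # take care of os extensions: an extended family falls back to its parent's providers
        for family, candidate in providers.items():
            if family != 'default' and is_in_family(os_family, family, extends_map):
                table = candidate
                break
    if table and resource in table:
        return table[resource]
    return providers['default'][resource]

extends_map = {'redhat-ppc': 'redhat', 'debian': 'ubuntu'}
providers = {
    'redhat': {'Package': 'package.yumrpm.YumProvider'},
    'default': {'Package': 'package.PackageProvider'},
}
print(find_provider_class(providers, 'redhat-ppc', 'Package', extends_map))   # package.yumrpm.YumProvider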

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc90de2e/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
index bd7c98a..770f9b5 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
@@ -33,9 +33,6 @@ PROVIDERS = dict(
   ubuntu=dict(
     Repository="resource_management.libraries.providers.repository.UbuntuRepositoryProvider",
   ),
-  debian=dict(
-    Repository="resource_management.libraries.providers.repository.UbuntuRepositoryProvider",
-  ),
   winsrv=dict(
     Msi="resource_management.libraries.providers.msi.MsiProvider"
   ),

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc90de2e/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 186a19e..b67b45b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -187,6 +187,7 @@ import org.apache.ambari.server.state.quicklinksprofile.QuickLinkVisibilityContr
 import org.apache.ambari.server.state.quicklinksprofile.QuickLinksProfile;
 import org.apache.ambari.server.state.repository.VersionDefinitionXml;
 import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
+import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.state.stack.RepositoryXml;
 import org.apache.ambari.server.state.stack.WidgetLayout;
 import org.apache.ambari.server.state.stack.WidgetLayoutInfo;
@@ -313,6 +314,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
   private ExtensionLinkDAO linkDAO;
   @Inject
   private StackDAO stackDAO;
+  @Inject
+  private OsFamily osFamily;
 
   /**
    * The KerberosHelper to help setup for enabling for disabling Kerberos
@@ -2621,13 +2624,18 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
   }
 
   private List<ServiceOsSpecific> getOSSpecificsByFamily(Map<String, ServiceOsSpecific> osSpecifics, String osFamily) {
-    List<ServiceOsSpecific> foundedOSSpecifics = new ArrayList<>();
+    List<ServiceOsSpecific> foundOSSpecifics = new ArrayList<>();
     for (Entry<String, ServiceOsSpecific> osSpecific : osSpecifics.entrySet()) {
-      if (osSpecific.getKey().contains(osFamily)) {
-        foundedOSSpecifics.add(osSpecific.getValue());
+      String[] osFamilyNames = osSpecific.getKey().split("\\s*,\\s*");
+
+      for(String osFamilyName:osFamilyNames) {
+        if (this.osFamily.isVersionedOsFamilyExtendedByVersionedFamily(osFamily, osFamilyName)) {
+          foundOSSpecifics.add(osSpecific.getValue());
+          break;
+        }
       }
     }
-    return foundedOSSpecifics;
+    return foundOSSpecifics;
   }
 
   private ActionExecutionContext getActionExecutionContext
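
The controller change boils down to: split each osSpecifics key on commas and keep the entry if any listed family covers the cluster's versioned OS family. A rough sketch, with the family comparison collapsed into a precomputed set (keys and values are hypothetical; the comparison itself is sketched after the OsFamily.java hunk below):

def os_specifics_for(os_specifics, covered_families):
    # covered_families: every versioned family name that applies to this cluster,
    # e.g. {'redhat-ppc6', 'redhat6'} once the extends chain has been resolved.
    found = []
    for key, value in os_specifics.items():
        names = [n.strip() for n in key.split(',')]
        if any(n in covered_families for n in names):
            found.append(value)
    return found

print(os_specifics_for({'redhat6,suse11': 'rpm repo', 'ubuntu12': 'apt repo'},
                       {'redhat-ppc6', 'redhat6'}))   # ['rpm repo']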

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc90de2e/ambari-server/src/main/java/org/apache/ambari/server/state/stack/OsFamily.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/OsFamily.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/OsFamily.java
index bd89be0..b0d961c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/OsFamily.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/OsFamily.java
@@ -208,6 +208,18 @@ public class OsFamily {
       return (currentFamily.equals(family) || getOsFamilyParent(currentFamily)!=null && isFamilyExtendedByFamily(getOsFamilyParent(currentFamily), family));
     }
 
+    public boolean isVersionedOsFamilyExtendedByVersionedFamily(String currentVersionedFamily, String versionedFamily) {
+      Map<String,String> pos = this.parse_os(currentVersionedFamily);
+      String currentFamily = pos.get(OS_DISTRO);
+      String currentFamilyVersion = pos.get(OS_VERSION);
+
+      pos = this.parse_os(versionedFamily);
+      String family = pos.get(OS_DISTRO);
+      String familyVersion = pos.get(OS_VERSION);
+
+      return currentFamilyVersion.equals(familyVersion) && isFamilyExtendedByFamily(currentFamily, family);
+    }
+
     private String getOsFamilyParent(String osFamily) {
       return osMap.get(osFamily).getExtendsFamily();
     }
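
A minimal re-creation of the new versioned-family check in plain Python (the real parsing is done by OsFamily.parse_os; this sketch assumes family names end in an optional numeric version, and the extends table stands in for os_family.json):

def parse_versioned_family(name):
    # 'redhat-ppc6' -> ('redhat-ppc', '6'); 'redhat6' -> ('redhat', '6')
    distro = name.rstrip('0123456789')
    return distro, name[len(distro):]

def family_extended_by(current, family, extends_map):
    while current is not None:
        if current == family:
            return True
        current = extends_map.get(current)
    return False

def versioned_family_extended_by(current_versioned, versioned, extends_map):
    cur_distro, cur_ver = parse_versioned_family(current_versioned)
    distro, ver = parse_versioned_family(versioned)
    return cur_ver == ver and family_extended_by(cur_distro, distro, extends_map)

extends_map = {'redhat-ppc': 'redhat'}
print(versioned_family_extended_by('redhat-ppc6', 'redhat6', extends_map))   # True
print(versioned_family_extended_by('redhat-ppc6', 'redhat7', extends_map))   # False

This is what lets a 'redhat-ppc6' host match osSpecifics or repo entries keyed on a family it extends.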

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc90de2e/ambari-server/src/main/resources/stacks/HDP/2.6/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/repos/repoinfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/repos/repoinfo.xml
index 81a70a5..23441f5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/repos/repoinfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/repos/repoinfo.xml
@@ -31,6 +31,20 @@
       <unique>false</unique>
     </repo>
   </os>
+  <os family="redhat-ppc6">
+    <repo>
+      <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.6.0.3</baseurl>
+      <repoid>HDP-2.6</repoid>
+      <reponame>HDP</reponame>
+      <unique>true</unique>
+    </repo>
+    <repo>
+      <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6</baseurl>
+      <repoid>HDP-UTILS-1.1.0.21</repoid>
+      <reponame>HDP-UTILS</reponame>
+      <unique>false</unique>
+    </repo>
+  </os>
   <os family="redhat7">
     <repo>
       <baseurl>http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.6.0.3</baseurl>


[15/50] [abbrv] ambari git commit: AMBARI-21017. Misc tab at Customize Services Page has 2 Livy User tags and no helper popup to identify if it's Livy/Livy2 (Bikas Saha via smohanty)

Posted by nc...@apache.org.
AMBARI-21017. Misc tab at Customize Services Page has 2 Livy User tags and no helper popup to identify if it's Livy/Livy2 (Bikas Saha via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e801b419
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e801b419
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e801b419

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: e801b4199dd8a38ea50dd5b172fa85f49ee56966
Parents: b772b4d
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Wed Jun 7 12:50:18 2017 -0700
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Wed Jun 7 12:50:27 2017 -0700

----------------------------------------------------------------------
 .../stacks/HDP/2.6/services/SPARK2/configuration/livy2-env.xml   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e801b419/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-env.xml
index 751d005..201aac9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-env.xml
@@ -22,7 +22,7 @@
 <configuration supports_adding_forbidden="true">
     <property>
         <name>livy2_user</name>
-        <display-name>Livy User</display-name>
+        <display-name>Livy2 User</display-name>
         <value>livy</value>
         <property-type>USER</property-type>
         <value-attributes>
@@ -33,7 +33,7 @@
     </property>
     <property>
         <name>livy2_group</name>
-        <display-name>Livy Group</display-name>
+        <display-name>Livy2 Group</display-name>
         <value>livy</value>
         <property-type>GROUP</property-type>
         <description>livy group</description>


[41/50] [abbrv] ambari git commit: AMBARI-20686. Allow compilation with maven >=2.2 (Arnaud Launay via smohanty)

Posted by nc...@apache.org.
AMBARI-20686. Allow compilation with maven >=2.2 (Arnaud Launay via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/13798c27
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/13798c27
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/13798c27

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 13798c27a4787886816aaac1a688cdb208d864b4
Parents: 62f4432
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Sun Jun 11 08:12:08 2017 -0700
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Sun Jun 11 08:12:56 2017 -0700

----------------------------------------------------------------------
 .../odpi-ambari-mpack/src/main/assemblies/odpi-ambari-mpack.xml    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/13798c27/contrib/management-packs/odpi-ambari-mpack/src/main/assemblies/odpi-ambari-mpack.xml
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/assemblies/odpi-ambari-mpack.xml b/contrib/management-packs/odpi-ambari-mpack/src/main/assemblies/odpi-ambari-mpack.xml
index 5dd3db4..f7bd433 100644
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/assemblies/odpi-ambari-mpack.xml
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/assemblies/odpi-ambari-mpack.xml
@@ -17,7 +17,7 @@
   limitations under the License.
 -->
 <assembly>
-  <id></id>
+  <id>odpi-ambari-mpack</id>
   <formats>
     <format>dir</format>
     <format>tar.gz</format>


[20/50] [abbrv] ambari git commit: AMBARI-21196. Fix ambari-solr-plugin folder structure (oleewere)

Posted by nc...@apache.org.
AMBARI-21196. Fix ambari-solr-plugin folder structure (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3146a197
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3146a197
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3146a197

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 3146a1970f356427010a2275462a6ad93c9ce126
Parents: 2ff48a0
Author: oleewere <ol...@gmail.com>
Authored: Wed Jun 7 19:54:17 2017 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Jun 8 13:29:46 2017 +0200

----------------------------------------------------------------------
 .../InfraKerberosHostValidator.java             |  54 --
 .../InfraRuleBasedAuthorizationPlugin.java      | 542 -------------------
 .../InfraUserRolesLookupStrategy.java           |  49 --
 .../security/InfraKerberosHostValidator.java    |  54 ++
 .../InfraRuleBasedAuthorizationPlugin.java      | 542 +++++++++++++++++++
 .../security/InfraUserRolesLookupStrategy.java  |  49 ++
 6 files changed, 645 insertions(+), 645 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3146a197/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraKerberosHostValidator.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraKerberosHostValidator.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraKerberosHostValidator.java
deleted file mode 100644
index 4a47a89..0000000
--- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraKerberosHostValidator.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.security;
-
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.collections.MapUtils;
-import org.apache.hadoop.security.authentication.server.AuthenticationToken;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-
-import java.security.Principal;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * Validate that the user has the right access based on the hostname in the kerberos principal
- */
-public class InfraKerberosHostValidator {
-
-  public boolean validate(Principal principal, Map<String, Set<String>> userVsHosts, Map<String, String> userVsHostRegex) {
-    if (principal instanceof AuthenticationToken) {
-      AuthenticationToken authenticationToken = (AuthenticationToken) principal;
-      KerberosName kerberosName = new KerberosName(authenticationToken.getName());
-      String hostname = kerberosName.getHostName();
-      String serviceUserName = kerberosName.getServiceName();
-      if (MapUtils.isNotEmpty(userVsHostRegex)) {
-        String regex = userVsHostRegex.get(serviceUserName);
-        return hostname.matches(regex);
-      }
-      if (MapUtils.isNotEmpty(userVsHosts)) {
-        Set<String> hosts = userVsHosts.get(serviceUserName);
-        if (CollectionUtils.isNotEmpty(hosts)) {
-          return hosts.contains(hostname);
-        }
-      }
-    }
-    return true;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3146a197/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraRuleBasedAuthorizationPlugin.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraRuleBasedAuthorizationPlugin.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraRuleBasedAuthorizationPlugin.java
deleted file mode 100644
index 2f1a558..0000000
--- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraRuleBasedAuthorizationPlugin.java
+++ /dev/null
@@ -1,542 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.security;
-
-import com.google.common.collect.ImmutableSet;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.security.Principal;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.params.CollectionParams;
-import org.apache.solr.common.util.Utils;
-import org.apache.solr.security.AuthorizationContext;
-import org.apache.solr.security.AuthorizationPlugin;
-import org.apache.solr.security.AuthorizationResponse;
-import org.apache.solr.security.ConfigEditablePlugin;
-import org.apache.solr.util.CommandOperation;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static java.util.Collections.singleton;
-import static org.apache.solr.common.params.CommonParams.NAME;
-import static org.apache.solr.common.util.Utils.getDeepCopy;
-import static org.apache.solr.handler.admin.SecurityConfHandler.getListValue;
-import static org.apache.solr.handler.admin.SecurityConfHandler.getMapValue;
-
-/**
- * Modified copy of solr.RuleBasedAuthorizationPlugin to handle role - permission mappings with KereberosPlugin
- * Added 2 new JSON map: (precedence: user-host-regex > user-host)
- * 1. "user-host": user host mappings (array) for hostname validation
- * 2. "user-host-regex": user host regex mapping (string) for hostname validation
- */
-public class InfraRuleBasedAuthorizationPlugin implements AuthorizationPlugin, ConfigEditablePlugin {
-
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-  private final Map<String, Set<String>> usersVsRoles = new HashMap<>();
-  private final Map<String, WildCardSupportMap> mapping = new HashMap<>();
-  private final List<Permission> permissions = new ArrayList<>();
-  private final Map<String, Set<String>> userVsHosts = new HashMap<>();
-  private final Map<String, String> userVsHostRegex = new HashMap<>();
-
-  private final InfraUserRolesLookupStrategy infraUserRolesLookupStrategy = new InfraUserRolesLookupStrategy();
-  private final InfraKerberosHostValidator infraKerberosDomainValidator = new InfraKerberosHostValidator();
-
-  private static class WildCardSupportMap extends HashMap<String, List<Permission>> {
-    final Set<String> wildcardPrefixes = new HashSet<>();
-
-    @Override
-    public List<Permission> put(String key, List<Permission> value) {
-      if (key != null && key.endsWith("/*")) {
-        key = key.substring(0, key.length() - 2);
-        wildcardPrefixes.add(key);
-      }
-      return super.put(key, value);
-    }
-
-    @Override
-    public List<Permission> get(Object key) {
-      List<Permission> result = super.get(key);
-      if (key == null || result != null) return result;
-      if (!wildcardPrefixes.isEmpty()) {
-        for (String s : wildcardPrefixes) {
-          if (key.toString().startsWith(s)) {
-            List<Permission> l = super.get(s);
-            if (l != null) {
-              result = result == null ? new ArrayList<Permission>() : new ArrayList<Permission>(result);
-              result.addAll(l);
-            }
-          }
-        }
-      }
-      return result;
-    }
-  }
-
-  @Override
-  public AuthorizationResponse authorize(AuthorizationContext context) {
-    List<AuthorizationContext.CollectionRequest> collectionRequests = context.getCollectionRequests();
-    if (context.getRequestType() == AuthorizationContext.RequestType.ADMIN) {
-      MatchStatus flag = checkCollPerm(mapping.get(null), context);
-      return flag.rsp;
-    }
-
-    for (AuthorizationContext.CollectionRequest collreq : collectionRequests) {
-      //check permissions for each collection
-      MatchStatus flag = checkCollPerm(mapping.get(collreq.collectionName), context);
-      if (flag != MatchStatus.NO_PERMISSIONS_FOUND) return flag.rsp;
-    }
-    //check wildcard (all=*) permissions.
-    MatchStatus flag = checkCollPerm(mapping.get("*"), context);
-    return flag.rsp;
-  }
-
-  private MatchStatus checkCollPerm(Map<String, List<Permission>> pathVsPerms,
-                                    AuthorizationContext context) {
-    if (pathVsPerms == null) return MatchStatus.NO_PERMISSIONS_FOUND;
-
-    String path = context.getResource();
-    MatchStatus flag = checkPathPerm(pathVsPerms.get(path), context);
-    if (flag != MatchStatus.NO_PERMISSIONS_FOUND) return flag;
-    return checkPathPerm(pathVsPerms.get(null), context);
-  }
-
-  private MatchStatus checkPathPerm(List<Permission> permissions, AuthorizationContext context) {
-    if (permissions == null || permissions.isEmpty()) return MatchStatus.NO_PERMISSIONS_FOUND;
-    Principal principal = context.getUserPrincipal();
-    loopPermissions:
-    for (int i = 0; i < permissions.size(); i++) {
-      Permission permission = permissions.get(i);
-      if (permission.method != null && !permission.method.contains(context.getHttpMethod())) {
-        //this permissions HTTP method does not match this rule. try other rules
-        continue;
-      }
-      if(permission.predicate != null){
-        if(!permission.predicate.test(context)) continue ;
-      }
-
-      if (permission.params != null) {
-        for (Map.Entry<String, Object> e : permission.params.entrySet()) {
-          String paramVal = context.getParams().get(e.getKey());
-          Object val = e.getValue();
-          if (val instanceof List) {
-            if (!((List) val).contains(paramVal)) continue loopPermissions;
-          } else if (!Objects.equals(val, paramVal)) continue loopPermissions;
-        }
-      }
-
-      if (permission.role == null) {
-        //no role is assigned permission.That means everybody is allowed to access
-        return MatchStatus.PERMITTED;
-      }
-      if (principal == null) {
-        log.info("request has come without principal. failed permission {} ",permission);
-        //this resource needs a principal but the request has come without
-        //any credential.
-        return MatchStatus.USER_REQUIRED;
-      } else if (permission.role.contains("*")) {
-        return MatchStatus.PERMITTED;
-      }
-
-      for (String role : permission.role) {
-        Set<String> userRoles = infraUserRolesLookupStrategy.getUserRolesFromPrincipal(usersVsRoles, principal);
-        boolean validHostname = infraKerberosDomainValidator.validate(principal, userVsHosts, userVsHostRegex);
-        if (!validHostname) {
-          log.warn("Hostname is not valid for principal {}", principal);
-          return MatchStatus.FORBIDDEN;
-        }
-        if (userRoles != null && userRoles.contains(role)) return MatchStatus.PERMITTED;
-      }
-      log.info("This resource is configured to have a permission {}, The principal {} does not have the right role ", permission, principal);
-      return MatchStatus.FORBIDDEN;
-    }
-    log.debug("No permissions configured for the resource {} . So allowed to access", context.getResource());
-    return MatchStatus.NO_PERMISSIONS_FOUND;
-  }
-
-  @Override
-  public void init(Map<String, Object> initInfo) {
-    mapping.put(null, new WildCardSupportMap());
-    Map<String, Object> map = getMapValue(initInfo, "user-role");
-    for (Object o : map.entrySet()) {
-      Map.Entry e = (Map.Entry) o;
-      String roleName = (String) e.getKey();
-      usersVsRoles.put(roleName, readValueAsSet(map, roleName));
-    }
-    List<Map> perms = getListValue(initInfo, "permissions");
-    for (Map o : perms) {
-      Permission p;
-      try {
-        p = Permission.load(o);
-      } catch (Exception exp) {
-        log.error("Invalid permission ", exp);
-        continue;
-      }
-      permissions.add(p);
-      add2Mapping(p);
-    }
-    // adding user-host
-    Map<String, Object> userHostsMap = getMapValue(initInfo, "user-host");
-    for (Object userHost : userHostsMap.entrySet()) {
-      Map.Entry e = (Map.Entry) userHost;
-      String roleName = (String) e.getKey();
-      userVsHosts.put(roleName, readValueAsSet(userHostsMap, roleName));
-    }
-    // adding user-host-regex
-    Map<String, Object> userHostRegexMap = getMapValue(initInfo, "user-host-regex");
-    for (Map.Entry<String, Object> entry : userHostRegexMap.entrySet()) {
-      userVsHostRegex.put(entry.getKey(), entry.getValue().toString());
-    }
-
-  }
-
-  //this is to do optimized lookup of permissions for a given collection/path
-  private void add2Mapping(Permission permission) {
-    for (String c : permission.collections) {
-      WildCardSupportMap m = mapping.get(c);
-      if (m == null) mapping.put(c, m = new WildCardSupportMap());
-      for (String path : permission.path) {
-        List<Permission> perms = m.get(path);
-        if (perms == null) m.put(path, perms = new ArrayList<>());
-        perms.add(permission);
-      }
-    }
-  }
-
-  /**
-   * read a key value as a set. if the value is a single string ,
-   * return a singleton set
-   *
-   * @param m   the map from which to lookup
-   * @param key the key with which to do lookup
-   */
-  static Set<String> readValueAsSet(Map m, String key) {
-    Set<String> result = new HashSet<>();
-    Object val = m.get(key);
-    if (val == null) {
-      if("collection".equals(key)){
-        //for collection collection: null means a core admin/ collection admin request
-        // otherwise it means a request where collection name is ignored
-        return m.containsKey(key) ? singleton((String) null) : singleton("*");
-      }
-      return null;
-    }
-    if (val instanceof Collection) {
-      Collection list = (Collection) val;
-      for (Object o : list) result.add(String.valueOf(o));
-    } else if (val instanceof String) {
-      result.add((String) val);
-    } else {
-      throw new RuntimeException("Bad value for : " + key);
-    }
-    return result.isEmpty() ? null : Collections.unmodifiableSet(result);
-  }
-
-  @Override
-  public void close() throws IOException { }
-
-  static class Permission {
-    String name;
-    Set<String> path, role, collections, method;
-    Map<String, Object> params;
-    Predicate<AuthorizationContext> predicate;
-    Map originalConfig;
-
-    private Permission() {
-    }
-
-    static Permission load(Map m) {
-      Permission p = new Permission();
-      p.originalConfig = new LinkedHashMap<>(m);
-      String name = (String) m.get(NAME);
-      if (!m.containsKey("role")) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "role not specified");
-      p.role = readValueAsSet(m, "role");
-      if (well_known_permissions.containsKey(name)) {
-        HashSet<String> disAllowed = new HashSet<>(knownKeys);
-        disAllowed.remove("role");//these are the only
-        disAllowed.remove(NAME);//allowed keys for well-known permissions
-        for (String s : disAllowed) {
-          if (m.containsKey(s))
-            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, s + " is not a valid key for the permission : " + name);
-        }
-        p.predicate = (Predicate<AuthorizationContext>) ((Map) well_known_permissions.get(name)).get(Predicate.class.getName());
-        m = well_known_permissions.get(name);
-      }
-      p.name = name;
-      p.path = readSetSmart(name, m, "path");
-      p.collections = readSetSmart(name, m, "collection");
-      p.method = readSetSmart(name, m, "method");
-      p.params = (Map<String, Object>) m.get("params");
-      return p;
-    }
-
-    @Override
-    public String toString() {
-      return Utils.toJSONString(originalConfig);
-    }
-
-    static final Set<String> knownKeys = ImmutableSet.of("collection", "role", "params", "path", "method", NAME);
-  }
-
-  enum MatchStatus {
-    USER_REQUIRED(AuthorizationResponse.PROMPT),
-    NO_PERMISSIONS_FOUND(AuthorizationResponse.OK),
-    PERMITTED(AuthorizationResponse.OK),
-    FORBIDDEN(AuthorizationResponse.FORBIDDEN);
-
-    final AuthorizationResponse rsp;
-
-    MatchStatus(AuthorizationResponse rsp) {
-      this.rsp = rsp;
-    }
-  }
-
-  /**
-   * This checks for the defaults available other rules for the keys
-   */
-  private static Set<String> readSetSmart(String permissionName, Map m, String key) {
-    Set<String> set = readValueAsSet(m, key);
-    if (set == null && well_known_permissions.containsKey(permissionName)) {
-      set = readValueAsSet((Map) well_known_permissions.get(permissionName), key);
-    }
-    if ("method".equals(key)) {
-      if (set != null) {
-        for (String s : set) if (!HTTP_METHODS.contains(s)) return null;
-      }
-      return set;
-    }
-    return set == null ? singleton((String)null) : set;
-  }
-
-  @Override
-  public Map<String, Object> edit(Map<String, Object> latestConf, List<CommandOperation> commands) {
-    for (CommandOperation op : commands) {
-      OPERATION operation = null;
-      for (OPERATION o : OPERATION.values()) {
-        if (o.name.equals(op.name)) {
-          operation = o;
-          break;
-        }
-      }
-      if (operation == null) {
-        op.unknownOperation();
-        return null;
-      }
-      latestConf = operation.edit(latestConf, op);
-      if (latestConf == null) return null;
-
-    }
-    return latestConf;
-  }
-
-  enum OPERATION {
-    SET_USER_ROLE("set-user-role") {
-      @Override
-      public Map<String, Object> edit(Map<String, Object> latestConf, CommandOperation op) {
-        Map<String, Object> roleMap = getMapValue(latestConf, "user-role");
-        Map<String, Object> map = op.getDataMap();
-        if (op.hasError()) return null;
-        for (Map.Entry<String, Object> e : map.entrySet()) {
-          if (e.getValue() == null) {
-            roleMap.remove(e.getKey());
-            continue;
-          }
-          if (e.getValue() instanceof String || e.getValue() instanceof List) {
-            roleMap.put(e.getKey(), e.getValue());
-          } else {
-            op.addError("Unexpected value ");
-            return null;
-          }
-        }
-        return latestConf;
-      }
-    },
-    SET_PERMISSION("set-permission") {
-      @Override
-      public Map<String, Object> edit(Map<String, Object> latestConf, CommandOperation op) {
-        String name = op.getStr(NAME);
-        Map<String, Object> dataMap = op.getDataMap();
-        if (op.hasError()) return null;
-        dataMap = getDeepCopy(dataMap, 3);
-        String before = (String) dataMap.remove("before");
-        for (String key : dataMap.keySet()) {
-          if (!Permission.knownKeys.contains(key)) op.addError("Unknown key, " + key);
-        }
-        try {
-          Permission.load(dataMap);
-        } catch (Exception e) {
-          op.addError(e.getMessage());
-          return null;
-        }
-        List<Map> permissions = getListValue(latestConf, "permissions");
-        List<Map> permissionsCopy = new ArrayList<>();
-        boolean added = false;
-        for (Map e : permissions) {
-          Object n = e.get(NAME);
-          if (n.equals(before) || n.equals(name)) {
-            added = true;
-            permissionsCopy.add(dataMap);
-          }
-          if (!n.equals(name)) permissionsCopy.add(e);
-        }
-        if (!added && before != null) {
-          op.addError("Invalid 'before' :" + before);
-          return null;
-        }
-        if (!added) permissionsCopy.add(dataMap);
-        latestConf.put("permissions", permissionsCopy);
-        return latestConf;
-      }
-    },
-    UPDATE_PERMISSION("update-permission") {
-      @Override
-      public Map<String, Object> edit(Map<String, Object> latestConf, CommandOperation op) {
-        String name = op.getStr(NAME);
-        if (op.hasError()) return null;
-        for (Map permission : (List<Map>) getListValue(latestConf, "permissions")) {
-          if (name.equals(permission.get(NAME))) {
-            LinkedHashMap copy = new LinkedHashMap<>(permission);
-            copy.putAll(op.getDataMap());
-            op.setCommandData(copy);
-            return SET_PERMISSION.edit(latestConf, op);
-          }
-        }
-        op.addError("No such permission " + name);
-        return null;
-      }
-    },
-    DELETE_PERMISSION("delete-permission") {
-      @Override
-      public Map<String, Object> edit(Map<String, Object> latestConf, CommandOperation op) {
-        List<String> names = op.getStrs("");
-        if (names == null || names.isEmpty()) {
-          op.addError("Invalid command");
-          return null;
-        }
-        names = new ArrayList<>(names);
-        List<Map> copy = new ArrayList<>();
-        List<Map> p = getListValue(latestConf, "permissions");
-        for (Map map : p) {
-          Object n = map.get(NAME);
-          if (names.contains(n)) {
-            names.remove(n);
-            continue;
-          } else {
-            copy.add(map);
-          }
-        }
-        if (!names.isEmpty()) {
-          op.addError("Unknown permission name(s) " + names);
-          return null;
-        }
-        latestConf.put("permissions", copy);
-        return latestConf;
-      }
-    };
-
-    public abstract Map<String, Object> edit(Map<String, Object> latestConf, CommandOperation op);
-
-    public final String name;
-
-    OPERATION(String s) {
-      this.name = s;
-    }
-
-    public static OPERATION get(String name) {
-      for (OPERATION o : values()) if (o.name.equals(name)) return o;
-      return null;
-    }
-  }
-
-  public static final Set<String> HTTP_METHODS = ImmutableSet.of("GET", "POST", "DELETE", "PUT", "HEAD");
-
-  private static final Map<String, Map<String,Object>> well_known_permissions = (Map) Utils.fromJSONString(
-    "    { " +
-      "    security-edit :{" +
-      "      path:['/admin/authentication','/admin/authorization']," +
-      "      collection:null," +
-      "      method:POST }," +
-      "    security-read :{" +
-      "      path:['/admin/authentication','/admin/authorization']," +
-      "      collection:null," +
-      "      method:GET}," +
-      "    schema-edit :{" +
-      "      method:POST," +
-      "      path:'/schema/*'}," +
-      "    collection-admin-edit :{" +
-      "  collection:null," +
-      "      path:'/admin/collections'}," +
-      "    collection-admin-read :{" +
-      "      collection:null," +
-      "      path:'/admin/collections'}," +
-      "    schema-read :{" +
-      "      method:GET," +
-      "      path:'/schema/*'}," +
-      "    config-read :{" +
-      "      method:GET," +
-      "      path:'/config/*'}," +
-      "    update :{" +
-      "      path:'/update/*'}," +
-      "    read :{" +
-      "      path:['/select', '/get','/browse','/tvrh','/terms','/clustering','/elevate', '/export','/spell','/clustering']}," +
-      "    config-edit:{" +
-      "      method:POST," +
-      "      path:'/config/*'}," +
-      "    all:{collection:['*', null]}" +
-      "}");
-
-  static {
-    ((Map) well_known_permissions.get("collection-admin-edit")).put(Predicate.class.getName(), getCollectionActionPredicate(true));
-    ((Map) well_known_permissions.get("collection-admin-read")).put(Predicate.class.getName(), getCollectionActionPredicate(false));
-  }
-
-  private static Predicate<AuthorizationContext> getCollectionActionPredicate(final boolean isEdit) {
-    return new Predicate<AuthorizationContext>() {
-      @Override
-      public boolean test(AuthorizationContext context) {
-        String action = context.getParams().get("action");
-        if (action == null) return false;
-        CollectionParams.CollectionAction collectionAction = CollectionParams.CollectionAction.get(action);
-        if (collectionAction == null) return false;
-        return isEdit ? collectionAction.isWrite : !collectionAction.isWrite;
-      }
-    };
-  }
-
-
-  public static void main(String[] args) {
-    System.out.println(Utils.toJSONString(well_known_permissions));
-
-  }
-
-  public interface Predicate<T> {
-
-    boolean test(T t);
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/3146a197/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraUserRolesLookupStrategy.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraUserRolesLookupStrategy.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraUserRolesLookupStrategy.java
deleted file mode 100644
index a54e4ad..0000000
--- a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org.apache.ambari.infra.security/InfraUserRolesLookupStrategy.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.infra.security;
-
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.hadoop.security.authentication.server.AuthenticationToken;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-
-import java.security.Principal;
-import java.util.Map;
-import java.util.Set;
-
-
-/**
- * Strategy class to get roles with the principal name (in a specific format e.g.: 'name@DOMAIN')
- * in case of KerberosPlugin is used for authentication
- */
-public class InfraUserRolesLookupStrategy {
-
-  public Set<String> getUserRolesFromPrincipal(Map<String, Set<String>> usersVsRoles, Principal principal) {
-    if (principal instanceof AuthenticationToken) {
-      AuthenticationToken authenticationToken = (AuthenticationToken) principal;
-      KerberosName kerberosName = new KerberosName(authenticationToken.getName());
-      Set<String> rolesResult = usersVsRoles.get(String.format("%s@%s", kerberosName.getServiceName(), kerberosName.getRealm()));
-      if (CollectionUtils.isEmpty(rolesResult)) {
-        rolesResult = usersVsRoles.get(principal.getName());
-      }
-      return rolesResult;
-    } else {
-      return usersVsRoles.get(principal.getName());
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3146a197/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraKerberosHostValidator.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraKerberosHostValidator.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraKerberosHostValidator.java
new file mode 100644
index 0000000..4a47a89
--- /dev/null
+++ b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraKerberosHostValidator.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.security;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
+import org.apache.hadoop.security.authentication.server.AuthenticationToken;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+
+import java.security.Principal;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Validate that the user has the right access based on the hostname in the kerberos principal
+ */
+public class InfraKerberosHostValidator {
+
+  public boolean validate(Principal principal, Map<String, Set<String>> userVsHosts, Map<String, String> userVsHostRegex) {
+    if (principal instanceof AuthenticationToken) {
+      AuthenticationToken authenticationToken = (AuthenticationToken) principal;
+      KerberosName kerberosName = new KerberosName(authenticationToken.getName());
+      String hostname = kerberosName.getHostName();
+      String serviceUserName = kerberosName.getServiceName();
+      if (MapUtils.isNotEmpty(userVsHostRegex)) {
+        String regex = userVsHostRegex.get(serviceUserName);
+        return hostname.matches(regex);
+      }
+      if (MapUtils.isNotEmpty(userVsHosts)) {
+        Set<String> hosts = userVsHosts.get(serviceUserName);
+        if (CollectionUtils.isNotEmpty(hosts)) {
+          return hosts.contains(hostname);
+        }
+      }
+    }
+    return true;
+  }
+}
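
The validator's rule, restated as a small standalone sketch (Python for brevity; principal parsing is simplified compared to Hadoop's KerberosName, and the principal, hosts and regex below are made up). The identical index hashes show this commit only moves the package directory; the class body is unchanged.

import re

def split_principal(principal):
    # 'infra-solr/host1.example.com@EXAMPLE.COM' -> ('infra-solr', 'host1.example.com')
    name = principal.split('@', 1)[0]
    service, _, host = name.partition('/')
    return service, host or None

def host_allowed(principal, user_vs_hosts, user_vs_host_regex):
    service, host = split_principal(principal)
    if user_vs_host_regex:                       # the regex map takes precedence
        pattern = user_vs_host_regex.get(service)
        return bool(pattern and re.fullmatch(pattern, host or ''))
    if user_vs_hosts:
        hosts = user_vs_hosts.get(service)
        if hosts:
            return host in hosts
    return True                                  # nothing configured for the user -> allow

print(host_allowed('infra-solr/host1.example.com@EXAMPLE.COM',
                   {}, {'infra-solr': r'host\d+\.example\.com'}))   # True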

http://git-wip-us.apache.org/repos/asf/ambari/blob/3146a197/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraRuleBasedAuthorizationPlugin.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraRuleBasedAuthorizationPlugin.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraRuleBasedAuthorizationPlugin.java
new file mode 100644
index 0000000..2f1a558
--- /dev/null
+++ b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraRuleBasedAuthorizationPlugin.java
@@ -0,0 +1,542 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.security;
+
+import com.google.common.collect.ImmutableSet;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.security.Principal;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.CollectionParams;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.security.AuthorizationContext;
+import org.apache.solr.security.AuthorizationPlugin;
+import org.apache.solr.security.AuthorizationResponse;
+import org.apache.solr.security.ConfigEditablePlugin;
+import org.apache.solr.util.CommandOperation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static java.util.Collections.singleton;
+import static org.apache.solr.common.params.CommonParams.NAME;
+import static org.apache.solr.common.util.Utils.getDeepCopy;
+import static org.apache.solr.handler.admin.SecurityConfHandler.getListValue;
+import static org.apache.solr.handler.admin.SecurityConfHandler.getMapValue;
+
+/**
+ * Modified copy of solr.RuleBasedAuthorizationPlugin to handle role - permission mappings with KereberosPlugin
+ * Added 2 new JSON map: (precedence: user-host-regex > user-host)
+ * 1. "user-host": user host mappings (array) for hostname validation
+ * 2. "user-host-regex": user host regex mapping (string) for hostname validation
+ */
+public class InfraRuleBasedAuthorizationPlugin implements AuthorizationPlugin, ConfigEditablePlugin {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private final Map<String, Set<String>> usersVsRoles = new HashMap<>();
+  private final Map<String, WildCardSupportMap> mapping = new HashMap<>();
+  private final List<Permission> permissions = new ArrayList<>();
+  private final Map<String, Set<String>> userVsHosts = new HashMap<>();
+  private final Map<String, String> userVsHostRegex = new HashMap<>();
+
+  private final InfraUserRolesLookupStrategy infraUserRolesLookupStrategy = new InfraUserRolesLookupStrategy();
+  private final InfraKerberosHostValidator infraKerberosDomainValidator = new InfraKerberosHostValidator();
+
+  private static class WildCardSupportMap extends HashMap<String, List<Permission>> {
+    final Set<String> wildcardPrefixes = new HashSet<>();
+
+    @Override
+    public List<Permission> put(String key, List<Permission> value) {
+      if (key != null && key.endsWith("/*")) {
+        key = key.substring(0, key.length() - 2);
+        wildcardPrefixes.add(key);
+      }
+      return super.put(key, value);
+    }
+
+    @Override
+    public List<Permission> get(Object key) {
+      List<Permission> result = super.get(key);
+      if (key == null || result != null) return result;
+      if (!wildcardPrefixes.isEmpty()) {
+        for (String s : wildcardPrefixes) {
+          if (key.toString().startsWith(s)) {
+            List<Permission> l = super.get(s);
+            if (l != null) {
+              result = result == null ? new ArrayList<Permission>() : new ArrayList<Permission>(result);
+              result.addAll(l);
+            }
+          }
+        }
+      }
+      return result;
+    }
+  }
+
+  @Override
+  public AuthorizationResponse authorize(AuthorizationContext context) {
+    List<AuthorizationContext.CollectionRequest> collectionRequests = context.getCollectionRequests();
+    if (context.getRequestType() == AuthorizationContext.RequestType.ADMIN) {
+      MatchStatus flag = checkCollPerm(mapping.get(null), context);
+      return flag.rsp;
+    }
+
+    for (AuthorizationContext.CollectionRequest collreq : collectionRequests) {
+      //check permissions for each collection
+      MatchStatus flag = checkCollPerm(mapping.get(collreq.collectionName), context);
+      if (flag != MatchStatus.NO_PERMISSIONS_FOUND) return flag.rsp;
+    }
+    //check wildcard (all=*) permissions.
+    MatchStatus flag = checkCollPerm(mapping.get("*"), context);
+    return flag.rsp;
+  }
+
+  private MatchStatus checkCollPerm(Map<String, List<Permission>> pathVsPerms,
+                                    AuthorizationContext context) {
+    if (pathVsPerms == null) return MatchStatus.NO_PERMISSIONS_FOUND;
+
+    String path = context.getResource();
+    MatchStatus flag = checkPathPerm(pathVsPerms.get(path), context);
+    if (flag != MatchStatus.NO_PERMISSIONS_FOUND) return flag;
+    return checkPathPerm(pathVsPerms.get(null), context);
+  }
+
+  private MatchStatus checkPathPerm(List<Permission> permissions, AuthorizationContext context) {
+    if (permissions == null || permissions.isEmpty()) return MatchStatus.NO_PERMISSIONS_FOUND;
+    Principal principal = context.getUserPrincipal();
+    loopPermissions:
+    for (int i = 0; i < permissions.size(); i++) {
+      Permission permission = permissions.get(i);
+      if (permission.method != null && !permission.method.contains(context.getHttpMethod())) {
+        //this permissions HTTP method does not match this rule. try other rules
+        continue;
+      }
+      if(permission.predicate != null){
+        if(!permission.predicate.test(context)) continue ;
+      }
+
+      if (permission.params != null) {
+        for (Map.Entry<String, Object> e : permission.params.entrySet()) {
+          String paramVal = context.getParams().get(e.getKey());
+          Object val = e.getValue();
+          if (val instanceof List) {
+            if (!((List) val).contains(paramVal)) continue loopPermissions;
+          } else if (!Objects.equals(val, paramVal)) continue loopPermissions;
+        }
+      }
+
+      if (permission.role == null) {
+        //no role is assigned permission.That means everybody is allowed to access
+        return MatchStatus.PERMITTED;
+      }
+      if (principal == null) {
+        log.info("request has come without principal. failed permission {} ",permission);
+        //this resource needs a principal but the request has come without
+        //any credential.
+        return MatchStatus.USER_REQUIRED;
+      } else if (permission.role.contains("*")) {
+        return MatchStatus.PERMITTED;
+      }
+
+      for (String role : permission.role) {
+        Set<String> userRoles = infraUserRolesLookupStrategy.getUserRolesFromPrincipal(usersVsRoles, principal);
+        boolean validHostname = infraKerberosDomainValidator.validate(principal, userVsHosts, userVsHostRegex);
+        if (!validHostname) {
+          log.warn("Hostname is not valid for principal {}", principal);
+          return MatchStatus.FORBIDDEN;
+        }
+        if (userRoles != null && userRoles.contains(role)) return MatchStatus.PERMITTED;
+      }
+      log.info("This resource is configured to have a permission {}, The principal {} does not have the right role ", permission, principal);
+      return MatchStatus.FORBIDDEN;
+    }
+    log.debug("No permissions configured for the resource {} . So allowed to access", context.getResource());
+    return MatchStatus.NO_PERMISSIONS_FOUND;
+  }
+
+  @Override
+  public void init(Map<String, Object> initInfo) {
+    mapping.put(null, new WildCardSupportMap());
+    Map<String, Object> map = getMapValue(initInfo, "user-role");
+    for (Object o : map.entrySet()) {
+      Map.Entry e = (Map.Entry) o;
+      String roleName = (String) e.getKey();
+      usersVsRoles.put(roleName, readValueAsSet(map, roleName));
+    }
+    List<Map> perms = getListValue(initInfo, "permissions");
+    for (Map o : perms) {
+      Permission p;
+      try {
+        p = Permission.load(o);
+      } catch (Exception exp) {
+        log.error("Invalid permission ", exp);
+        continue;
+      }
+      permissions.add(p);
+      add2Mapping(p);
+    }
+    // adding user-host
+    Map<String, Object> userHostsMap = getMapValue(initInfo, "user-host");
+    for (Object userHost : userHostsMap.entrySet()) {
+      Map.Entry e = (Map.Entry) userHost;
+      String roleName = (String) e.getKey();
+      userVsHosts.put(roleName, readValueAsSet(userHostsMap, roleName));
+    }
+    // adding user-host-regex
+    Map<String, Object> userHostRegexMap = getMapValue(initInfo, "user-host-regex");
+    for (Map.Entry<String, Object> entry : userHostRegexMap.entrySet()) {
+      userVsHostRegex.put(entry.getKey(), entry.getValue().toString());
+    }
+
+  }
+
+  //this is to do optimized lookup of permissions for a given collection/path
+  private void add2Mapping(Permission permission) {
+    for (String c : permission.collections) {
+      WildCardSupportMap m = mapping.get(c);
+      if (m == null) mapping.put(c, m = new WildCardSupportMap());
+      for (String path : permission.path) {
+        List<Permission> perms = m.get(path);
+        if (perms == null) m.put(path, perms = new ArrayList<>());
+        perms.add(permission);
+      }
+    }
+  }
+
+  /**
+   * read a key value as a set. if the value is a single string ,
+   * return a singleton set
+   *
+   * @param m   the map from which to lookup
+   * @param key the key with which to do lookup
+   */
+  static Set<String> readValueAsSet(Map m, String key) {
+    Set<String> result = new HashSet<>();
+    Object val = m.get(key);
+    if (val == null) {
+      if("collection".equals(key)){
+        //for collection collection: null means a core admin/ collection admin request
+        // otherwise it means a request where collection name is ignored
+        return m.containsKey(key) ? singleton((String) null) : singleton("*");
+      }
+      return null;
+    }
+    if (val instanceof Collection) {
+      Collection list = (Collection) val;
+      for (Object o : list) result.add(String.valueOf(o));
+    } else if (val instanceof String) {
+      result.add((String) val);
+    } else {
+      throw new RuntimeException("Bad value for : " + key);
+    }
+    return result.isEmpty() ? null : Collections.unmodifiableSet(result);
+  }
+
+  @Override
+  public void close() throws IOException { }
+
+  static class Permission {
+    String name;
+    Set<String> path, role, collections, method;
+    Map<String, Object> params;
+    Predicate<AuthorizationContext> predicate;
+    Map originalConfig;
+
+    private Permission() {
+    }
+
+    static Permission load(Map m) {
+      Permission p = new Permission();
+      p.originalConfig = new LinkedHashMap<>(m);
+      String name = (String) m.get(NAME);
+      if (!m.containsKey("role")) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "role not specified");
+      p.role = readValueAsSet(m, "role");
+      if (well_known_permissions.containsKey(name)) {
+        HashSet<String> disAllowed = new HashSet<>(knownKeys);
+        disAllowed.remove("role");//these are the only
+        disAllowed.remove(NAME);//allowed keys for well-known permissions
+        for (String s : disAllowed) {
+          if (m.containsKey(s))
+            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, s + " is not a valid key for the permission : " + name);
+        }
+        p.predicate = (Predicate<AuthorizationContext>) ((Map) well_known_permissions.get(name)).get(Predicate.class.getName());
+        m = well_known_permissions.get(name);
+      }
+      p.name = name;
+      p.path = readSetSmart(name, m, "path");
+      p.collections = readSetSmart(name, m, "collection");
+      p.method = readSetSmart(name, m, "method");
+      p.params = (Map<String, Object>) m.get("params");
+      return p;
+    }
+
+    @Override
+    public String toString() {
+      return Utils.toJSONString(originalConfig);
+    }
+
+    static final Set<String> knownKeys = ImmutableSet.of("collection", "role", "params", "path", "method", NAME);
+  }
+
+  enum MatchStatus {
+    USER_REQUIRED(AuthorizationResponse.PROMPT),
+    NO_PERMISSIONS_FOUND(AuthorizationResponse.OK),
+    PERMITTED(AuthorizationResponse.OK),
+    FORBIDDEN(AuthorizationResponse.FORBIDDEN);
+
+    final AuthorizationResponse rsp;
+
+    MatchStatus(AuthorizationResponse rsp) {
+      this.rsp = rsp;
+    }
+  }
+
+  /**
+   * Falls back to the defaults defined for the well-known permissions when no value is configured for the key
+   */
+  private static Set<String> readSetSmart(String permissionName, Map m, String key) {
+    Set<String> set = readValueAsSet(m, key);
+    if (set == null && well_known_permissions.containsKey(permissionName)) {
+      set = readValueAsSet((Map) well_known_permissions.get(permissionName), key);
+    }
+    if ("method".equals(key)) {
+      if (set != null) {
+        for (String s : set) if (!HTTP_METHODS.contains(s)) return null;
+      }
+      return set;
+    }
+    return set == null ? singleton((String)null) : set;
+  }
+
+  @Override
+  public Map<String, Object> edit(Map<String, Object> latestConf, List<CommandOperation> commands) {
+    for (CommandOperation op : commands) {
+      OPERATION operation = null;
+      for (OPERATION o : OPERATION.values()) {
+        if (o.name.equals(op.name)) {
+          operation = o;
+          break;
+        }
+      }
+      if (operation == null) {
+        op.unknownOperation();
+        return null;
+      }
+      latestConf = operation.edit(latestConf, op);
+      if (latestConf == null) return null;
+
+    }
+    return latestConf;
+  }
+
+  enum OPERATION {
+    SET_USER_ROLE("set-user-role") {
+      @Override
+      public Map<String, Object> edit(Map<String, Object> latestConf, CommandOperation op) {
+        Map<String, Object> roleMap = getMapValue(latestConf, "user-role");
+        Map<String, Object> map = op.getDataMap();
+        if (op.hasError()) return null;
+        for (Map.Entry<String, Object> e : map.entrySet()) {
+          if (e.getValue() == null) {
+            roleMap.remove(e.getKey());
+            continue;
+          }
+          if (e.getValue() instanceof String || e.getValue() instanceof List) {
+            roleMap.put(e.getKey(), e.getValue());
+          } else {
+            op.addError("Unexpected value ");
+            return null;
+          }
+        }
+        return latestConf;
+      }
+    },
+    SET_PERMISSION("set-permission") {
+      @Override
+      public Map<String, Object> edit(Map<String, Object> latestConf, CommandOperation op) {
+        String name = op.getStr(NAME);
+        Map<String, Object> dataMap = op.getDataMap();
+        if (op.hasError()) return null;
+        dataMap = getDeepCopy(dataMap, 3);
+        String before = (String) dataMap.remove("before");
+        for (String key : dataMap.keySet()) {
+          if (!Permission.knownKeys.contains(key)) op.addError("Unknown key, " + key);
+        }
+        try {
+          Permission.load(dataMap);
+        } catch (Exception e) {
+          op.addError(e.getMessage());
+          return null;
+        }
+        List<Map> permissions = getListValue(latestConf, "permissions");
+        List<Map> permissionsCopy = new ArrayList<>();
+        boolean added = false;
+        for (Map e : permissions) {
+          Object n = e.get(NAME);
+          if (n.equals(before) || n.equals(name)) {
+            added = true;
+            permissionsCopy.add(dataMap);
+          }
+          if (!n.equals(name)) permissionsCopy.add(e);
+        }
+        if (!added && before != null) {
+          op.addError("Invalid 'before' :" + before);
+          return null;
+        }
+        if (!added) permissionsCopy.add(dataMap);
+        latestConf.put("permissions", permissionsCopy);
+        return latestConf;
+      }
+    },
+    UPDATE_PERMISSION("update-permission") {
+      @Override
+      public Map<String, Object> edit(Map<String, Object> latestConf, CommandOperation op) {
+        String name = op.getStr(NAME);
+        if (op.hasError()) return null;
+        for (Map permission : (List<Map>) getListValue(latestConf, "permissions")) {
+          if (name.equals(permission.get(NAME))) {
+            LinkedHashMap copy = new LinkedHashMap<>(permission);
+            copy.putAll(op.getDataMap());
+            op.setCommandData(copy);
+            return SET_PERMISSION.edit(latestConf, op);
+          }
+        }
+        op.addError("No such permission " + name);
+        return null;
+      }
+    },
+    DELETE_PERMISSION("delete-permission") {
+      @Override
+      public Map<String, Object> edit(Map<String, Object> latestConf, CommandOperation op) {
+        List<String> names = op.getStrs("");
+        if (names == null || names.isEmpty()) {
+          op.addError("Invalid command");
+          return null;
+        }
+        names = new ArrayList<>(names);
+        List<Map> copy = new ArrayList<>();
+        List<Map> p = getListValue(latestConf, "permissions");
+        for (Map map : p) {
+          Object n = map.get(NAME);
+          if (names.contains(n)) {
+            names.remove(n);
+            continue;
+          } else {
+            copy.add(map);
+          }
+        }
+        if (!names.isEmpty()) {
+          op.addError("Unknown permission name(s) " + names);
+          return null;
+        }
+        latestConf.put("permissions", copy);
+        return latestConf;
+      }
+    };
+
+    public abstract Map<String, Object> edit(Map<String, Object> latestConf, CommandOperation op);
+
+    public final String name;
+
+    OPERATION(String s) {
+      this.name = s;
+    }
+
+    public static OPERATION get(String name) {
+      for (OPERATION o : values()) if (o.name.equals(name)) return o;
+      return null;
+    }
+  }
+
+  public static final Set<String> HTTP_METHODS = ImmutableSet.of("GET", "POST", "DELETE", "PUT", "HEAD");
+
+  private static final Map<String, Map<String,Object>> well_known_permissions = (Map) Utils.fromJSONString(
+    "    { " +
+      "    security-edit :{" +
+      "      path:['/admin/authentication','/admin/authorization']," +
+      "      collection:null," +
+      "      method:POST }," +
+      "    security-read :{" +
+      "      path:['/admin/authentication','/admin/authorization']," +
+      "      collection:null," +
+      "      method:GET}," +
+      "    schema-edit :{" +
+      "      method:POST," +
+      "      path:'/schema/*'}," +
+      "    collection-admin-edit :{" +
+      "  collection:null," +
+      "      path:'/admin/collections'}," +
+      "    collection-admin-read :{" +
+      "      collection:null," +
+      "      path:'/admin/collections'}," +
+      "    schema-read :{" +
+      "      method:GET," +
+      "      path:'/schema/*'}," +
+      "    config-read :{" +
+      "      method:GET," +
+      "      path:'/config/*'}," +
+      "    update :{" +
+      "      path:'/update/*'}," +
+      "    read :{" +
+      "      path:['/select', '/get','/browse','/tvrh','/terms','/clustering','/elevate', '/export','/spell','/clustering']}," +
+      "    config-edit:{" +
+      "      method:POST," +
+      "      path:'/config/*'}," +
+      "    all:{collection:['*', null]}" +
+      "}");
+
+  static {
+    ((Map) well_known_permissions.get("collection-admin-edit")).put(Predicate.class.getName(), getCollectionActionPredicate(true));
+    ((Map) well_known_permissions.get("collection-admin-read")).put(Predicate.class.getName(), getCollectionActionPredicate(false));
+  }
+
+  private static Predicate<AuthorizationContext> getCollectionActionPredicate(final boolean isEdit) {
+    return new Predicate<AuthorizationContext>() {
+      @Override
+      public boolean test(AuthorizationContext context) {
+        String action = context.getParams().get("action");
+        if (action == null) return false;
+        CollectionParams.CollectionAction collectionAction = CollectionParams.CollectionAction.get(action);
+        if (collectionAction == null) return false;
+        return isEdit ? collectionAction.isWrite : !collectionAction.isWrite;
+      }
+    };
+  }
+
+
+  public static void main(String[] args) {
+    System.out.println(Utils.toJSONString(well_known_permissions));
+
+  }
+
+  public interface Predicate<T> {
+
+    boolean test(T t);
+  }
+}
\ No newline at end of file
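
The init() method above wires three lookup tables from the plugin configuration: user-to-role mappings, permission rules, and per-user host restrictions (exact names and regular expressions). A minimal sketch of the configuration shape it expects, using hypothetical principal, role, and host values (only the top-level keys "user-role", "permissions", "user-host" and "user-host-regex" are taken from the code above):

import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class AuthzConfigSketch {
  public static void main(String[] args) {
    // Hypothetical principal, role and host values; only the top-level key names
    // mirror what init() reads ("user-role", "permissions", "user-host", "user-host-regex").
    Map<String, Object> initInfo = new LinkedHashMap<>();

    Map<String, Object> userRole = new HashMap<>();
    userRole.put("logsearch@EXAMPLE.COM", Arrays.asList("ranger_audit_user"));
    initInfo.put("user-role", userRole);

    Map<String, Object> readPermission = new HashMap<>();
    readPermission.put("name", "read");               // one of the well-known permissions above
    readPermission.put("role", "ranger_audit_user");
    initInfo.put("permissions", Arrays.asList(readPermission));

    Map<String, Object> userHost = new HashMap<>();
    userHost.put("logsearch@EXAMPLE.COM", Arrays.asList("logsearch-host.example.com"));
    initInfo.put("user-host", userHost);

    Map<String, Object> userHostRegex = new HashMap<>();
    userHostRegex.put("logsearch@EXAMPLE.COM", ".*example.com");
    initInfo.put("user-host-regex", userHostRegex);

    System.out.println(initInfo);
  }
}

The user-host and user-host-regex entries feed the hostname validation that runs before a role match is accepted, as seen at the start of this excerpt.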

http://git-wip-us.apache.org/repos/asf/ambari/blob/3146a197/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraUserRolesLookupStrategy.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraUserRolesLookupStrategy.java b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraUserRolesLookupStrategy.java
new file mode 100644
index 0000000..a54e4ad
--- /dev/null
+++ b/ambari-infra/ambari-infra-solr-plugin/src/main/java/org/apache/ambari/infra/security/InfraUserRolesLookupStrategy.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.security;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.hadoop.security.authentication.server.AuthenticationToken;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+
+import java.security.Principal;
+import java.util.Map;
+import java.util.Set;
+
+
+/**
+ * Strategy class to look up roles by principal name (in a specific format, e.g. 'name@DOMAIN')
+ * in case the KerberosPlugin is used for authentication
+ */
+public class InfraUserRolesLookupStrategy {
+
+  public Set<String> getUserRolesFromPrincipal(Map<String, Set<String>> usersVsRoles, Principal principal) {
+    if (principal instanceof AuthenticationToken) {
+      AuthenticationToken authenticationToken = (AuthenticationToken) principal;
+      KerberosName kerberosName = new KerberosName(authenticationToken.getName());
+      Set<String> rolesResult = usersVsRoles.get(String.format("%s@%s", kerberosName.getServiceName(), kerberosName.getRealm()));
+      if (CollectionUtils.isEmpty(rolesResult)) {
+        rolesResult = usersVsRoles.get(principal.getName());
+      }
+      return rolesResult;
+    } else {
+      return usersVsRoles.get(principal.getName());
+    }
+  }
+}
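
The strategy above normalizes a Kerberos principal of the form 'service/host@REALM' to 'service@REALM' before consulting the user-to-roles map, and falls back to the plain principal name when that lookup yields nothing. A minimal sketch of the normalization, with a hypothetical principal:

import org.apache.hadoop.security.authentication.util.KerberosName;

public class PrincipalNormalizationSketch {
  public static void main(String[] args) {
    // Hypothetical principal; the service, host and realm values are for illustration only.
    KerberosName kerberosName = new KerberosName("infra-solr/host1.example.com@EXAMPLE.COM");
    // Prints "infra-solr@EXAMPLE.COM", the key format used for the user-role lookup above.
    System.out.println(String.format("%s@%s", kerberosName.getServiceName(), kerberosName.getRealm()));
  }
}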


[45/50] [abbrv] ambari git commit: AMBARI-19149. Code cleanup: concatenation in debug messages, unnecessary toString calls

Posted by nc...@apache.org.
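
The diffs below migrate commons-logging Log/LogFactory usages to SLF4J and replace string concatenation in debug messages with {} placeholders, dropping redundant toString() calls along the way. A minimal sketch of the before/after pattern, with hypothetical class and variable names:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingCleanupSketch {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingCleanupSketch.class);

  public static void main(String[] args) {
    String clusterName = "c1";   // hypothetical values, for illustration only
    int responseCount = 3;

    // Before: the message string is built even when DEBUG is disabled,
    // which is why callers wrap it in an isDebugEnabled() guard.
    if (LOG.isDebugEnabled()) {
      LOG.debug("Found clusters matching request, clusterName=" + clusterName
          + ", responseCount=" + responseCount);
    }

    // After: SLF4J fills the {} placeholders only if DEBUG is enabled,
    // so the concatenation (and any explicit toString()) is no longer needed.
    LOG.debug("Found clusters matching request, clusterName={}, responseCount={}",
        clusterName, responseCount);
  }
}

Because the formatting cost is deferred until the level check inside SLF4J, many of the isDebugEnabled() guards in the diffs below become optional, though the commit keeps them where they already existed.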
http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AppCookieManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AppCookieManager.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AppCookieManager.java
index 7904593..4c63d1e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AppCookieManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AppCookieManager.java
@@ -24,8 +24,6 @@ import java.security.Principal;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.http.Header;
 import org.apache.http.HeaderElement;
 import org.apache.http.HttpEntity;
@@ -40,6 +38,8 @@ import org.apache.http.client.methods.HttpUriRequest;
 import org.apache.http.client.params.AuthPolicy;
 import org.apache.http.impl.auth.SPNegoSchemeFactory;
 import org.apache.http.impl.client.DefaultHttpClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Handles SPNego authentication as a client of hadoop service, caches
@@ -57,7 +57,7 @@ public class AppCookieManager {
   private static final EmptyJaasCredentials EMPTY_JAAS_CREDENTIALS = new EmptyJaasCredentials();
 
   private Map<String, String> endpointCookieMap = new ConcurrentHashMap<>();
-  private static Log LOG = LogFactory.getLog(AppCookieManager.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AppCookieManager.class);
 
   /**
    * Utility method to exercise AppCookieManager directly
@@ -136,7 +136,7 @@ public class AppCookieManager {
     hadoopAuthCookie = HADOOP_AUTH_EQ + quote(hadoopAuthCookie);
     setAppCookie(endpoint, hadoopAuthCookie);
     if (LOG.isInfoEnabled()) {
-      LOG.info("Successful SPNego authentication to URL:" + uri.toString());
+      LOG.info("Successful SPNego authentication to URL:" + uri);
     }
     return hadoopAuthCookie;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseProvider.java
index 8c68a12..135eefe 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseProvider.java
@@ -309,10 +309,7 @@ public abstract class BaseProvider {
 
     if (contains) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Setting property for resource"
-            + ", resourceType=" + resource.getType()
-            + ", propertyId=" + propertyId
-            + ", value=" + value);
+        LOG.debug("Setting property for resource, resourceType={}, propertyId={}, value={}", resource.getType(), propertyId, value);
       }
 
       // If the value is a Map then set all of its entries as properties
@@ -335,10 +332,7 @@ public abstract class BaseProvider {
       }
 
       if (!contains && LOG.isDebugEnabled()) {
-        LOG.debug("Skipping property for resource as not in requestedIds"
-            + ", resourceType=" + resource.getType()
-            + ", propertyId=" + propertyId
-            + ", value=" + value);
+        LOG.debug("Skipping property for resource as not in requestedIds, resourceType={}, propertyId={}, value={}", resource.getType(), propertyId, value);
       }
     }
     return contains;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index 7ebefdd..e93b2f7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -450,8 +450,7 @@ public class BlueprintConfigurationProcessor {
       String newValue = trimmingStrategy.trim(oldValue);
 
       if (!newValue.equals(oldValue)){
-        LOG.debug(String.format("Changing value for config %s property %s from [%s] to [%s]",
-          configType, propertyName, oldValue, newValue));
+        LOG.debug("Changing value for config {} property {} from [{}] to [{}]", configType, propertyName, oldValue, newValue);
         clusterConfig.setProperty(configType, propertyName, newValue);
       }
     }
@@ -2850,8 +2849,8 @@ public class BlueprintConfigurationProcessor {
             ensureProperty(configuration, "core-site", String.format(proxyUserGroups, user), "*", configTypesUpdated);
           }
         } else {
-          LOG.debug("setMissingConfigurations: no user configuration found for type = " + configType +
-                  ".  This may be caused by an error in the blueprint configuration.");
+          LOG.debug("setMissingConfigurations: no user configuration found for type = {}.  This may be caused by an error in the blueprint configuration.",
+            configType);
         }
 
       }
@@ -2908,7 +2907,7 @@ public class BlueprintConfigurationProcessor {
 
         Map<String, String> configProperties = stack.getConfigurationProperties(blueprintService, configType);
         for(Map.Entry<String, String> entry: configProperties.entrySet()) {
-          LOG.debug("ADD property " + configType + " " + entry.getKey() + " " + entry.getValue());
+          LOG.debug("ADD property {} {} {}", configType, entry.getKey(), entry.getValue());
           ensureProperty(configuration, configType, entry.getKey(), entry.getValue(), configTypesUpdated);
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
index 181d323..21cf16c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
@@ -249,7 +249,7 @@ public class ClientConfigResourceProvider extends AbstractControllerResourceProv
           if (componentMap.size() == 1) {
             throw new SystemException("No configuration files defined for the component " + componentInfo.getName());
           } else {
-            LOG.debug(String.format("No configuration files defined for the component %s",componentInfo.getName()));
+            LOG.debug("No configuration files defined for the component {}", componentInfo.getName());
             continue;
           }
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
index 1949d06..c5c4ca1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
@@ -130,9 +130,7 @@ public class ClusterControllerImpl implements ClusterController {
 
     if (provider != null) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Using resource provider "
-            + provider.getClass().getName()
-            + " for request type " + type.toString());
+        LOG.debug("Using resource provider {} for request type {}", provider.getClass().getName(), type);
       }
       // make sure that the providers can satisfy the request
       checkProperties(type, request, predicate);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
index cc3234b..265b89b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
@@ -272,8 +272,7 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
 
     Set<Resource> resources = new HashSet<>();
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Found clusters matching getClusters request"
-          + ", clusterResponseCount=" + responses.size());
+      LOG.debug("Found clusters matching getClusters request, clusterResponseCount={}", responses.size());
     }
 
     // Allow internal call to bypass permissions check.
@@ -291,8 +290,7 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
       setResourceProperty(resource, CLUSTER_VERSION_PROPERTY_ID, response.getDesiredStackVersion(), requestedIds);
 
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Adding ClusterResponse to resource"
-            + ", clusterResponse=" + response.toString());
+        LOG.debug("Adding ClusterResponse to resource, clusterResponse={}", response);
       }
 
       resources.add(resource);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CompatibleRepositoryVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CompatibleRepositoryVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CompatibleRepositoryVersionResourceProvider.java
index 73696c7..f05ffbe 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CompatibleRepositoryVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CompatibleRepositoryVersionResourceProvider.java
@@ -165,7 +165,7 @@ public class CompatibleRepositoryVersionResourceProvider extends ReadOnlyResourc
           if (compatibleRepositoryVersionsMap.containsKey(repositoryVersionEntity.getId())) {
             compatibleRepositoryVersionsMap.get(repositoryVersionEntity.getId()).addUpgradePackType(up.getType());
             if (LOG.isDebugEnabled()) {
-              LOG.debug("Stack id: {} exists in map.  Appended new upgrade type {}" + repositoryVersionEntity.getId(), up.getType());
+              LOG.debug("Stack id: {} exists in map.  Appended new upgrade type {}{}", up.getType(), repositoryVersionEntity.getId());
             }
           } else {
             CompatibleRepositoryVersion compatibleRepositoryVersionEntity = new CompatibleRepositoryVersion(repositoryVersionEntity);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
index 292761a..2df3b00 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
@@ -369,7 +369,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
           stackId.getStackVersion(), s.getName(), request.getComponentName())) {
         throw new IllegalArgumentException("Unsupported or invalid component"
             + " in stack stackInfo=" + stackId.getStackId()
-            + " request=" + request.toString());
+            + " request=" + request);
       }
     }
 
@@ -412,7 +412,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
                 stackId.getStackVersion(), s.getName(), request.getComponentName());
         if (componentInfo == null) {
             throw new AmbariException("Could not get component information from stack definition: Stack=" +
-                stackId.toString() + ", Service=" + s.getName() + ", Component=" + request.getComponentName());
+              stackId + ", Service=" + s.getName() + ", Component=" + request.getComponentName());
         }
         sc.setRecoveryEnabled(componentInfo.isRecoveryEnabled());
         LOG.info("Component: {}, recovery_enabled from stack definition:{}", componentInfo.getName(),
@@ -778,7 +778,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
         throw new AmbariException("Found non removable host component when trying to delete service component." +
             " To remove host component, it must be in DISABLED/INIT/INSTALLED/INSTALL_FAILED/UNKNOWN" +
             "/UNINSTALLED/INSTALLING state."
-            + ", request=" + request.toString()
+            + ", request=" + request
             + ", current state=" + sc.getDesiredState() + ".");
 
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
index cc23177..c2b998c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
@@ -314,11 +314,8 @@ public class ConfigGroupResourceProvider extends
     Set<ConfigGroupResponse> responses = new HashSet<>();
     if (requests != null) {
       for (ConfigGroupRequest request : requests) {
-        LOG.debug("Received a Config group request with"
-          + ", clusterName = " + request.getClusterName()
-          + ", groupId = " + request.getId()
-          + ", groupName = " + request.getGroupName()
-          + ", tag = " + request.getTag());
+        LOG.debug("Received a Config group request with, clusterName = {}, groupId = {}, groupName = {}, tag = {}",
+          request.getClusterName(), request.getId(), request.getGroupName(), request.getTag());
 
         if (request.getClusterName() == null) {
           LOG.warn("Cluster name is a required field.");
@@ -485,9 +482,8 @@ public class ConfigGroupResourceProvider extends
       || request.getTag() == null
       || request.getTag().isEmpty()) {
 
-      LOG.debug("Received a config group request with cluster name = " +
-        request.getClusterName() + ", group name = " + request.getGroupName()
-        + ", tag = " + request.getTag());
+      LOG.debug("Received a config group request with cluster name = {}, group name = {}, tag = {}",
+        request.getClusterName(), request.getGroupName(), request.getTag());
 
       throw new IllegalArgumentException("Cluster name, group name and tag need to be provided.");
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/GroupResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/GroupResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/GroupResourceProvider.java
index ba60239..f051651 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/GroupResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/GroupResourceProvider.java
@@ -117,9 +117,7 @@ public class GroupResourceProvider extends AbstractControllerResourceProvider {
       }
     });
 
-    LOG.debug("Found group responses matching get group request"
-        + ", groupRequestSize=" + requests.size() + ", groupResponseSize="
-        + responses.size());
+    LOG.debug("Found group responses matching get group request, groupRequestSize={}, groupResponseSize={}", requests.size(), responses.size());
 
     Set<String>   requestedIds = getRequestPropertyIds(request, predicate);
     Set<Resource> resources    = new HashSet<>();

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java
index 5166a07..aaf4656 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java
@@ -554,7 +554,7 @@ public class HostComponentResourceProvider extends AbstractControllerResourcePro
         // throw exception if desired state isn't a valid desired state (static check)
         if (!newState.isValidDesiredState()) {
           throw new IllegalArgumentException("Invalid arguments, invalid"
-              + " desired state, desiredState=" + newState.toString());
+              + " desired state, desiredState=" + newState);
         }
       }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
index a3216eb..4e2944f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
@@ -465,7 +465,7 @@ public class HostResourceProvider extends AbstractControllerResourceProvider {
       }
       throw new IllegalArgumentException("Invalid request contains"
           + " duplicate hostnames"
-          + ", hostnames=" + names.toString());
+          + ", hostnames=" + names);
     }
 
     if (!unknowns.isEmpty()) {
@@ -480,7 +480,7 @@ public class HostResourceProvider extends AbstractControllerResourceProvider {
       }
 
       throw new IllegalArgumentException("Attempted to add unknown hosts to a cluster.  " +
-          "These hosts have not been registered with the server: " + names.toString());
+          "These hosts have not been registered with the server: " + names);
     }
 
     Map<String, Set<String>> hostClustersMap = new HashMap<>();
@@ -516,9 +516,7 @@ public class HostResourceProvider extends AbstractControllerResourceProvider {
     }
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Received a createHost request"
-          + ", hostname=" + request.getHostname()
-          + ", request=" + request);
+      LOG.debug("Received a createHost request, hostname={}, request={}", request.getHostname(), request);
     }
 
     if (allHosts.contains(request.getHostname())) {
@@ -689,9 +687,7 @@ public class HostResourceProvider extends AbstractControllerResourceProvider {
 
     for (HostRequest request : requests) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Received an updateHost request"
-            + ", hostname=" + request.getHostname()
-            + ", request=" + request);
+        LOG.debug("Received an updateHost request, hostname={}, request={}", request.getHostname(), request);
       }
 
       Host host = clusters.getHost(request.getHostname());

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStatusHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStatusHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStatusHelper.java
index 11512c4..f94b979 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStatusHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStatusHelper.java
@@ -56,7 +56,7 @@ public class HostStatusHelper {
 
       componentHostResponse = hostComponents.size() == 1 ? hostComponents.iterator().next() : null;
     } catch (AmbariException e) {
-      LOG.debug("Error checking " + componentName + " server host component state: ", e);
+      LOG.debug("Error checking {} server host component state: ", componentName, e);
       return false;
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/JobResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/JobResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/JobResourceProvider.java
index 2080e93..e70c367 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/JobResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/JobResourceProvider.java
@@ -42,15 +42,15 @@ import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Resource provider for job resources.
  */
 public class JobResourceProvider extends
     AbstractJDBCResourceProvider<JobResourceProvider.JobFields> {
-  private static Log LOG = LogFactory.getLog(JobResourceProvider.class);
+  private static final Logger LOG = LoggerFactory.getLogger(JobResourceProvider.class);
 
   protected static final String JOB_CLUSTER_NAME_PROPERTY_ID = PropertyHelper
       .getPropertyId("Job", "cluster_name");
@@ -224,14 +224,14 @@ public class JobResourceProvider extends
       String fields = getDBFieldString(requestedIds);
       if (requestedIds.contains(JOB_ELAPSED_TIME_PROPERTY_ID)
           && !requestedIds.contains(JOB_SUBMIT_TIME_PROPERTY_ID))
-        fields += "," + getDBField(JOB_SUBMIT_TIME_PROPERTY_ID).toString();
+        fields += "," + getDBField(JOB_SUBMIT_TIME_PROPERTY_ID);
       if (jobId == null) {
         ps = db.prepareStatement("SELECT " + fields + " FROM " + JOB_TABLE_NAME
-            + " WHERE " + JobFields.WORKFLOWID.toString() + " = ?");
+            + " WHERE " + JobFields.WORKFLOWID + " = ?");
         ps.setString(1, workflowId);
       } else {
         ps = db.prepareStatement("SELECT " + fields + " FROM " + JOB_TABLE_NAME
-            + " WHERE " + JobFields.JOBID.toString() + " = ?");
+            + " WHERE " + JobFields.JOBID + " = ?");
         ps.setString(1, jobId);
       }
       return ps.executeQuery();

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/MemberResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/MemberResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/MemberResourceProvider.java
index 48a9b47..d9d56ee 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/MemberResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/MemberResourceProvider.java
@@ -119,9 +119,7 @@ public class MemberResourceProvider extends AbstractControllerResourceProvider {
       }
     });
 
-    LOG.debug("Found member responses matching get members request"
-        + ", membersRequestSize=" + requests.size() + ", membersResponseSize="
-        + responses.size());
+    LOG.debug("Found member responses matching get members request, membersRequestSize={}, membersResponseSize={}", requests.size(), responses.size());
 
     Set<String>   requestedIds = getRequestPropertyIds(request, predicate);
     Set<Resource> resources    = new HashSet<>();

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
index 8f00456..ea8fb37 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
@@ -64,7 +64,7 @@ import com.google.inject.Provider;
  */
 @StaticallyInject
 public class PreUpgradeCheckResourceProvider extends ReadOnlyResourceProvider {
-  private static Logger LOG = LoggerFactory.getLogger(PreUpgradeCheckResourceProvider.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PreUpgradeCheckResourceProvider.class);
 
   //----- Property ID constants ---------------------------------------------
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
index f41eb26..deb0d7c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
@@ -453,7 +453,7 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
         resourceFilterList.addAll(parseRequestResourceFilter(resourceMap,
           (String) propertyMap.get(REQUEST_CLUSTER_NAME_PROPERTY_ID)));
       }
-      LOG.debug("RequestResourceFilters : " + resourceFilters);
+      LOG.debug("RequestResourceFilters : {}", resourceFilters);
     }
     // Extract operation level property
     RequestOperationLevel operationLevel = null;
@@ -658,9 +658,7 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
         status = org.apache.ambari.server.actionmanager.RequestStatus.valueOf(requestStatus);
       }
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Received a Get Request Status request"
-            + ", requestId=null"
-            + ", requestStatus=" + status);
+        LOG.debug("Received a Get Request Status request, requestId=null, requestStatus={}", status);
       }
 
       maxResults = (maxResults != null) ? maxResults : BaseRequest.DEFAULT_PAGE_SIZE;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestStageContainer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestStageContainer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestStageContainer.java
index 476c3d4..1452c3c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestStageContainer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestStageContainer.java
@@ -218,7 +218,7 @@ public class RequestStageContainer {
 
       if (request != null && request.getStages()!= null && !request.getStages().isEmpty()) {
         if (LOG.isDebugEnabled()) {
-          LOG.debug(String.format("Triggering Action Manager, request=%s", request));
+          LOG.debug("Triggering Action Manager, request={}", request);
         }
         actionManager.sendActions(request, actionRequest);
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
index 0600159..c97caee 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
@@ -596,7 +596,7 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
       LOG.info("Received a updateService request"
           + ", clusterName=" + request.getClusterName()
           + ", serviceName=" + request.getServiceName()
-          + ", request=" + request.toString());
+          + ", request=" + request);
 
       clusterNames.add(request.getClusterName());
 
@@ -677,10 +677,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
 
       if (newState == null) {
         if (LOG.isDebugEnabled()) {
-          LOG.debug("Nothing to do for new updateService request"
-              + ", clusterName=" + request.getClusterName()
-              + ", serviceName=" + request.getServiceName()
-              + ", newDesiredState=null");
+          LOG.debug("Nothing to do for new updateService request, clusterName={}, serviceName={}, newDesiredState=null",
+            request.getClusterName(), request.getServiceName());
         }
         continue;
       }
@@ -799,12 +797,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
         changedComps.get(newState).add(sc);
       }
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Handling update to ServiceComponent"
-            + ", clusterName=" + cluster.getClusterName()
-            + ", serviceName=" + service.getName()
-            + ", componentName=" + sc.getName()
-            + ", currentDesiredState=" + oldScState
-            + ", newDesiredState=" + newState);
+        LOG.debug("Handling update to ServiceComponent, clusterName={}, serviceName={}, componentName={}, currentDesiredState={}, newDesiredState={}",
+          cluster.getClusterName(), service.getName(), sc.getName(), oldScState, newState);
       }
 
       for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
@@ -812,13 +806,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
         if (oldSchState == State.DISABLED || oldSchState == State.UNKNOWN) {
           //Ignore host components updates in this state
           if (LOG.isDebugEnabled()) {
-            LOG.debug("Ignoring ServiceComponentHost"
-                + ", clusterName=" + cluster.getClusterName()
-                + ", serviceName=" + service.getName()
-                + ", componentName=" + sc.getName()
-                + ", hostname=" + sch.getHostName()
-                + ", currentState=" + oldSchState
-                + ", newDesiredState=" + newState);
+            LOG.debug("Ignoring ServiceComponentHost, clusterName={}, serviceName={}, componentName={}, hostname={}, currentState={}, newDesiredState={}",
+              cluster.getClusterName(), service.getName(), sc.getName(), sch.getHostName(), oldSchState, newState);
           }
           continue;
         }
@@ -826,13 +815,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
         if (newState == oldSchState) {
           ignoredScHosts.add(sch);
           if (LOG.isDebugEnabled()) {
-            LOG.debug("Ignoring ServiceComponentHost"
-                + ", clusterName=" + cluster.getClusterName()
-                + ", serviceName=" + service.getName()
-                + ", componentName=" + sc.getName()
-                + ", hostname=" + sch.getHostName()
-                + ", currentState=" + oldSchState
-                + ", newDesiredState=" + newState);
+            LOG.debug("Ignoring ServiceComponentHost, clusterName={}, serviceName={}, componentName={}, hostname={}, currentState={}, newDesiredState={}",
+              cluster.getClusterName(), service.getName(), sc.getName(), sch.getHostName(), oldSchState, newState);
           }
           continue;
         }
@@ -840,11 +824,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
         if (! maintenanceStateHelper.isOperationAllowed(reqOpLvl, sch)) {
           ignoredScHosts.add(sch);
           if (LOG.isDebugEnabled()) {
-            LOG.debug("Ignoring ServiceComponentHost"
-                + ", clusterName=" + cluster.getClusterName()
-                + ", serviceName=" + service.getName()
-                + ", componentName=" + sc.getName()
-                + ", hostname=" + sch.getHostName());
+            LOG.debug("Ignoring ServiceComponentHost, clusterName={}, serviceName={}, componentName={}, hostname={}",
+              cluster.getClusterName(), service.getName(), sc.getName(), sch.getHostName());
           }
           continue;
         }
@@ -887,13 +868,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
               new ArrayList<ServiceComponentHost>());
         }
         if (LOG.isDebugEnabled()) {
-          LOG.debug("Handling update to ServiceComponentHost"
-              + ", clusterName=" + cluster.getClusterName()
-              + ", serviceName=" + service.getName()
-              + ", componentName=" + sc.getName()
-              + ", hostname=" + sch.getHostName()
-              + ", currentState=" + oldSchState
-              + ", newDesiredState=" + newState);
+          LOG.debug("Handling update to ServiceComponentHost, clusterName={}, serviceName={}, componentName={}, hostname={}, currentState={}, newDesiredState={}",
+            cluster.getClusterName(), service.getName(), sc.getName(), sch.getHostName(), oldSchState, newState);
         }
         changedScHosts.get(sc.getName()).get(newState).add(sch);
       }
@@ -1048,8 +1024,7 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
       Validate.notEmpty(serviceName, "Service name should be provided when creating a service");
 
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Received a createService request"
-                + ", clusterName=" + clusterName + ", serviceName=" + serviceName + ", request=" + request);
+        LOG.debug("Received a createService request, clusterName={}, serviceName={}, request={}", clusterName, serviceName, request);
       }
 
       if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, getClusterResourceId(clusterName), RoleAuthorization.SERVICE_ADD_DELETE_SERVICES)) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/TaskAttemptResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/TaskAttemptResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/TaskAttemptResourceProvider.java
index 2165fc7..3af7bb1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/TaskAttemptResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/TaskAttemptResourceProvider.java
@@ -42,15 +42,15 @@ import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Resource provider for task attempt resources.
  */
 public class TaskAttemptResourceProvider extends
     AbstractJDBCResourceProvider<TaskAttemptResourceProvider.TaskAttemptFields> {
-  private static Log LOG = LogFactory.getLog(TaskAttemptResourceProvider.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TaskAttemptResourceProvider.class);
 
   protected static final String TASK_ATTEMPT_CLUSTER_NAME_PROPERTY_ID = PropertyHelper
       .getPropertyId("TaskAttempt", "cluster_name");

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/URLStreamProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/URLStreamProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/URLStreamProvider.java
index ff36d9b..d1e9349 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/URLStreamProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/URLStreamProvider.java
@@ -38,9 +38,9 @@ import javax.net.ssl.TrustManagerFactory;
 import org.apache.ambari.server.configuration.ComponentSSLConfiguration;
 import org.apache.ambari.server.controller.utilities.StreamProvider;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.http.HttpStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * URL based implementation of a stream provider.
@@ -50,7 +50,7 @@ public class URLStreamProvider implements StreamProvider {
   public static final String COOKIE = "Cookie";
   private static final String WWW_AUTHENTICATE = "WWW-Authenticate";
   private static final String NEGOTIATE = "Negotiate";
-  private static Log LOG = LogFactory.getLog(URLStreamProvider.class);
+  private static final Logger LOG = LoggerFactory.getLogger(URLStreamProvider.class);
 
   private boolean setupTruststoreForHttps;
   private final int connTimeout;
@@ -175,7 +175,7 @@ public class URLStreamProvider implements StreamProvider {
   public HttpURLConnection processURL(String spec, String requestMethod, byte[] body, Map<String, List<String>> headers)
           throws IOException {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("readFrom spec:" + spec);
+      LOG.debug("readFrom spec:{}", spec);
     }
 
     HttpURLConnection connection = (spec.startsWith("https") && this.setupTruststoreForHttps) ?
@@ -185,7 +185,7 @@ public class URLStreamProvider implements StreamProvider {
 
     String appCookie = appCookieManager.getCachedAppCookie(spec);
     if (appCookie != null) {
-      LOG.debug("Using cached app cookie for URL:" + spec);
+      LOG.debug("Using cached app cookie for URL:{}", spec);
 
       // allow for additional passed in cookies
       if (headers == null || headers.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UserResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UserResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UserResourceProvider.java
index 2ec63db..c5c36e9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UserResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UserResourceProvider.java
@@ -119,9 +119,7 @@ public class UserResourceProvider extends AbstractControllerResourceProvider imp
     });
 
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Found user responses matching get user request"
-          + ", userRequestSize=" + requests.size()
-          + ", userResponseSize=" + responses.size());
+      LOG.debug("Found user responses matching get user request, userRequestSize={}, userResponseSize={}", requests.size(), responses.size());
     }
 
     Set<String>   requestedIds = getRequestPropertyIds(request, predicate);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java
index 494f32b..2c5ab2b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java
@@ -170,7 +170,7 @@ public class WidgetLayoutResourceProvider extends AbstractControllerResourceProv
             long id = Integer.parseInt(widget.get("id").toString());
             WidgetEntity widgetEntity = widgetDAO.findById(id);
             if (widgetEntity == null) {
-              throw new AmbariException("Widget with id " + widget.get("id").toString() + " does not exists");
+              throw new AmbariException("Widget with id " + widget.get("id") + " does not exists");
             }
             WidgetLayoutUserWidgetEntity widgetLayoutUserWidgetEntity = new WidgetLayoutUserWidgetEntity();
 
@@ -308,7 +308,7 @@ public class WidgetLayoutResourceProvider extends AbstractControllerResourceProv
               long id = Integer.parseInt(widget.get("id").toString());
               WidgetEntity widgetEntity = widgetDAO.findById(id);
               if (widgetEntity == null) {
-                throw new AmbariException("Widget with id " + widget.get("id").toString() + " does not exists");
+                throw new AmbariException("Widget with id " + widget.get("id") + " does not exists");
               }
               WidgetLayoutUserWidgetEntity widgetLayoutUserWidgetEntity = new WidgetLayoutUserWidgetEntity();
 

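Dropping the explicit toString() inside a concatenation, as in the two hunks above, is behavior-preserving and slightly safer: string concatenation goes through String.valueOf(), which renders a null operand as the text "null" instead of throwing. A tiny illustration (not Ambari code):

public class ConcatNullSketch {
  public static void main(String[] args) {
    Object id = null;

    // Concatenation converts null to "null" -- no NullPointerException.
    System.out.println("Widget with id " + id + " does not exist");

    // The explicit call would throw NullPointerException for a null operand:
    // System.out.println("Widget with id " + id.toString() + " does not exist");
  }
}
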
http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WorkflowResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WorkflowResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WorkflowResourceProvider.java
index e3935bb..13cb8a4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WorkflowResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WorkflowResourceProvider.java
@@ -42,15 +42,15 @@ import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Resource provider for workflow resources.
  */
 public class WorkflowResourceProvider extends
     AbstractJDBCResourceProvider<WorkflowResourceProvider.WorkflowFields> {
-  private static Log LOG = LogFactory.getLog(WorkflowResourceProvider.class);
+  private static final Logger LOG = LoggerFactory.getLogger(WorkflowResourceProvider.class);
 
   protected static final String WORKFLOW_CLUSTER_NAME_PROPERTY_ID = PropertyHelper
       .getPropertyId("Workflow", "cluster_name");

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/jmx/JMXMetricHolder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/jmx/JMXMetricHolder.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/jmx/JMXMetricHolder.java
index 5347919..81d72fb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/jmx/JMXMetricHolder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/jmx/JMXMetricHolder.java
@@ -42,7 +42,7 @@ public final class JMXMetricHolder {
 
     for (Map<String, Object> map : beans) {
       for (Map.Entry<String, Object> entry : map.entrySet()) {
-        stringBuilder.append("    ").append(entry.toString()).append("\n");
+        stringBuilder.append("    ").append(entry).append("\n");
       }
     }
     return stringBuilder.toString();

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalService.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalService.java
index 4370cfc..498b636 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalService.java
@@ -68,7 +68,7 @@ import com.google.inject.Injector;
 @AmbariService
 public class LogSearchDataRetrievalService extends AbstractService {
 
-  private static Logger LOG = LoggerFactory.getLogger(LogSearchDataRetrievalService.class);
+  private static final Logger LOG = LoggerFactory.getLogger(LogSearchDataRetrievalService.class);
 
   /**
    * Maximum number of failed attempts that the LogSearch integration code will attempt for
@@ -147,7 +147,7 @@ public class LogSearchDataRetrievalService extends AbstractService {
     final int maxTimeoutForCacheInHours =
       ambariServerConfiguration.getLogSearchMetadataCacheExpireTimeout();
 
-    LOG.debug("Caches configured with a max expire timeout of " + maxTimeoutForCacheInHours + " hours.");
+    LOG.debug("Caches configured with a max expire timeout of {} hours.", maxTimeoutForCacheInHours);
 
     // initialize the log file name cache
     logFileNameCache = CacheBuilder.newBuilder().expireAfterWrite(maxTimeoutForCacheInHours, TimeUnit.HOURS).build();

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingRequestHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingRequestHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingRequestHelperImpl.java
index 7e7656d..ff44b0f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingRequestHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingRequestHelperImpl.java
@@ -70,7 +70,7 @@ import org.slf4j.LoggerFactory;
  */
 public class LoggingRequestHelperImpl implements LoggingRequestHelper {
 
-  private static Logger LOG = LoggerFactory.getLogger(LoggingRequestHelperImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(LoggingRequestHelperImpl.class);
 
   private static final String LOGSEARCH_ADMIN_JSON_CONFIG_TYPE_NAME = "logsearch-admin-json";
 
@@ -156,7 +156,7 @@ public class LoggingRequestHelperImpl implements LoggingRequestHelper {
     try {
       // use the Apache builder to create the correct URI
       URI logSearchURI = createLogSearchQueryURI(protocol, queryParameters);
-      LOG.debug("Attempting to connect to LogSearch server at " + logSearchURI);
+      LOG.debug("Attempting to connect to LogSearch server at {}", logSearchURI);
       HttpURLConnection httpURLConnection  = (HttpURLConnection) logSearchURI.toURL().openConnection();
       secure(httpURLConnection, protocol);
       httpURLConnection.setRequestMethod("GET");
@@ -313,7 +313,7 @@ public class LoggingRequestHelperImpl implements LoggingRequestHelper {
       LogLineResult lineOne = response.getListOfResults().get(0);
       // this assumes that each component has only one associated log file,
       // which may not always hold true
-      LOG.debug("For componentName = " + componentName + ", log file name is = " + lineOne.getLogFilePath());
+      LOG.debug("For componentName = {}, log file name is = {}", componentName, lineOne.getLogFilePath());
       return Collections.singleton(lineOne.getLogFilePath());
 
     }
@@ -326,7 +326,7 @@ public class LoggingRequestHelperImpl implements LoggingRequestHelper {
     try {
       // use the Apache builder to create the correct URI
       URI logLevelQueryURI = createLogLevelQueryURI(protocol, componentName, hostName);
-      LOG.debug("Attempting to connect to LogSearch server at " + logLevelQueryURI);
+      LOG.debug("Attempting to connect to LogSearch server at {}", logLevelQueryURI);
 
       HttpURLConnection httpURLConnection  = (HttpURLConnection) logLevelQueryURI.toURL().openConnection();
       secure(httpURLConnection, protocol);
@@ -446,7 +446,8 @@ public class LoggingRequestHelperImpl implements LoggingRequestHelper {
       if (credential == null) {
         LOG.debug("LogSearch credentials could not be obtained from store.");
       } else {
-        LOG.debug("LogSearch credentials were not of the correct type, this is likely an error in configuration, credential type is = " + credential.getClass().getName());
+        LOG.debug("LogSearch credentials were not of the correct type, this is likely an error in configuration, credential type is = {}",
+          credential.getClass().getName());
       }
     } catch (AmbariException ambariException) {
       LOG.debug("Error encountered while trying to obtain LogSearch admin credentials.", ambariException);
@@ -488,7 +489,7 @@ public class LoggingRequestHelperImpl implements LoggingRequestHelper {
         // read in the response from LogSearch
         resultStream = httpURLConnection.getInputStream();
         BufferedReader reader = new BufferedReader(new InputStreamReader(resultStream));
-        LOG.debug("Response code from LogSearch Service is = " + httpURLConnection.getResponseCode());
+        LOG.debug("Response code from LogSearch Service is = {}", httpURLConnection.getResponseCode());
 
 
         String line = reader.readLine();
@@ -498,7 +499,7 @@ public class LoggingRequestHelperImpl implements LoggingRequestHelper {
           line = reader.readLine();
         }
 
-        LOG.debug("Sucessfully retrieved response from server, response = " + buffer);
+        LOG.debug("Sucessfully retrieved response from server, response = {}", buffer);
 
         return buffer;
       } finally {

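The response-handling hunk above keeps the same read loop while moving the status-code log to a placeholder. A reduced, self-contained sketch of that pattern (not Ambari code; the URL and class name are placeholders):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ResponseReadSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ResponseReadSketch.class);

  static StringBuilder readResponse(String spec) throws IOException {
    HttpURLConnection connection = (HttpURLConnection) new URL(spec).openConnection();
    connection.setRequestMethod("GET");

    StringBuilder buffer = new StringBuilder();
    // Read the body line by line; the status code is logged with a placeholder
    // so no string is built unless DEBUG is enabled.
    try (BufferedReader reader =
             new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
      LOG.debug("Response code from service is = {}", connection.getResponseCode());
      String line = reader.readLine();
      while (line != null) {
        buffer.append(line);
        line = reader.readLine();
      }
    }
    return buffer;
  }

  public static void main(String[] args) throws IOException {
    System.out.println(readResponse("http://example.org/").length());
  }
}
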
http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
index 05fbc5a..64261b3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
@@ -85,9 +85,9 @@ public class LoggingSearchPropertyProvider implements PropertyProvider {
       // Test to see if the authenticated user is authorized to view this data... if not, skip it.
       if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, getClusterResourceID(clusterName), REQUIRED_AUTHORIZATIONS)) {
         if(LOG.isDebugEnabled()) {
-          LOG.debug(String.format("The authenticated user (%s) is not authorized to access LogSearch data for the cluster named %s",
+          LOG.debug("The authenticated user ({}) is not authorized to access LogSearch data for the cluster named {}",
               AuthorizationHelper.getAuthenticatedName(),
-              clusterName));
+              clusterName);
         }
         continue;
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsCollectorHAManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsCollectorHAManager.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsCollectorHAManager.java
index b1dfed9..f107aab 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsCollectorHAManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsCollectorHAManager.java
@@ -84,7 +84,7 @@ public class MetricsCollectorHAManager {
   @Subscribe
   public void onMetricsCollectorHostDownEvent(MetricsCollectorHostDownEvent event) {
 
-    LOG.debug("MetricsCollectorHostDownEvent caught, Down collector : " + event.getCollectorHost());
+    LOG.debug("MetricsCollectorHostDownEvent caught, Down collector : {}", event.getCollectorHost());
 
     String clusterName = event.getClusterName();
     MetricsCollectorHAClusterState collectorHAClusterState = clusterCollectorHAState.get(clusterName);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsDownsamplingMethod.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsDownsamplingMethod.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsDownsamplingMethod.java
index 8044c7b..5994f16 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsDownsamplingMethod.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsDownsamplingMethod.java
@@ -40,9 +40,8 @@ public abstract class MetricsDownsamplingMethod {
         && timestamp <= temporalInfo.getEndTimeMillis();
 
     if (!retVal && LOG.isTraceEnabled()) {
-      LOG.trace("Ignoring out of band metric with ts: " + timestamp + ", "
-        + "temporalInfo: startTime = " + temporalInfo.getStartTimeMillis() + ","
-        + " endTime = " + temporalInfo.getEndTimeMillis());
+      LOG.trace("Ignoring out of band metric with ts: {}, temporalInfo: startTime = {}, endTime = {}",
+        timestamp, temporalInfo.getStartTimeMillis(), temporalInfo.getEndTimeMillis());
     }
 
     return retVal;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
index b1ab855..6d859fa 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
@@ -716,7 +716,7 @@ public abstract class AMSPropertyProvider extends MetricsPropertyProvider {
   private String preprocessPropertyId(String propertyId, String componentName) {
     if (propertyId.startsWith("jvm") && JVM_PROCESS_NAMES.keySet().contains(componentName)) {
       String newPropertyId = propertyId.replace("jvm.", "jvm." + JVM_PROCESS_NAMES.get(componentName));
-      LOG.debug("Pre-process: " + propertyId + ", to: " + newPropertyId);
+      LOG.debug("Pre-process: {}, to: {}", propertyId, newPropertyId);
       return newPropertyId;
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/MetricsRequestHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/MetricsRequestHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/MetricsRequestHelper.java
index efb9d2d..87a4634 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/MetricsRequestHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/MetricsRequestHelper.java
@@ -71,7 +71,7 @@ public class MetricsRequestHelper {
   }
 
   public TimelineMetrics fetchTimelineMetrics(URIBuilder uriBuilder, Long startTime, Long endTime) throws IOException {
-    LOG.debug("Metrics request url = " + uriBuilder.toString());
+    LOG.debug("Metrics request url = {}", uriBuilder);
     BufferedReader reader = null;
     TimelineMetrics timelineMetrics = null;
     try {
@@ -83,7 +83,7 @@ public class MetricsRequestHelper {
         //Try one more time with higher precision
         String higherPrecision = getHigherPrecision(uriBuilder, startTime, endTime);
         if (higherPrecision != null) {
-          LOG.debug("Requesting metrics with higher precision : " + higherPrecision);
+          LOG.debug("Requesting metrics with higher precision : {}", higherPrecision);
           uriBuilder.setParameter("precision", higherPrecision);
           String newSpec = uriBuilder.toString();
           connection = streamProvider.processURL(newSpec, HttpMethod.GET, (String) null,
@@ -102,13 +102,9 @@ public class MetricsRequestHelper {
 
       if (LOG.isTraceEnabled()) {
         for (TimelineMetric metric : timelineMetrics.getMetrics()) {
-          LOG.trace("metric: " + metric.getMetricName() +
-            ", size = " + metric.getMetricValues().size() +
-            ", host = " + metric.getHostName() +
-            ", app = " + metric.getAppId() +
-            ", instance = " + metric.getInstanceId() +
-            ", time = " + metric.getTimestamp() +
-            ", startTime = " + new Date(metric.getStartTime()));
+          LOG.trace("metric: {}, size = {}, host = {}, app = {}, instance = {}, time = {}, startTime = {}",
+            metric.getMetricName(), metric.getMetricValues().size(), metric.getHostName(), metric.getAppId(), metric.getInstanceId(), metric.getTimestamp(),
+            new Date(metric.getStartTime()));
         }
       }
     } catch (IOException io) {
@@ -136,9 +132,9 @@ public class MetricsRequestHelper {
         } catch (IOException e) {
           if (LOG.isWarnEnabled()) {
             if (LOG.isDebugEnabled()) {
-              LOG.warn("Unable to close http input stream : spec=" + uriBuilder.toString(), e);
+              LOG.warn("Unable to close http input stream : spec=" + uriBuilder, e);
             } else {
-              LOG.warn("Unable to close http input stream : spec=" + uriBuilder.toString());
+              LOG.warn("Unable to close http input stream : spec=" + uriBuilder);
             }
           }
         }
@@ -155,7 +151,7 @@ public class MetricsRequestHelper {
       BufferedReader reader = new BufferedReader(new InputStreamReader(errorStream));
       String errorMessage = reader.readLine();
       if (errorMessage != null && errorMessage.contains("PrecisionLimitExceededException")) {
-        LOG.debug("Encountered Precision exception while requesting metrics : " + errorMessage);
+        LOG.debug("Encountered Precision exception while requesting metrics : {}", errorMessage);
         return false;
       } else {
         throw new IOException(errorMessage);

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCache.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCache.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCache.java
index 472e3ea..bfe8456 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCache.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCache.java
@@ -57,7 +57,7 @@ public class TimelineMetricCache extends UpdatingSelfPopulatingCache {
    */
   public TimelineMetrics getAppTimelineMetricsFromCache(TimelineAppMetricCacheKey key) throws IllegalArgumentException, IOException {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Fetching metrics with key: " + key);
+      LOG.debug("Fetching metrics with key: {}", key);
     }
 
     // Make sure key is valid
@@ -84,7 +84,7 @@ public class TimelineMetricCache extends UpdatingSelfPopulatingCache {
     if (element != null && element.getObjectValue() != null) {
       TimelineMetricsCacheValue value = (TimelineMetricsCacheValue) element.getObjectValue();
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Returning value from cache: " + value);
+        LOG.debug("Returning value from cache: {}", value);
       }
       timelineMetrics = value.getTimelineMetrics();
     }
@@ -93,14 +93,9 @@ public class TimelineMetricCache extends UpdatingSelfPopulatingCache {
       // Print stats every 100 calls - Note: Supported in debug mode only
       if (printCacheStatsCounter.getAndIncrement() == 0) {
         StatisticsGateway statistics = this.getStatistics();
-        LOG.debug("Metrics cache stats => \n" +
-          ", Evictions = " + statistics.cacheEvictedCount() +
-          ", Expired = " + statistics.cacheExpiredCount() +
-          ", Hits = " + statistics.cacheHitCount() +
-          ", Misses = " + statistics.cacheMissCount() +
-          ", Hit ratio = " + statistics.cacheHitRatio() +
-          ", Puts = " + statistics.cachePutCount() +
-          ", Size in MB = " + (statistics.getLocalHeapSizeInBytes() / 1048576));
+        LOG.debug("Metrics cache stats => \n, Evictions = {}, Expired = {}, Hits = {}, Misses = {}, Hit ratio = {}, Puts = {}, Size in MB = {}",
+          statistics.cacheEvictedCount(), statistics.cacheExpiredCount(), statistics.cacheHitCount(), statistics.cacheMissCount(), statistics.cacheHitRatio(),
+          statistics.cachePutCount(), statistics.getLocalHeapSizeInBytes() / 1048576);
       } else {
         printCacheStatsCounter.compareAndSet(100, 0);
       }
@@ -119,26 +114,23 @@ public class TimelineMetricCache extends UpdatingSelfPopulatingCache {
     Element element = this.getQuiet(key);
     if (element != null) {
       if (LOG.isTraceEnabled()) {
-        LOG.trace("key : " + element.getObjectKey());
-        LOG.trace("value : " + element.getObjectValue());
+        LOG.trace("key : {}", element.getObjectKey());
+        LOG.trace("value : {}", element.getObjectValue());
       }
 
       // Set new time boundaries on the key
       TimelineAppMetricCacheKey existingKey = (TimelineAppMetricCacheKey) element.getObjectKey();
 
-      LOG.debug("Existing temporal info: " + existingKey.getTemporalInfo() +
-        " for : " + existingKey.getMetricNames());
+      LOG.debug("Existing temporal info: {} for : {}", existingKey.getTemporalInfo(), existingKey.getMetricNames());
 
       TimelineAppMetricCacheKey newKey = (TimelineAppMetricCacheKey) key;
       existingKey.setTemporalInfo(newKey.getTemporalInfo());
 
-      LOG.debug("New temporal info: " + newKey.getTemporalInfo() +
-        " for : " + existingKey.getMetricNames());
+      LOG.debug("New temporal info: {} for : {}", newKey.getTemporalInfo(), existingKey.getMetricNames());
 
       if (existingKey.getSpec() == null || !existingKey.getSpec().equals(newKey.getSpec())) {
         existingKey.setSpec(newKey.getSpec());
-        LOG.debug("New spec: " + newKey.getSpec() +
-          " for : " + existingKey.getMetricNames());
+        LOG.debug("New spec: {} for : {}", newKey.getSpec(), existingKey.getMetricNames());
       }
     }
 

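The statistics hunk above is noted as "Supported in debug mode only", and that guard remains useful even with placeholders: the argument expressions are evaluated before SLF4J can discard the message. A minimal sketch of the distinction (not Ambari code; the stats call is a stand-in for something like walking Ehcache statistics counters):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GuardedDebugSketch {
  private static final Logger LOG = LoggerFactory.getLogger(GuardedDebugSketch.class);

  public static void main(String[] args) {
    // Cheap argument: the placeholder form alone is enough.
    LOG.debug("Fetching metrics with key: {}", "cluster-metrics");

    // Expensive argument: keep the guard, otherwise expensiveStats() runs
    // even when DEBUG is disabled, because arguments are evaluated eagerly.
    if (LOG.isDebugEnabled()) {
      LOG.debug("Metrics cache stats => hits = {}", expensiveStats());
    }
  }

  // Stand-in for a costly computation.
  private static long expensiveStats() {
    long sum = 0;
    for (int i = 0; i < 1_000_000; i++) {
      sum += i;
    }
    return sum;
  }
}
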
http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheEntryFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheEntryFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheEntryFactory.java
index b9f3e0f..343587b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheEntryFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricCacheEntryFactory.java
@@ -75,7 +75,7 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
    */
   @Override
   public Object createEntry(Object key) throws Exception {
-    LOG.debug("Creating cache entry since none exists, key = " + key);
+    LOG.debug("Creating cache entry since none exists, key = {}", key);
     TimelineAppMetricCacheKey metricCacheKey = (TimelineAppMetricCacheKey) key;
 
     TimelineMetrics timelineMetrics = null;
@@ -85,7 +85,7 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
         metricCacheKey.getTemporalInfo().getStartTimeMillis(),
         metricCacheKey.getTemporalInfo().getEndTimeMillis());
     } catch (IOException io) {
-      LOG.debug("Caught IOException on fetching metrics. " + io.getMessage());
+      LOG.debug("Caught IOException on fetching metrics. {}", io.getMessage());
       throw io;
     }
 
@@ -100,7 +100,7 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
           metricCacheKey.getTemporalInfo().getEndTimeMillis()) //Initial Precision
       );
 
-      LOG.debug("Created cache entry: " + value);
+      LOG.debug("Created cache entry: {}", value);
     }
 
     return value;
@@ -120,7 +120,7 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
     TimelineAppMetricCacheKey metricCacheKey = (TimelineAppMetricCacheKey) key;
     TimelineMetricsCacheValue existingMetrics = (TimelineMetricsCacheValue) value;
 
-    LOG.debug("Updating cache entry, key: " + key + ", with value = " + value);
+    LOG.debug("Updating cache entry, key: {}, with value = {}", key, value);
 
     Long existingSeriesStartTime = existingMetrics.getStartTime();
     Long existingSeriesEndTime = existingMetrics.getEndTime();
@@ -139,12 +139,12 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
     Long newEndTime = null;
     if(!requestedPrecision.equals(currentPrecision)) {
       // Ignore cache entry. Get the entire data from the AMS and update the cache.
-      LOG.debug("Precision changed from " + currentPrecision + " to " + requestedPrecision);
+      LOG.debug("Precision changed from {} to {}", currentPrecision, requestedPrecision);
       newStartTime = requestedStartTime;
       newEndTime = requestedEndTime;
     } else {
       //Get only the metric values for the delta period from the cache.
-      LOG.debug("No change in precision " + currentPrecision);
+      LOG.debug("No change in precision {}", currentPrecision);
       newStartTime = getRefreshRequestStartTime(existingSeriesStartTime,
           existingSeriesEndTime, requestedStartTime);
       newEndTime = getRefreshRequestEndTime(existingSeriesStartTime,
@@ -157,13 +157,11 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
        !((newStartTime.equals(existingSeriesStartTime) &&
        newEndTime.equals(existingSeriesEndTime)) && requestedPrecision.equals(currentPrecision)) ) {
 
-      LOG.debug("Existing cached timeseries startTime = " +
-          new Date(getMillisecondsTime(existingSeriesStartTime)) + ", endTime = " +
-          new Date(getMillisecondsTime(existingSeriesEndTime)));
+      LOG.debug("Existing cached timeseries startTime = {}, endTime = {}",
+        new Date(getMillisecondsTime(existingSeriesStartTime)), new Date(getMillisecondsTime(existingSeriesEndTime)));
 
-      LOG.debug("Requested timeseries startTime = " +
-          new Date(getMillisecondsTime(newStartTime)) + ", endTime = " +
-          new Date(getMillisecondsTime(newEndTime)));
+      LOG.debug("Requested timeseries startTime = {}, endTime = {}",
+        new Date(getMillisecondsTime(newStartTime)), new Date(getMillisecondsTime(newEndTime)));
 
       // Update spec with new start and end time
       uriBuilder.setParameter("startTime", String.valueOf(newStartTime));
@@ -190,9 +188,8 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
         throw io;
       }
     } else {
-      LOG.debug("Skip updating cache with new startTime = " +
-        new Date(getMillisecondsTime(newStartTime)) +
-        ", new endTime = " + new Date(getMillisecondsTime(newEndTime)));
+      LOG.debug("Skip updating cache with new startTime = {}, new endTime = {}",
+        new Date(getMillisecondsTime(newStartTime)), new Date(getMillisecondsTime(newEndTime)));
     }
   }
 
@@ -214,9 +211,8 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
         if (LOG.isTraceEnabled()) {
           TreeMap<Long, Double> sortedMetrics = new TreeMap<>(timelineMetric.getMetricValues());
 
-          LOG.trace("New metric: " + timelineMetric.getMetricName() +
-            " # " + timelineMetric.getMetricValues().size() + ", startTime = " +
-            sortedMetrics.firstKey() + ", endTime = " + sortedMetrics.lastKey());
+          LOG.trace("New metric: {} # {}, startTime = {}, endTime = {}",
+            timelineMetric.getMetricName(), timelineMetric.getMetricValues().size(), sortedMetrics.firstKey(), sortedMetrics.lastKey());
         }
 
         TimelineMetric existingMetric = null;
@@ -233,9 +229,8 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
 
           if (LOG.isTraceEnabled()) {
             TreeMap<Long, Double> sortedMetrics = new TreeMap<>(existingMetric.getMetricValues());
-            LOG.trace("Merged metric: " + timelineMetric.getMetricName() + ", " +
-              "Final size: " + existingMetric.getMetricValues().size() + ", startTime = " +
-              sortedMetrics.firstKey() + ", endTime = " + sortedMetrics.lastKey());
+            LOG.trace("Merged metric: {}, Final size: {}, startTime = {}, endTime = {}",
+              timelineMetric.getMetricName(), existingMetric.getMetricValues().size(), sortedMetrics.firstKey(), sortedMetrics.lastKey());
           }
         } else {
           existingTimelineMetrics.getMetrics().add(timelineMetric);
@@ -253,8 +248,7 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
         existingMetric.setMetricValues(new TreeMap<Long, Double>());
       } else {
         TreeMap<Long, Double> existingMetricValues = existingMetric.getMetricValues();
-        LOG.trace("Existing metric: " + existingMetric.getMetricName() +
-          " # " + existingMetricValues.size());
+        LOG.trace("Existing metric: {} # {}", existingMetric.getMetricName(), existingMetricValues.size());
 
         // Retain only the values that are within the [requestStartTime, requestedEndTime] window
         existingMetricValues.headMap(requestedStartTime,false).clear();
@@ -281,8 +275,7 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
       startTime = getTimeShiftedStartTime(existingSeriesEndTime);
     }
 
-    LOG.trace("Requesting timeseries data with new startTime = " +
-      new Date(getMillisecondsTime(startTime)));
+    LOG.trace("Requesting timeseries data with new startTime = {}", new Date(getMillisecondsTime(startTime)));
 
     return startTime;
   }
@@ -303,8 +296,7 @@ public class TimelineMetricCacheEntryFactory implements UpdatingCacheEntryFactor
       endTime = existingSeriesStartTime;
     }
 
-    LOG.trace("Requesting timeseries data with new endTime = " +
-      new Date(getMillisecondsTime(endTime)));
+    LOG.trace("Requesting timeseries data with new endTime = {}", new Date(getMillisecondsTime(endTime)));
     return endTime;
   }
 

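The cache-refresh hunks above merge freshly fetched points into the cached series and then trim it to the requested window with headMap(...).clear(). A small sketch of that trimming step on a TreeMap-backed time series (not Ambari code; values and window are invented):

import java.util.TreeMap;

public class WindowTrimSketch {
  public static void main(String[] args) {
    TreeMap<Long, Double> values = new TreeMap<>();
    for (long ts = 0; ts <= 100; ts += 10) {
      values.put(ts, (double) ts);
    }

    long requestedStartTime = 30;
    long requestedEndTime = 70;

    // headMap/tailMap return views backed by the map, so clear() removes the
    // out-of-window points from the underlying series in place.
    values.headMap(requestedStartTime, false).clear();  // drop points before the window
    values.tailMap(requestedEndTime, false).clear();    // drop points after the window

    System.out.println(values.firstKey() + " .. " + values.lastKey()); // 30 .. 70
  }
}
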
http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricsCacheSizeOfEngine.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricsCacheSizeOfEngine.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricsCacheSizeOfEngine.java
index 1470da9..2401d75 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricsCacheSizeOfEngine.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/cache/TimelineMetricsCacheSizeOfEngine.java
@@ -124,7 +124,7 @@ public class TimelineMetricsCacheSizeOfEngine implements SizeOfEngine {
           timelineMetricPrimitivesApproximation += reflectionSizeOf.sizeOf(metric.getType());
           timelineMetricPrimitivesApproximation += 8; // Object overhead
 
-          LOG.debug("timelineMetricPrimitivesApproximation bytes = " + timelineMetricPrimitivesApproximation);
+          LOG.debug("timelineMetricPrimitivesApproximation bytes = {}", timelineMetricPrimitivesApproximation);
         }
         size += timelineMetricPrimitivesApproximation;
 
@@ -132,11 +132,11 @@ public class TimelineMetricsCacheSizeOfEngine implements SizeOfEngine {
         if (metricValues != null && !metricValues.isEmpty()) {
           // Numeric wrapper: 12 bytes + 8 bytes Data type + 4 bytes alignment = 48 (Long, Double)
           // Tree Map: 12 bytes for header + 20 bytes for 5 object fields : pointers + 1 byte for flag = 40
-         LOG.debug("Size of metric value: " + (sizeOfMapEntry + sizeOfMapEntryOverhead) * metricValues.size());
+          LOG.debug("Size of metric value: {}", (sizeOfMapEntry + sizeOfMapEntryOverhead) * metricValues.size());
           size += (sizeOfMapEntry + sizeOfMapEntryOverhead) * metricValues.size(); // Treemap size is O(1)
         }
       }
-      LOG.debug("Total Size of metric values in cache: " + size);
+      LOG.debug("Total Size of metric values in cache: {}", size);
     }
 
     return size;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/KerberosChecker.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/KerberosChecker.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/KerberosChecker.java
index 5097c09..b262c12 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/KerberosChecker.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/KerberosChecker.java
@@ -41,7 +41,7 @@ public class KerberosChecker {
   public static final String JAVA_SECURITY_AUTH_LOGIN_CONFIG =
       "java.security.auth.login.config";
 
-  private static Logger LOG = LoggerFactory.getLogger(KerberosChecker.class);
+  private static final Logger LOG = LoggerFactory.getLogger(KerberosChecker.class);
 
   @Inject
   static Configuration config;

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/ServiceCalculatedStateFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/ServiceCalculatedStateFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/ServiceCalculatedStateFactory.java
index d95a753..935361c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/ServiceCalculatedStateFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/ServiceCalculatedStateFactory.java
@@ -51,7 +51,7 @@ public class ServiceCalculatedStateFactory {
     try {
       serviceType = Service.Type.valueOf(service);
     } catch (Exception e){
-      LOG.debug(String.format("Could not parse service name \"%s\", will use default state provider", service));
+      LOG.debug("Could not parse service name \"{}\", will use default state provider", service);
     }
 
     if (serviceType == null) {  // service is unknown, return default service state provider

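The hunk above logs the parse failure at DEBUG and falls through to a default state provider. The underlying idiom is a guarded Enum.valueOf, roughly as follows (not Ambari code; the enum constants are invented for the example):

public class SafeEnumParseSketch {
  enum ServiceType { HDFS, YARN, HIVE }

  // Enum.valueOf throws IllegalArgumentException for unknown names (and
  // NullPointerException for null), so catch and fall back to a default.
  static ServiceType parseOrDefault(String name, ServiceType fallback) {
    try {
      return ServiceType.valueOf(name);
    } catch (IllegalArgumentException | NullPointerException e) {
      return fallback;
    }
  }

  public static void main(String[] args) {
    System.out.println(parseOrDefault("HDFS", ServiceType.YARN));          // HDFS
    System.out.println(parseOrDefault("NOT_A_SERVICE", ServiceType.YARN)); // YARN
  }
}
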
http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/credentialapi/CredentialUtil.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/credentialapi/CredentialUtil.java b/ambari-server/src/main/java/org/apache/ambari/server/credentialapi/CredentialUtil.java
index e6d7a37..52325d4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/credentialapi/CredentialUtil.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/credentialapi/CredentialUtil.java
@@ -424,7 +424,7 @@ public class CredentialUtil extends Configured implements Tool {
           // prompt the user to confirm or reject the overwrite
           overwrite = ToolRunner
                   .confirmPrompt("You are about to OVERWRITE the credential " +
-                          alias + " from CredentialProvider " + provider.toString() +
+                          alias + " from CredentialProvider " + provider +
                           ". Continue? ");
         }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/customactions/ActionDefinitionManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/customactions/ActionDefinitionManager.java b/ambari-server/src/main/java/org/apache/ambari/server/customactions/ActionDefinitionManager.java
index 9260993..1f2d26f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/customactions/ActionDefinitionManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/customactions/ActionDefinitionManager.java
@@ -105,9 +105,9 @@ public class ActionDefinitionManager {
           continue;
         }
         for (ActionDefinitionSpec ad : adx.actionDefinitions()) {
-          LOG.debug("Read action definition = " + ad.toString());
+          LOG.debug("Read action definition = {}", ad);
           StringBuilder errorReason =
-              new StringBuilder("Error while parsing action definition. ").append(ad.toString()).append(" --- ");
+              new StringBuilder("Error while parsing action definition. ").append(ad).append(" --- ");
 
           TargetHostType targetType = safeValueOf(TargetHostType.class, ad.getTargetType(), errorReason);
           ActionType actionType = safeValueOf(ActionType.class, ad.getActionType(), errorReason);
@@ -121,7 +121,7 @@ public class ActionDefinitionManager {
             String actionName = ad.getActionName();
             if (actionDefinitionMap.containsKey(actionName)) {
               LOG.warn("Ignoring action definition as a different definition by that name already exists. "
-                  + ad.toString());
+                  + ad);
               continue;
             }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertHashInvalidationListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertHashInvalidationListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertHashInvalidationListener.java
index 2d37430..f418abb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertHashInvalidationListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertHashInvalidationListener.java
@@ -56,7 +56,7 @@ public class AlertHashInvalidationListener {
   /**
    * Logger.
    */
-  private static Logger LOG = LoggerFactory.getLogger(AlertHashInvalidationListener.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AlertHashInvalidationListener.class);
 
   /**
    * Invalidates hosts so that they can receive updated alert definition

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertLifecycleListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertLifecycleListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertLifecycleListener.java
index 31c2f4a..9dce45a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertLifecycleListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertLifecycleListener.java
@@ -53,7 +53,7 @@ public class AlertLifecycleListener {
   /**
    * Logger.
    */
-  private static Logger LOG = LoggerFactory.getLogger(AlertLifecycleListener.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AlertLifecycleListener.class);
 
   /**
    * Used for quick lookups of aggregate alerts.

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertMaintenanceModeListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertMaintenanceModeListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertMaintenanceModeListener.java
index 64372e0..f73d7c9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertMaintenanceModeListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertMaintenanceModeListener.java
@@ -54,7 +54,7 @@ public class AlertMaintenanceModeListener {
   /**
    * Logger.
    */
-  private static Logger LOG = LoggerFactory.getLogger(AlertMaintenanceModeListener.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AlertMaintenanceModeListener.class);
 
   /**
    * Publishes {@link AlertEvent} instances.

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertServiceComponentHostListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertServiceComponentHostListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertServiceComponentHostListener.java
index ec61e8c..3563e99 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertServiceComponentHostListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertServiceComponentHostListener.java
@@ -38,7 +38,7 @@ public class AlertServiceComponentHostListener {
   /**
    * Logger.
    */
-  private static Logger LOG = LoggerFactory.getLogger(AlertServiceComponentHostListener.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AlertServiceComponentHostListener.class);
 
   /**
    * Used for deleting the alert notices when a definition is disabled.

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertServiceStateListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertServiceStateListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertServiceStateListener.java
index 7a936fe..053fb54 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertServiceStateListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertServiceStateListener.java
@@ -60,7 +60,7 @@ public class AlertServiceStateListener {
   /**
    * Logger.
    */
-  private static Logger LOG = LoggerFactory.getLogger(AlertServiceStateListener.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AlertServiceStateListener.class);
 
   /**
    * Services metainfo; injected lazily as a {@link Provider} since JPA is not

http://git-wip-us.apache.org/repos/asf/ambari/blob/219f2234/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertStateChangedListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertStateChangedListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertStateChangedListener.java
index bc24b6d..d5dc530 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertStateChangedListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertStateChangedListener.java
@@ -77,7 +77,7 @@ public class AlertStateChangedListener {
   /**
    * Logger.
    */
-  private static Logger LOG = LoggerFactory.getLogger(AlertStateChangedListener.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AlertStateChangedListener.class);
 
   /**
    * A logger that is only for logging alert state changes so that there is an

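A recurring one-word change in this commit is adding final to the per-class logger field. A minimal sketch of the resulting declaration convention (not Ambari code): one logger per class, created once and never reassigned, with final documenting and enforcing that the reference is constant.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerDeclarationSketch {
  // Created once at class initialization; final prevents accidental reassignment.
  private static final Logger LOG = LoggerFactory.getLogger(LoggerDeclarationSketch.class);

  public static void main(String[] args) {
    LOG.info("logger field is static final");
  }
}
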

[10/50] [abbrv] ambari git commit: AMBARI-21086. Hive view 2.0 does not work on IE 11 browser (pallavkul)

Posted by nc...@apache.org.
AMBARI-21086. Hive view 2.0 does not work on IE 11 browser (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/84c4e0d6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/84c4e0d6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/84c4e0d6

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 84c4e0d652db5b06f830073e9a8dc899c38daef6
Parents: 4247f69
Author: pallavkul <pa...@gmail.com>
Authored: Wed Jun 7 12:22:57 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Wed Jun 7 12:22:57 2017 +0530

----------------------------------------------------------------------
 .../src/main/resources/ui/ember-cli-build.js    |   1 +
 .../resources/ui/vendor/browser-pollyfills.js   | 213 +++++++++++++++++++
 2 files changed, 214 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4e0d6/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js b/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js
index 01682e7..325cc1b 100644
--- a/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js
+++ b/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js
@@ -60,6 +60,7 @@ module.exports = function(defaults) {
    app.import('bower_components/jquery-ui/jquery-ui.js');
    app.import('bower_components/jquery-ui/themes/base/jquery-ui.css');
    app.import('bower_components/codemirror/addon/hint/show-hint.css');
+   app.import('vendor/browser-pollyfills.js');
 
   /*
   app.import('vendor/codemirror/codemirror-min.js');

http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4e0d6/contrib/views/hive20/src/main/resources/ui/vendor/browser-pollyfills.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/vendor/browser-pollyfills.js b/contrib/views/hive20/src/main/resources/ui/vendor/browser-pollyfills.js
new file mode 100644
index 0000000..88a59c1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/vendor/browser-pollyfills.js
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+if (!String.prototype.startsWith) {
+  String.prototype.startsWith = function (searchString, position) {
+    position = position || 0;
+    return this.substr(position, searchString.length) === searchString;
+  };
+}
+
+if (!String.prototype.endsWith) {
+  String.prototype.endsWith = function (searchString, position) {
+    var subjectString = this.toString();
+    if (typeof position !== 'number' || !isFinite(position) || Math.floor(position) !== position || position > subjectString.length) {
+      position = subjectString.length;
+    }
+    position -= searchString.length;
+    var lastIndex = subjectString.lastIndexOf(searchString, position);
+    return lastIndex !== -1 && lastIndex === position;
+  };
+}
+
+if (typeof Object.assign != 'function') {
+  Object.assign = function (target, varArgs) { // .length of function is 2
+    'use strict';
+    if (target == null) { // TypeError if undefined or null
+      throw new TypeError('Cannot convert undefined or null to object');
+    }
+
+    var to = Object(target);
+
+    for (var index = 1; index < arguments.length; index++) {
+      var nextSource = arguments[index];
+
+      if (nextSource != null) { // Skip over if undefined or null
+        for (var nextKey in nextSource) {
+          // Avoid bugs when hasOwnProperty is shadowed
+          if (Object.prototype.hasOwnProperty.call(nextSource, nextKey)) {
+            to[nextKey] = nextSource[nextKey];
+          }
+        }
+      }
+    }
+    return to;
+  };
+}
+
+
+if (!Array.from) {
+  Array.from = (function () {
+    var toStr = Object.prototype.toString;
+    var isCallable = function (fn) {
+      return typeof fn === 'function' || toStr.call(fn) === '[object Function]';
+    };
+    var toInteger = function (value) {
+      var number = Number(value);
+      if (isNaN(number)) {
+        return 0;
+      }
+      if (number === 0 || !isFinite(number)) {
+        return number;
+      }
+      return (number > 0 ? 1 : -1) * Math.floor(Math.abs(number));
+    };
+    var maxSafeInteger = Math.pow(2, 53) - 1;
+    var toLength = function (value) {
+      var len = toInteger(value);
+      return Math.min(Math.max(len, 0), maxSafeInteger);
+    };
+
+    // The length property of the from method is 1.
+    return function from(arrayLike/*, mapFn, thisArg */) {
+      // 1. Let C be the this value.
+      var C = this;
+
+      // 2. Let items be ToObject(arrayLike).
+      var items = Object(arrayLike);
+
+      // 3. ReturnIfAbrupt(items).
+      if (arrayLike == null) {
+        throw new TypeError('Array.from requires an array-like object - not null or undefined');
+      }
+
+      // 4. If mapfn is undefined, then let mapping be false.
+      var mapFn = arguments.length > 1 ? arguments[1] : void undefined;
+      var T;
+      if (typeof mapFn !== 'undefined') {
+        // 5. else
+        // 5. a If IsCallable(mapfn) is false, throw a TypeError exception.
+        if (!isCallable(mapFn)) {
+          throw new TypeError('Array.from: when provided, the second argument must be a function');
+        }
+
+        // 5. b. If thisArg was supplied, let T be thisArg; else let T be undefined.
+        if (arguments.length > 2) {
+          T = arguments[2];
+        }
+      }
+
+      // 10. Let lenValue be Get(items, "length").
+      // 11. Let len be ToLength(lenValue).
+      var len = toLength(items.length);
+
+      // 13. If IsConstructor(C) is true, then
+      // 13. a. Let A be the result of calling the [[Construct]] internal method
+      // of C with an argument list containing the single item len.
+      // 14. a. Else, Let A be ArrayCreate(len).
+      var A = isCallable(C) ? Object(new C(len)) : new Array(len);
+
+      // 16. Let k be 0.
+      var k = 0;
+      // 17. Repeat, while k < len… (also steps a - h)
+      var kValue;
+      while (k < len) {
+        kValue = items[k];
+        if (mapFn) {
+          A[k] = typeof T === 'undefined' ? mapFn(kValue, k) : mapFn.call(T, kValue, k);
+        } else {
+          A[k] = kValue;
+        }
+        k += 1;
+      }
+      // 18. Let putStatus be Put(A, "length", len, true).
+      A.length = len;
+      // 20. Return A.
+      return A;
+    };
+  }());
+}
+
+Number.isNaN = Number.isNaN || function (value) {
+    return typeof value === 'number' && isNaN(value);
+  }
+
+
+if (!String.fromCodePoint) {
+  (function () {
+    var defineProperty = (function () {
+      // IE 8 only supports `Object.defineProperty` on DOM elements
+      try {
+        var object = {};
+        var $defineProperty = Object.defineProperty;
+        var result = $defineProperty(object, object, object) && $defineProperty;
+      } catch (error) {
+      }
+      return result;
+    }());
+    var stringFromCharCode = String.fromCharCode;
+    var floor = Math.floor;
+    var fromCodePoint = function () {
+      var MAX_SIZE = 0x4000;
+      var codeUnits = [];
+      var highSurrogate;
+      var lowSurrogate;
+      var index = -1;
+      var length = arguments.length;
+      if (!length) {
+        return '';
+      }
+      var result = '';
+      while (++index < length) {
+        var codePoint = Number(arguments[index]);
+        if (
+          !isFinite(codePoint) ||       // `NaN`, `+Infinity`, or `-Infinity`
+          codePoint < 0 ||              // not a valid Unicode code point
+          codePoint > 0x10FFFF ||       // not a valid Unicode code point
+          floor(codePoint) != codePoint // not an integer
+        ) {
+          throw RangeError('Invalid code point: ' + codePoint);
+        }
+        if (codePoint <= 0xFFFF) { // BMP code point
+          codeUnits.push(codePoint);
+        } else { // Astral code point; split in surrogate halves
+          // http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
+          codePoint -= 0x10000;
+          highSurrogate = (codePoint >> 10) + 0xD800;
+          lowSurrogate = (codePoint % 0x400) + 0xDC00;
+          codeUnits.push(highSurrogate, lowSurrogate);
+        }
+        if (index + 1 == length || codeUnits.length > MAX_SIZE) {
+          result += stringFromCharCode.apply(null, codeUnits);
+          codeUnits.length = 0;
+        }
+      }
+      return result;
+    };
+    if (defineProperty) {
+      defineProperty(String, 'fromCodePoint', {
+        'value': fromCodePoint,
+        'configurable': true,
+        'writable': true
+      });
+    } else {
+      String.fromCodePoint = fromCodePoint;
+    }
+  }());
+}


[39/50] [abbrv] ambari git commit: AMBARI-21218 Enable Kerberos UI is blank the first time it is opened (dili)

Posted by nc...@apache.org.
AMBARI-21218 Enable Kerberos UI is blank the first time it is opened (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2bea1205
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2bea1205
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2bea1205

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 2bea1205e213db7fc35fc9a0aac0453bbdeaa417
Parents: d97aa1d
Author: Di Li <di...@apache.org>
Authored: Fri Jun 9 16:37:40 2017 -0400
Committer: Di Li <di...@apache.org>
Committed: Fri Jun 9 16:37:40 2017 -0400

----------------------------------------------------------------------
 .../app/controllers/main/admin/stack_and_upgrade_controller.js     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2bea1205/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index a676f7429..3483b29 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -1863,7 +1863,7 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
    * get the installed repositories of HDP from server
    */
   loadRepositories: function () {
-    if (App.router.get('clusterController.isLoaded')) {
+    if (App.router.get('clusterController.isLoaded') && App.get('currentStackVersion')) {
       var nameVersionCombo = App.get('currentStackVersion');
       var stackName = nameVersionCombo.split('-')[0];
       var stackVersion = nameVersionCombo.split('-')[1];
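
For context, a minimal plain-JavaScript sketch (hypothetical function and values, not Ambari code) of why the extra currentStackVersion guard matters: when the stack version is still an empty string on first load, the split above would otherwise produce an empty stack name and an undefined stack version.

    // Illustrative only -- mirrors the guard added in loadRepositories above.
    function loadRepositoriesSketch(isClusterLoaded, currentStackVersion) {
      if (isClusterLoaded && currentStackVersion) {      // new combined condition
        var parts = currentStackVersion.split('-');      // e.g. "HDP-2.6"
        return { stackName: parts[0], stackVersion: parts[1] };
      }
      return null;                                       // defer until both are available
    }

    loadRepositoriesSketch(true, '');         // null (old code would have yielded '' / undefined)
    loadRepositoriesSketch(true, 'HDP-2.6');  // { stackName: 'HDP', stackVersion: '2.6' }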


[13/50] [abbrv] ambari git commit: AMBARI-21177. Ambari 3.0: Outstanding dashboard issues. (Ishan via Jaimin)

Posted by nc...@apache.org.
AMBARI-21177. Ambari 3.0: Outstanding dashboard issues. (Ishan via Jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3b575fb9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3b575fb9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3b575fb9

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 3b575fb917a559ffcbde3bb6ae114c71aa9f2b4f
Parents: acb2f98
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Wed Jun 7 10:01:31 2017 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Wed Jun 7 10:01:31 2017 -0700

----------------------------------------------------------------------
 ambari-web/app/config.js                        |   3 +-
 ambari-web/app/messages.js                      |   4 +-
 ambari-web/app/styles/application.less          |  11 ++
 ambari-web/app/styles/bootstrap_overrides.less  |  49 ++++++++
 ambari-web/app/styles/dashboard.less            | 115 ++++++++++++-------
 .../templates/common/export_metrics_menu.hbs    |  16 ++-
 ambari-web/app/templates/main/dashboard.hbs     |   2 +-
 .../main/dashboard/widgets/cluster_metrics.hbs  |  12 +-
 .../main/dashboard/widgets/hbase_links.hbs      |  15 ++-
 .../main/dashboard/widgets/hdfs_links.hbs       |  17 ++-
 .../main/dashboard/widgets/pie_chart.hbs        |  21 ++--
 .../main/dashboard/widgets/simple_text.hbs      |  21 ++--
 .../templates/main/dashboard/widgets/uptime.hbs |  16 ++-
 .../main/dashboard/widgets/yarn_links.hbs       |  18 +--
 ambari-web/app/views/common/chart/pie.js        |  10 +-
 .../dashboard/widgets/cluster_metrics_widget.js |   3 -
 .../main/dashboard/widgets/pie_chart_widget.js  |  23 ++--
 17 files changed, 241 insertions(+), 115 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/config.js b/ambari-web/app/config.js
index de4b52a..e7190be 100644
--- a/ambari-web/app/config.js
+++ b/ambari-web/app/config.js
@@ -55,7 +55,8 @@ App.isManagedMySQLForHiveEnabled = false;
 App.isStormMetricsSupported = true;
 App.healthStatusRed = '#EF6162';
 App.healthStatusGreen = '#1EB475';
-App.healthStatusOrange = '#E98A41';
+App.healthStatusOrange = '#E98A40';
+App.widgetContentColor = '#666666';
 App.inactivityRemainTime = 60; // in seconds
 App.enableLogger = true;
 App.stackVersionsAvailable = true;

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 02a54f7..6f317a3 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -313,8 +313,8 @@ Em.I18n.translations = {
   'common.testing': 'Testing',
   'common.noData': 'No Data',
   'common.export': 'Export',
-  'common.csv': 'CSV',
-  'common.json': 'JSON',
+  'common.csv': 'Save as CSV',
+  'common.json': 'Save as JSON',
   'common.timestamp': 'Timestamp',
   'common.timezone': 'Timezone',
   'common.loading.eclipses': 'Loading...',

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/styles/application.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/application.less b/ambari-web/app/styles/application.less
index 71a9183..8ae11b5 100644
--- a/ambari-web/app/styles/application.less
+++ b/ambari-web/app/styles/application.less
@@ -2691,3 +2691,14 @@ a.abort-icon:hover {
   }
 }
 
+.ellipsis-menu:after {
+  content: '\2807';
+  color: #999999;
+  font-size: 22px;
+}
+
+.button-border {
+  padding: 0px;
+  border: none;
+  background: none;
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/styles/bootstrap_overrides.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/bootstrap_overrides.less b/ambari-web/app/styles/bootstrap_overrides.less
index aabf6f2..0960a52 100644
--- a/ambari-web/app/styles/bootstrap_overrides.less
+++ b/ambari-web/app/styles/bootstrap_overrides.less
@@ -374,6 +374,55 @@ select.form-control {
   }
 }
 
+
+@media (min-width: 1500px) {
+
+  .row {
+    *zoom: 1;
+  }
+
+  .row:before,
+  .row:after {
+    line-height: 0;
+  }
+
+  .row:after {
+    clear: both;
+  }
+
+  .navbar-fixed-top .container,
+  .navbar-fixed-bottom .container {
+    width: 1300px;
+  }
+
+  .main-container {
+    width: 1340px;
+  }
+
+  .contribview  .main-container {
+    width: auto;
+  }
+
+  .contribview .navbar .main-container {
+    width: 1340px;
+    margin: 0 auto;
+  }
+
+  .thumbnails {
+    margin-left: -30px;
+  }
+
+  .thumbnails > li {
+    margin-left: 30px;
+  }
+
+  .row .thumbnails {
+    margin-left: 0;
+    margin-right: 0;
+  }
+}
+
+
 .wizard .wizard-body {
   padding: 0 !important;
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/styles/dashboard.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/dashboard.less b/ambari-web/app/styles/dashboard.less
index 53e55b1..913fc35 100644
--- a/ambari-web/app/styles/dashboard.less
+++ b/ambari-web/app/styles/dashboard.less
@@ -19,6 +19,20 @@
 @import 'common.less';
 
 #dashboard-widgets-container{
+  .tabs-left {
+    float: left;
+  }
+  .btn-toolbar {
+    float: right;
+    padding-top: 5px;
+    margin-bottom: 20px;
+  }
+  .dashboard-widgets-box {
+    clear: both;
+    display: inline-block;
+    padding: 10px;
+    background-color: #ececec;
+  }
   #widgets-options-menu {
     .add-widgets-text .dropdown-menu {
       overflow: auto;
@@ -46,23 +60,41 @@
     }
     .span2p4 {
       float: left;
-      width: 19.60%;
-      *width: 19.60%;
+      width: 22.70%;
+      *width: 22.70%;
     }
     .thumbnails > div {
-      margin: 0 3px 3px 0;
-      height: 163px;
+      margin: 10px;
+      height: 160px;
     }
     .thumbnails li {
-      height: 160px;
+      height: 157px;
       width: 100%;
       margin: 3px 3px 0 0;
+      border-radius: 2px;
+      -webkit-box-shadow: 0px 1px 1px 0px rgba(51, 51, 51, 0.15);
+      -moz-box-shadow: 0px 1px 1px 0px rgba(51, 51, 51, 0.15);
+      box-shadow: 0px 1px 1px 0px rgba(51, 51, 51, 0.15);
     }
 
     .img-thumbnail .corner-icon {
-      display: none;
       position: relative;
-      padding: 7px 0;
+      color: #999999;
+      font-size: 14px;
+      padding: 10px 15px 20px 15px;
+      float: right;
+      ul {
+        top: inherit;
+        margin-left: 20px;
+      }
+      li{
+        padding: 2.5px 0 2.5px 5px;
+        height: auto;
+        box-shadow: none;
+      }
+      li:hover {
+        background-color: #f5f5f5;
+      }
       .glyphicon-remove-sign{
         color: #000;
         text-shadow: #fff 0 0 15px;
@@ -82,10 +114,11 @@
       }
     }
     .img-thumbnail .hidden-info-general{
-      color: #555;
+      background-color: #ffffff;
+      opacity: 0.9;
+      color: #666666;
       font-size: 12px;
-      font-weight: bold;
-      text-align: center;
+      text-align: left;
       text-decoration: none;
       display: none;
       position: relative;
@@ -110,16 +143,16 @@
       }
     }
     .img-thumbnail .caption {
-      padding: 7px 5px;
-      color: #555;
-      font-weight:bold;
-      font-size: 12px;
+      padding: 10px 15px 20px 15px ;
+      color: #666666;
+      font-size: 14px;
       text-align: left;
       position: relative;
+      float: left;
     }
     .img-thumbnail .widget-content{
       text-align: center;
-      font-size: 35px;
+      font-size: 30px;
       padding-top: 40px; //svg
       position: relative;
       .disabled-hdfs-link {
@@ -139,10 +172,9 @@
     }
     .img-thumbnail .widget-content-isNA{ // for pie chart n\a
       text-align: center;
-      font-size: 35px;
-      color: #D6DDDF;
+      font-size: 30px;
+      color: #999999;
       padding-top: 70px;
-      font-weight: bold;
       position: relative;
     }
     .img-thumbnail{
@@ -165,8 +197,7 @@
         display: block;
       }
       .caption{
-        margin-left: -14.5px;
-        z-index: 7;
+        z-index: 9;
       }
       .slots-caption{
         margin-left: -13px;
@@ -178,7 +209,7 @@
         z-index: 5;
       }
       .widget-content, .widget-content-isNA {
-        .content-mx(-104px);
+        .content-mx(-114px);
       }
       .uptime-content{
         top: -116px;
@@ -228,19 +259,18 @@
           text-decoration: none;
           z-index: 9;
         }
-        .caption {
-          margin-left: -14.5px;
-        }
       }
     }
 
     .links .img-thumbnail{
       li{
         height:24px;
+        box-shadow: none;
       }
       .link-button{
-        float: right;
         margin-top: 5px;
+        padding-left: 15px;
+        font-size: 12px;
         .disabled-hdfs-quick-link {
           pointer-events: none;
           color: #808080;
@@ -256,12 +286,11 @@
         text-align: center;
         font-size: 11px;
         color: #555;
-        padding-top: 40px;
+        padding: 40px 0 0 15px;
         table{
           vertical-position: center;
-          text-align: center;
-          margin-left:auto;
-          margin-right:auto;
+          text-align: left;
+          font-size: 12px;
         }
         td{
           padding-top: 2px;
@@ -275,9 +304,6 @@
         text-decoration: none;
         z-index: 9;
       }
-      .caption{
-        margin-left: -14.5px;
-      }
     }
     .img-thumbnail .widget-content .svg {
       position: relative;
@@ -286,7 +312,6 @@
     .widget-cn(@color) {
       color: @color;
       padding-top: 70px;
-      font-weight: bold;
     }
     .is-red{
       .widget-content {
@@ -300,16 +325,24 @@
     }
     .is-green {
       .widget-content {
-        .widget-cn(@health-status-green);
+        .widget-cn(#666666);
       }
     }
     .is-na {
       position: relative;
       .widget-content {
-        .widget-cn(#D6DDDF);
+        .widget-cn(#999999);
         text-shadow: none;
       }
     }
+    .corner-icon:hover {
+      .ellipsis-menu::after {
+        color: #666666;
+      }
+      ul {
+        display:block;
+      }
+    }
   }
 }
 
@@ -320,23 +353,19 @@
     #dashboard-widgets {
       .span2p4 {
         float: left;
-        width: 19.47%;
-        *width: 19.47%;
-      }
-      .thumbnails > div {
-        margin-right: 5px;
+        width: 22.3%;
+        *width: 22.3%;
       }
       .img-thumbnail {
-        .caption, .hidden-info-two-line, .hidden-info-three-line {
+        .caption {
           font-size: 14px;
         }
-        .hidden-info-five-line, .hidden-info-six-line {
+        .hidden-info-two-line, .hidden-info-three-line, .hidden-info-five-line, .hidden-info-six-line {
           font-size: 12px;
         }
       }
       .links .img-thumbnail {
         .widget-content {
-          font-weight: bold;
           font-size: 12px;
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/templates/common/export_metrics_menu.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/export_metrics_menu.hbs b/ambari-web/app/templates/common/export_metrics_menu.hbs
index c48065a..0d4ead9 100644
--- a/ambari-web/app/templates/common/export_metrics_menu.hbs
+++ b/ambari-web/app/templates/common/export_metrics_menu.hbs
@@ -16,7 +16,17 @@
 * limitations under the License.
 }}
 
-<ul class="export-graph-list pull-right dropdown-menu">
-  <li><a {{action exportGraphData view.parentView.exportToCSVArgument target="view.parentView"}}>{{t common.csv}}</a></li>
-  <li><a {{action exportGraphData target="view.parentView"}}>{{t common.json}}</a></li>
+<ul class="dropdown-menu">
+  <li><button {{action exportGraphData view.parentView.exportToCSVArgument target="view.parentView"}}
+          class="button-border">
+    <i class="icon-file-text"></i>&nbsp;{{t common.csv}}</button>
+  </li>
+  <li><button {{action exportGraphData target="view.parentView"}}
+          class="button-border">
+    <i class="icon-file-text-alt"></i>&nbsp;{{t common.json}}</button>
+  </li>
+  <li><button {{action deleteWidget target="view.parentView"}} href="#"
+          class="button-border">
+    <i class="icon-trash"></i>&nbsp;{{t common.delete}}</button>
+  </li>
 </ul>
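
As an aside, a hypothetical plain-JavaScript sketch (names are illustrative, not the actual exportGraphData handler) of the dispatch this template relies on: the CSV item invokes the action with an extra argument, the JSON item without one, so a single handler can serve both "Save as ..." entries.

    // Illustrative only -- one handler, output format chosen by the optional argument.
    function exportGraphData(rows, asCSV) {
      if (asCSV) {
        return rows.map(function (row) { return row.join(','); }).join('\n');  // CSV text
      }
      return JSON.stringify(rows);                                             // JSON text
    }

    exportGraphData([['timestamp', 'value'], [1497024000, 42]], true);  // "timestamp,value\n1497024000,42"
    exportGraphData([['timestamp', 'value'], [1497024000, 42]]);        // '[["timestamp","value"],[1497024000,42]]'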

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/templates/main/dashboard.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard.hbs b/ambari-web/app/templates/main/dashboard.hbs
index 4b174cc..0226626 100644
--- a/ambari-web/app/templates/main/dashboard.hbs
+++ b/ambari-web/app/templates/main/dashboard.hbs
@@ -18,7 +18,7 @@
 
 <div class="row">
   <div class="summary-width col-md-12" id="dashboard-widgets-container">
-    <ul class="nav nav-tabs background-text">
+    <ul class="nav nav-tabs background-text tabs-left">
       {{#each category in view.categories}}
         {{#view view.NavItemView itemBinding="category.name" }}
           <a href="#" {{action "goToDashboardView" category.url}} >{{category.label}}</a>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/templates/main/dashboard/widgets/cluster_metrics.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/widgets/cluster_metrics.hbs b/ambari-web/app/templates/main/dashboard/widgets/cluster_metrics.hbs
index f22b9f7..18f2201 100644
--- a/ambari-web/app/templates/main/dashboard/widgets/cluster_metrics.hbs
+++ b/ambari-web/app/templates/main/dashboard/widgets/cluster_metrics.hbs
@@ -19,16 +19,14 @@
 <div class="cluster-metrics">
   <ul class="list-unstyled">
     <li class="img-thumbnail row">
-      <a {{bindAttr class=":corner-icon :col-md-1 view.parentView.isMoving:hidden"}} href="#" {{action deleteWidget target="view"}}>
-          <i class="glyphicon-remove-sign glyphicon glyphicon-large"></i>
-      </a>
       <div class="caption col-md-10">{{view.title}}</div>
+      <div class="corner-icon col-md-1">
+        <button class="dropdown-toggle ellipsis-menu button-border" data-toggle="dropdown" href="#"
+          {{action toggleFormatsList target="view"}}></button>
+        {{view view.exportMetricsMenuView}}
+      </div>
       {{#if view.isDataLoaded}}
         {{#if view.childViews.lastObject.hasData}}
-          <a {{bindAttr class="view.isExportButtonHidden:hidden :corner-icon :col-md-1"}} href="#" {{action toggleFormatsList target="view"}}>
-            <i class="glyphicon glyphicon-save"></i>
-          </a>
-          {{view view.exportMetricsMenuView}}
         {{/if}}
       {{/if}}
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/templates/main/dashboard/widgets/hbase_links.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/widgets/hbase_links.hbs b/ambari-web/app/templates/main/dashboard/widgets/hbase_links.hbs
index e96ed23..0ee9af2 100644
--- a/ambari-web/app/templates/main/dashboard/widgets/hbase_links.hbs
+++ b/ambari-web/app/templates/main/dashboard/widgets/hbase_links.hbs
@@ -19,10 +19,17 @@
 <div class="links">
   <ul class="list-unstyled">
     <li class="img-thumbnail row">
-      <a {{bindAttr class=":corner-icon :col-md-1 view.parentView.isMoving:hidden"}}
-              href="#" {{action deleteWidget target="view"}}><i class="glyphicon-remove-sign glyphicon glyphicon-large"></i></a>
 
       <div class="caption col-md-10"> {{view.title}}</div>
+      <div class="corner-icon col-md-1">
+        <button class="dropdown-toggle ellipsis-menu button-border" data-toggle="dropdown" href="#"></button>
+        <ul class="dropdown-menu">
+          <li><button type="button" href="#" class="button-border"
+            {{action deleteWidget target="view"}}>
+            <i class="icon-trash"></i>&nbsp;{{t common.delete}}</button>
+          </li>
+        </ul>
+      </div>
       {{#if view.isDataLoaded}}
         <div class="widget-content">
           <table>
@@ -60,10 +67,10 @@
           {{#if view.model.quickLinks.length}}
             {{#view App.QuickLinksView contentBinding="view.model"}}
               <div class="btn-group">
-                <button class="btn btn-default btn-xs dropdown-toggle" data-toggle="dropdown" href="#">
+                <a class="dropdown-toggle" data-toggle="dropdown" href="#">
                   {{t common.more}}
                   <span class="caret"></span>
-                </button>
+                </a>
                 <ul class="dropdown-menu">
                   {{#if view.isLoaded}}
                     {{#if view.quickLinksArray}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/templates/main/dashboard/widgets/hdfs_links.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/widgets/hdfs_links.hbs b/ambari-web/app/templates/main/dashboard/widgets/hdfs_links.hbs
index 2feb1b0..8790ac1 100644
--- a/ambari-web/app/templates/main/dashboard/widgets/hdfs_links.hbs
+++ b/ambari-web/app/templates/main/dashboard/widgets/hdfs_links.hbs
@@ -19,11 +19,16 @@
 <div class="links">
   <ul class="list-unstyled">
   <li class="img-thumbnail row">
-    <a {{bindAttr class=":corner-icon :col-md-1 view.parentView.isMoving:hidden"}} href="#" {{action deleteWidget target="view"}}>
-      <i class="glyphicon-remove-sign glyphicon glyphicon-large"></i>
-    </a>
     <div class="caption col-md-10"> {{view.title}}</div>
-
+    <div class="corner-icon col-md-1">
+      <button class="dropdown-toggle ellipsis-menu button-border" data-toggle="dropdown" href="#"></button>
+      <ul class="dropdown-menu">
+        <li><button type="button" href="#" class="button-border"
+          {{action deleteWidget target="view"}}>
+          <i class="icon-trash"></i>&nbsp;{{t common.delete}}</button>
+        </li>
+      </ul>
+    </div>
     <div class="widget-content" >
       {{#if view.isHAEnabled }}
         <table>
@@ -81,10 +86,10 @@
       {{#if view.model.quickLinks.length}}
         {{#view App.QuickLinksView contentBinding="view.model"}}
           <div class="btn-group">
-            <button class="btn btn-default dropdown-toggle" data-toggle="dropdown" href="#">
+            <a class="dropdown-toggle" data-toggle="dropdown" href="#">
               {{t common.more}}
               <span class="caret"></span>
-            </button>
+            </a>
               <ul class="dropdown-menu">
                 {{#if view.isLoaded}}
                   {{#if view.quickLinksArray}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/templates/main/dashboard/widgets/pie_chart.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/widgets/pie_chart.hbs b/ambari-web/app/templates/main/dashboard/widgets/pie_chart.hbs
index d13ddb4..a383a8b 100644
--- a/ambari-web/app/templates/main/dashboard/widgets/pie_chart.hbs
+++ b/ambari-web/app/templates/main/dashboard/widgets/pie_chart.hbs
@@ -19,17 +19,22 @@
 <div class="has-hidden-info">
   <ul class="list-unstyled">
     <li class="img-thumbnail row">
-      <a {{bindAttr class=":corner-icon :col-md-1 view.parentView.isMoving:hidden"}}
-              href="#" {{action deleteWidget target="view"}}>
-        <i class="glyphicon-remove-sign glyphicon glyphicon-large"></i>
-      </a>
 
       <div class="caption col-md-10">{{view.title}}</div>
+      <div class="corner-icon col-md-1">
+        <button class="dropdown-toggle ellipsis-menu button-border" data-toggle="dropdown" href="#"></button>
+        <ul class="dropdown-menu">
+          <li><button type="button" href="#" class="button-border"
+            {{action editWidget target="view"}}>
+            <i class="icon-pencil"></i>&nbsp;{{t common.edit}}</button>
+          </li>
+          <li><button type="button" href="#" class="button-border"
+            {{action deleteWidget target="view"}}>
+            <i class="icon-trash"></i>&nbsp;{{t common.delete}}</button>
+          </li>
+        </ul>
+      </div>
       {{#if view.isDataLoaded}}
-        <a class="corner-icon col-md-1" href="#" {{action editWidget target="view"}}>
-          <i class="glyphicon glyphicon-edit"></i>
-        </a>
-
         <div {{bindAttr class=":hidden-info-general view.hiddenInfoClass" }}>
           <table align="center">
             {{#each line in view.hiddenInfo}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/templates/main/dashboard/widgets/simple_text.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/widgets/simple_text.hbs b/ambari-web/app/templates/main/dashboard/widgets/simple_text.hbs
index f10ca19..606fef2 100644
--- a/ambari-web/app/templates/main/dashboard/widgets/simple_text.hbs
+++ b/ambari-web/app/templates/main/dashboard/widgets/simple_text.hbs
@@ -19,17 +19,22 @@
 <div class="has-hidden-info">
   <ul class="list-unstyled">
     <li class="img-thumbnail row">
-      <a {{bindAttr class=":corner-icon :col-md-1 view.parentView.isMoving:hidden"}}
-              href="#" {{action deleteWidget target="view"}}>
-        <i class="glyphicon-remove-sign glyphicon glyphicon-large"></i>
-      </a>
 
       <div class="caption col-md-10"> {{view.title}}</div>
+      <div class="corner-icon col-md-1">
+        <button class="dropdown-toggle ellipsis-menu button-border" data-toggle="dropdown" href="#"></button>
+        <ul class="dropdown-menu">
+          <li><button type="button" href="#" class="button-border"
+            {{action editWidget target="view"}}>
+            <i class="icon-pencil"></i>&nbsp;{{t common.edit}}</button>
+          </li>
+          <li><button type="button" href="#" class="button-border"
+            {{action deleteWidget target="view"}}>
+            <i class="icon-trash"></i>&nbsp;{{t common.delete}}</button>
+          </li>
+        </ul>
+      </div>
       {{#if view.isDataLoaded}}
-        <a class="corner-icon col-md-1" href="#" {{action editWidget target="view"}}>
-          <i class="glyphicon glyphicon-edit"></i>
-        </a>
-
         <div {{bindAttr class=":hidden-info-general view.hiddenInfoClass" }}>
           <table align="center">
             {{#each line in view.hiddenInfo}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/templates/main/dashboard/widgets/uptime.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/widgets/uptime.hbs b/ambari-web/app/templates/main/dashboard/widgets/uptime.hbs
index 5a66ba5..befc9da 100644
--- a/ambari-web/app/templates/main/dashboard/widgets/uptime.hbs
+++ b/ambari-web/app/templates/main/dashboard/widgets/uptime.hbs
@@ -19,12 +19,16 @@
 <div class="has-hidden-info">
   <ul class="list-unstyled">
     <li class="img-thumbnail row">
-      <a {{bindAttr class=":corner-icon :col-md-1 view.parentView.isMoving:hidden"}}
-              href="#" {{action deleteWidget target="view"}}>
-        <i class="glyphicon-remove-sign glyphicon glyphicon-large"></i>
-      </a>
-
-      <div class="caption col-md-11"> {{view.title}} </div>
+      <div class="caption col-md-10"> {{view.title}} </div>
+      <div class="corner-icon col-md-1">
+        <button class="dropdown-toggle ellipsis-menu button-border" data-toggle="dropdown" href="#"></button>
+        <ul class="dropdown-menu">
+          <li><button type="button" href="#" class="button-border"
+            {{action deleteWidget target="view"}}>
+            <i class="icon-trash"></i>&nbsp;{{t common.delete}}</button>
+          </li>
+        </ul>
+      </div>
       {{#if view.isDataLoaded}}
         <div {{bindAttr class=":hidden-info-general view.hiddenInfoClass"}}>
           <table align="center">

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/templates/main/dashboard/widgets/yarn_links.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/widgets/yarn_links.hbs b/ambari-web/app/templates/main/dashboard/widgets/yarn_links.hbs
index a8612f7..2700a19 100644
--- a/ambari-web/app/templates/main/dashboard/widgets/yarn_links.hbs
+++ b/ambari-web/app/templates/main/dashboard/widgets/yarn_links.hbs
@@ -19,12 +19,16 @@
 <div class="links">
   <ul class="list-unstyled">
     <li class="img-thumbnail row">
-      <a {{bindAttr class=":corner-icon :col-md-1 view.parentView.isMoving:hidden"}}
-              href="#" {{action deleteWidget target="view"}}>
-        <i class="glyphicon-remove-sign glyphicon glyphicon-large"></i>
-      </a>
-
       <div class="caption col-md-10"> {{view.title}}</div>
+      <div class="corner-icon col-md-1">
+        <button class="dropdown-toggle ellipsis-menu button-border" data-toggle="dropdown" href="#"></button>
+        <ul class="dropdown-menu">
+          <li><button type="button" href="#" class="button-border"
+            {{action deleteWidget target="view"}}>
+            <i class="icon-trash"></i>&nbsp;{{t common.delete}}</button>
+          </li>
+        </ul>
+      </div>
       {{#if view.isDataLoaded}}
         <div class="widget-content">
           <table>
@@ -48,10 +52,10 @@
           {{#if view.model.quickLinks.length}}
             {{#view App.QuickLinksView contentBinding="view.model"}}
               <div class="btn-group">
-                <button class="btn btn-default btn-xs dropdown-toggle" data-toggle="dropdown" href="#">
+                <a class="dropdown-toggle" data-toggle="dropdown" href="#">
                   {{t common.more}}
                   <span class="caret"></span>
-                </button>
+                </a>
                 <ul class="dropdown-menu">
                   {{#if view.isLoaded}}
                     {{#if view.quickLinksArray}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/views/common/chart/pie.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/chart/pie.js b/ambari-web/app/views/common/chart/pie.js
index ce9bda4..f4c402a 100644
--- a/ambari-web/app/views/common/chart/pie.js
+++ b/ambari-web/app/views/common/chart/pie.js
@@ -19,13 +19,13 @@
 var App = require('app');
 
 App.ChartPieView = Em.View.extend({
-  w:90,
-  h:90,
+  w:100,
+  h:100,
   data:[300, 500],
   id:null,
   palette: new Rickshaw.Color.Palette({ scheme: 'munin'}),
   stroke: 'black',
-  strokeWidth: 2,
+  strokeWidth: 1,
   donut:d3.layout.pie().sort(null),
   existCenterText: false,
   centerTextColor: 'black',
@@ -69,8 +69,8 @@ App.ChartPieView = Em.View.extend({
         .append("svg:text")
         .style('fill', thisChart.get('centerTextColor'))
         .attr("stroke", thisChart.get('centerTextColor'))
-        .attr("font-size", 17)
-        .attr("transform", "translate(" + thisChart.get('w') / 2 + "," + ((thisChart.get('h') / 2) + 3) + ")")
+        .attr("font-size", 24)
+        .attr("transform", "translate(" + thisChart.get('w') / 2 + "," + ((thisChart.get('h') / 2) + 8) + ")")
         .attr("text-anchor", "middle")
         .text(function(d) {
                  return thisChart.get('data')[0] + '%';
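
For reference, a standalone d3 sketch (the '#pie-demo' container and the 75% label are assumptions, not Ambari code) of the centering technique these lines tune: the label is translated to the middle of the SVG and nudged down a few pixels because SVG text is positioned by its baseline rather than its vertical midpoint.

    // Illustrative only -- centers a percentage label the same way as above.
    var w = 100, h = 100;
    var svg = d3.select('#pie-demo')                 // assumed container element
      .append('svg:svg')
      .attr('width', w)
      .attr('height', h);
    svg.append('svg:text')
      .attr('font-size', 24)
      .attr('text-anchor', 'middle')                                      // horizontal centering
      .attr('transform', 'translate(' + w / 2 + ',' + (h / 2 + 8) + ')')  // +8px baseline offset
      .text('75%');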

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/views/main/dashboard/widgets/cluster_metrics_widget.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets/cluster_metrics_widget.js b/ambari-web/app/views/main/dashboard/widgets/cluster_metrics_widget.js
index 384a898..fbf6d21 100644
--- a/ambari-web/app/views/main/dashboard/widgets/cluster_metrics_widget.js
+++ b/ambari-web/app/views/main/dashboard/widgets/cluster_metrics_widget.js
@@ -28,9 +28,6 @@ App.ClusterMetricsDashboardWidgetView = App.DashboardWidgetView.extend(App.Expor
 
   didInsertElement: function () {
     var self = this;
-    this.$().on('mouseleave', function () {
-      self.set('isExportMenuHidden', true);
-    });
     App.tooltip(this.$('.corner-icon > .glyphicon-save'), {
       title: Em.I18n.t('common.export')
     });

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b575fb9/ambari-web/app/views/main/dashboard/widgets/pie_chart_widget.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets/pie_chart_widget.js b/ambari-web/app/views/main/dashboard/widgets/pie_chart_widget.js
index 5dd85d1..34809b8 100644
--- a/ambari-web/app/views/main/dashboard/widgets/pie_chart_widget.js
+++ b/ambari-web/app/views/main/dashboard/widgets/pie_chart_widget.js
@@ -84,10 +84,10 @@ App.PieChartDashboardWidgetView = App.DashboardWidgetView.extend({
   content: App.ChartPieView.extend({
     model: null,  //data bind here
     id: Em.computed.alias('parentView.widgetHtmlId'), // html id
-    stroke: '#D6DDDF', //light grey
+    stroke: 'transparent',
     thresholdMin: null, //bind from parent
     thresholdMax: null,
-    innerR: 25,
+    innerR: 40,
 
     existCenterText: true,
     centerTextColor: Em.computed.alias('contentColor'),
@@ -111,20 +111,21 @@ App.PieChartDashboardWidgetView = App.DashboardWidgetView.extend({
       var thresholdMax = parseFloat(this.get('thresholdMax'));
       if (used <= thresholdMin) {
         this.set('palette', new Rickshaw.Color.Palette({
-          scheme: ['#FFFFFF', App.healthStatusGreen].reverse()
+          scheme: ['#DDDDDD', App.healthStatusGreen].reverse()
         }));
-        return App.healthStatusGreen;
       }
-      if (used <= thresholdMax) {
+      else if (used <= thresholdMax) {
         this.set('palette', new Rickshaw.Color.Palette({
-          scheme: ['#FFFFFF', App.healthStatusOrange].reverse()
+          scheme: ['#DDDDDD', App.healthStatusOrange].reverse()
         }));
-        return App.healthStatusOrange;
       }
-      this.set('palette', new Rickshaw.Color.Palette({
-        scheme: ['#FFFFFF', App.healthStatusRed].reverse()
-      }));
-      return App.healthStatusRed;
+      else {
+        this.set('palette', new Rickshaw.Color.Palette({
+          scheme: ['#DDDDDD', App.healthStatusRed].reverse()
+        }));
+      }
+      return App.widgetContentColor;
+
     }.property('data', 'thresholdMin', 'thresholdMax'),
 
     // refresh text and color when data in model changed