Posted to commits@ambari.apache.org by rl...@apache.org on 2017/11/29 16:58:18 UTC

[01/24] ambari git commit: AMBARI-22520. PERF stack: deploy fails due to incorrect stack upgrade related information (aonishuk)

Repository: ambari
Updated Branches:
  refs/heads/branch-feature-AMBARI-20859 167b48294 -> deb180036


AMBARI-22520. PERF stack: deploy fails due to incorrect stack upgrade related information (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b832f706
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b832f706
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b832f706

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: b832f70620f9f018421479efbf056d8e4c014339
Parents: 0b98ccd
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Nov 28 12:16:04 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Nov 28 12:16:04 2017 +0200

----------------------------------------------------------------------
 .../PERF/1.0/configuration/cluster-env.xml      | 26 ++++++++++++++++++++
 .../PERF/1.0/properties/stack_packages.json     |  8 ++++++
 2 files changed, 34 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b832f706/ambari-server/src/main/resources/stacks/PERF/1.0/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/configuration/cluster-env.xml
index f19ac52..12a0527 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/configuration/cluster-env.xml
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/configuration/cluster-env.xml
@@ -109,4 +109,30 @@
     </value-attributes>
     <on-ambari-upgrade add="true"/>
   </property>
+  <!-- Define stack_packages property in the base stack. DO NOT override this property for each stack version -->
+  <property>
+    <name>stack_packages</name>
+    <value/>
+    <description>Associations between component and stack-select tools.</description>
+    <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
+    <value-attributes>
+      <property-file-name>stack_packages.json</property-file-name>
+      <property-file-type>json</property-file-type>
+      <read-only>true</read-only>
+      <overridable>false</overridable>
+      <visible>false</visible>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>stack_root</name>
+    <value>{"PERF":"/usr/perf"}</value>
+    <description>JSON which defines the stack root by stack name</description>
+    <value-attributes>
+      <read-only>true</read-only>
+      <overridable>false</overridable>
+      <visible>false</visible>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b832f706/ambari-server/src/main/resources/stacks/PERF/1.0/properties/stack_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/PERF/1.0/properties/stack_packages.json
new file mode 100644
index 0000000..6930fe6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/properties/stack_packages.json
@@ -0,0 +1,8 @@
+{
+  "PERF": {
+    "stack-select": {
+    },
+    "upgrade-dependencies" : {
+    }
+  }
+}
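
The new stack_root property above carries a small JSON map from stack name to installation root. The following is a minimal sketch of reading such a value; Gson is used purely for illustration here and is an assumption, not necessarily what Ambari's own consumers of this property use.

  import java.lang.reflect.Type;
  import java.util.Map;

  import com.google.gson.Gson;
  import com.google.gson.reflect.TypeToken;

  public class StackRootLookupSketch {
    public static void main(String[] args) {
      // Value of the cluster-env/stack_root property added in the patch above.
      String stackRootJson = "{\"PERF\":\"/usr/perf\"}";
      Type mapType = new TypeToken<Map<String, String>>() {}.getType();
      Map<String, String> rootsByStack = new Gson().fromJson(stackRootJson, mapType);
      System.out.println(rootsByStack.get("PERF"));  // prints /usr/perf
    }
  }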


[14/24] ambari git commit: AMBARI-22458. SNMP Trap should have sysuptime field filled - checkstyle fix

Posted by rl...@apache.org.
AMBARI-22458. SNMP Trap should have sysuptime field filled - checkstyle fix


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7fd404ad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7fd404ad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7fd404ad

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 7fd404add302aed0c3d2fe52a6ddacb957a992b6
Parents: 31a0f19
Author: Doroszlai, Attila <ad...@hortonworks.com>
Authored: Tue Nov 28 22:37:33 2017 +0100
Committer: Doroszlai, Attila <ad...@hortonworks.com>
Committed: Tue Nov 28 22:37:33 2017 +0100

----------------------------------------------------------------------
 .../notifications/dispatchers/AmbariSNMPDispatcher.java       | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7fd404ad/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcher.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcher.java b/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcher.java
index 8e397ed..f4b2564 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcher.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcher.java
@@ -19,6 +19,8 @@
 package org.apache.ambari.server.notifications.dispatchers;
 
 import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.lang.management.RuntimeMXBean;
 import java.math.BigDecimal;
 import java.util.Collections;
 import java.util.HashSet;
@@ -36,15 +38,12 @@ import org.snmp4j.mp.SnmpConstants;
 import org.snmp4j.smi.Integer32;
 import org.snmp4j.smi.OID;
 import org.snmp4j.smi.OctetString;
+import org.snmp4j.smi.TimeTicks;
 import org.snmp4j.smi.VariableBinding;
 import org.snmp4j.util.DefaultPDUFactory;
-import org.snmp4j.smi.TimeTicks;
 
 import com.google.inject.Singleton;
 
-import java.lang.management.RuntimeMXBean;
-import java.lang.management.ManagementFactory;
-
 /**
  * The {@link AmbariSNMPDispatcher} class is used to dispatch {@link AlertNotification} via SNMP using predefined Ambari OIDs.
  *


[08/24] ambari git commit: AMBARI-22458 - SNMP Trap should contain sysuptime field filled (Amarnathreddy Pappu via jonathanhurley)

Posted by rl...@apache.org.
AMBARI-22458 - SNMP Trap should contain sysuptime field filled (Amarnathreddy Pappu via jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/73d99d8c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/73d99d8c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/73d99d8c

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 73d99d8c3f7f0067f06b1a0c04eccbcf9554c0bd
Parents: 3fa1289
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Nov 28 10:29:59 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Nov 28 10:30:41 2017 -0500

----------------------------------------------------------------------
 .../notifications/dispatchers/AmbariSNMPDispatcher.java  | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/73d99d8c/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcher.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcher.java b/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcher.java
index 596079b..8e397ed 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcher.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcher.java
@@ -38,9 +38,13 @@ import org.snmp4j.smi.OID;
 import org.snmp4j.smi.OctetString;
 import org.snmp4j.smi.VariableBinding;
 import org.snmp4j.util.DefaultPDUFactory;
+import org.snmp4j.smi.TimeTicks;
 
 import com.google.inject.Singleton;
 
+import java.lang.management.RuntimeMXBean;
+import java.lang.management.ManagementFactory;
+
 /**
  * The {@link AmbariSNMPDispatcher} class is used to dispatch {@link AlertNotification} via SNMP using predefined Ambari OIDs.
  *
@@ -113,7 +117,12 @@ public class AmbariSNMPDispatcher extends SNMPDispatcher {
         }
 
         pdu.setType(snmpVersion.getTrapType());
-        // Set trap oid for PDU
+    
+        RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean();
+        long uptimeInMillis = runtimeMXBean.getUptime();
+        pdu.add(new VariableBinding(SnmpConstants.sysUpTime, new TimeTicks(uptimeInMillis)));
+
+       // Set trap oid for PDU
         pdu.add(new VariableBinding(SnmpConstants.snmpTrapOID, new OID(AMBARI_ALERT_TRAP_OID)));
         // Set notification body and subject for PDU objects with identifiers specified in dispatch properties.
         AlertNoticeDispatchService.AlertInfo alertInfo = alertNotification.getAlertInfo();
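
The essence of this change is binding the JVM uptime as the standard sysUpTime varbind before the trap OID is added. A self-contained sketch of that step, using the same snmp4j types as the patch, is below. One hedged note: SNMP TimeTicks are defined in hundredths of a second while RuntimeMXBean.getUptime() returns milliseconds; the committed code passes the value through unscaled, and the sketch simply mirrors the patch.

  import java.lang.management.ManagementFactory;

  import org.snmp4j.PDU;
  import org.snmp4j.mp.SnmpConstants;
  import org.snmp4j.smi.TimeTicks;
  import org.snmp4j.smi.VariableBinding;

  public class SysUpTimeBindingSketch {
    /** Adds a sysUpTime varbind derived from JVM uptime, mirroring the dispatcher change above. */
    static void addSysUpTime(PDU pdu) {
      long uptimeInMillis = ManagementFactory.getRuntimeMXBean().getUptime();
      pdu.add(new VariableBinding(SnmpConstants.sysUpTime, new TimeTicks(uptimeInMillis)));
    }
  }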


[23/24] ambari git commit: AMBARI-22543. Log Search: backend should not return with 302 redirect if authentication fails (oleewere)

Posted by rl...@apache.org.
AMBARI-22543. Log Search: backend should not return with 302 redirect if authentication fails (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0d7f6096
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0d7f6096
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0d7f6096

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 0d7f60961fba7c5706ef3d79a7d9507d097fab6e
Parents: f6fd9b5
Author: Oliver Szabo <ol...@gmail.com>
Authored: Wed Nov 29 14:08:00 2017 +0100
Committer: Oliver Szabo <ol...@gmail.com>
Committed: Wed Nov 29 17:38:35 2017 +0100

----------------------------------------------------------------------
 .../filters/LogsearchAuthenticationEntryPoint.java   | 15 +++++----------
 1 file changed, 5 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0d7f6096/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
index 2fe5f7b..b1ceb3a 100644
--- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
@@ -24,12 +24,13 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.security.core.AuthenticationException;
 import org.springframework.security.web.authentication.LoginUrlAuthenticationEntryPoint;
 
 public class LogsearchAuthenticationEntryPoint extends LoginUrlAuthenticationEntryPoint {
-  private static final Logger logger = Logger.getLogger(LogsearchAuthenticationEntryPoint.class);
+  private static final Logger logger = LoggerFactory.getLogger(LogsearchAuthenticationEntryPoint.class);
 
   public LogsearchAuthenticationEntryPoint(String loginFormUrl) {
     super(loginFormUrl);
@@ -38,13 +39,7 @@ public class LogsearchAuthenticationEntryPoint extends LoginUrlAuthenticationEnt
   @Override
   public void commence(HttpServletRequest request, HttpServletResponse response, AuthenticationException authException)
     throws IOException, ServletException {
-    String ajaxRequestHeader = request.getHeader("X-Requested-With");
-    if (ajaxRequestHeader != null && ajaxRequestHeader.equalsIgnoreCase("XMLHttpRequest")) {
-      logger.debug("AJAX request. Authentication required. Returning URL=" + request.getRequestURI());
-      response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Session Timeout");
-    } else {
-      logger.debug("Redirecting to login page :" + this.getLoginFormUrl());
-      super.commence(request, response, authException);
-    }
+    logger.debug("Got 401 from request: {}", request.getRequestURI());
+    response.sendError(HttpServletResponse.SC_UNAUTHORIZED);
   }
 }
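
With the redirect removed, an unauthenticated API call should now surface a plain 401. The small client-side sketch below makes that behaviour visible; the host, port and path are placeholders and would need to match an actual Log Search deployment.

  import java.net.HttpURLConnection;
  import java.net.URL;

  public class UnauthenticatedCallCheck {
    public static void main(String[] args) throws Exception {
      // Placeholder endpoint -- substitute the URL of a real Log Search server instance.
      URL url = new URL("http://localhost:61888/api/v1/service/logs");
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      conn.setInstanceFollowRedirects(false);  // would have exposed the old 302 behaviour
      // Expected after this change: 401 instead of a redirect to the login page.
      System.out.println("HTTP status: " + conn.getResponseCode());
    }
  }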


[22/24] ambari git commit: AMBARI-22484. Stack advisor should disallow lzo enable without accepting license agreement. Additional fixes. (mpapirkovskyy)

Posted by rl...@apache.org.
AMBARI-22484. Stack advisor should disallow lzo enable without accepting license agreement. Additional fixes. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e12efe38
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e12efe38
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e12efe38

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: e12efe38a0cdc6a0e1554026949bfeef60673a5a
Parents: 82692bd
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Wed Nov 29 17:23:49 2017 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Wed Nov 29 18:30:08 2017 +0200

----------------------------------------------------------------------
 .../stackadvisor/StackAdvisorRequest.java       |  8 +++++
 .../stacks/HDP/2.0.6/services/stack_advisor.py  | 36 +++++++++++++++-----
 .../src/main/resources/stacks/stack_advisor.py  |  9 +++++
 .../stacks/2.0.6/common/test_stack_advisor.py   | 14 ++++++--
 4 files changed, 55 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e12efe38/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
index 62b8d15..b30eec6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
@@ -123,6 +123,9 @@ public class StackAdvisorRequest {
     this.configGroups = configGroups;
   }
 
+  /**
+   * @return true if GPL license is accepted, false otherwise
+   */
   public Boolean getGplLicenseAccepted() {
     return gplLicenseAccepted;
   }
@@ -199,6 +202,11 @@ public class StackAdvisorRequest {
       return this;
     }
 
+    /**
+     * Set GPL license acceptance parameter to request.
+     * @param gplLicenseAccepted is GPL license accepted.
+     * @return stack advisor request builder.
+     */
     public StackAdvisorRequestBuilder withGPLLicenseAccepted(
         Boolean gplLicenseAccepted) {
       this.instance.gplLicenseAccepted = gplLicenseAccepted;

http://git-wip-us.apache.org/repos/asf/ambari/blob/e12efe38/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index bfa2f5a..5584377 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -352,20 +352,38 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
     self.recommendHadoopProxyUsers(configurations, services, hosts)
 
   def getLZOSupportValidationItems(self, properties, services):
+    '''
+    Checks GPL license is accepted when GPL software is used.
+    :param properties: dict of properties' name and value pairs
+    :param services: list of services
+    :return: NOT_APPLICABLE messages in case GPL license is not accepted
+    '''
     services_list = self.get_services_list(services)
 
+    validations = []
     if "HDFS" in services_list:
       lzo_allowed = services["gpl-license-accepted"]
-      property_name = "io.compression.codec.lzo.class"
-      if property_name in properties:
-        property_value = properties.get(property_name)
-        if not lzo_allowed and "com.hadoop.compression.lzo.LzoCodec" in property_value:
-          return [{"config-name": property_name, "item": self.getErrorItem(
-            "Your Ambari Server has not been configured to download LZO and install it. "
-            "LZO is GPL software and requires you to accept a license prior to use. "
-            "Please refer to this documentation to configure Ambari before proceeding.")}]
 
-    return []
+      self.validatePropertyToLZOCodec("io.compression.codecs", properties, lzo_allowed, validations)
+      self.validatePropertyToLZOCodec("io.compression.codec.lzo.class", properties, lzo_allowed, validations)
+    return validations
+
+  def validatePropertyToLZOCodec(self, property_name, properties, lzo_allowed, validations):
+    '''
+    Checks specified property contains LZO codec class and requires GPL license acceptance.
+    :param property_name: property name
+    :param properties: dict of properties' name and value pairs
+    :param lzo_allowed: is gpl license accepted
+    :param validations: list with validation failures
+    '''
+    lzo_codec_class = "com.hadoop.compression.lzo.LzoCodec"
+    if property_name in properties:
+      property_value = properties.get(property_name)
+      if not lzo_allowed and lzo_codec_class in property_value:
+        validations.append({"config-name": property_name, "item": self.getNotApplicableItem(
+          "Your Ambari Server has not been configured to download LZO and install it. "
+          "LZO is GPL software and requires you to accept a license prior to use. "
+          "Please refer to the documentation to configure Ambari before proceeding.")})
 
   def recommendHbaseConfigurations(self, configurations, clusterData, services, hosts):
     # recommendations for HBase env config

http://git-wip-us.apache.org/repos/asf/ambari/blob/e12efe38/ambari-server/src/main/resources/stacks/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/stack_advisor.py b/ambari-server/src/main/resources/stacks/stack_advisor.py
index 89f2997..0dcfff8 100644
--- a/ambari-server/src/main/resources/stacks/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/stack_advisor.py
@@ -1749,6 +1749,15 @@ class DefaultStackAdvisor(StackAdvisor):
     """
     return {"level": "ERROR", "message": message}
 
+  def getNotApplicableItem(self, message):
+    '''
+    Creates report about validation error that can not be ignored. 
+    UI should not allow the proceeding of work.
+    :param message: error description.
+    :return: report about error.
+    '''
+    return {"level": "NOT_APPLICABLE", "message": message}
+
   def getComponentHostNames(self, servicesDict, serviceName, componentName):
     for service in servicesDict["services"]:
       if service["StackServices"]["service_name"] == serviceName:

http://git-wip-us.apache.org/repos/asf/ambari/blob/e12efe38/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 6c774af..300ffe9 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -2522,7 +2522,8 @@ class TestHDP206StackAdvisor(TestCase):
                   'hadoop.proxyuser.hdfs-user.groups': '*',
                   'hadoop.proxyuser.yarn-user.hosts': 'host1,host2',
                   'hadoop.proxyuser.yarn-user.groups': '*',
-                  'io.compression.codec.lzo.class': 'com.hadoop.compression.lzo.LzoCodec'}
+                  'io.compression.codec.lzo.class': 'com.hadoop.compression.lzo.LzoCodec',
+                  'io.compression.codecs': 'AnotherCodec, com.hadoop.compression.lzo.LzoCodec'}
     services = {
       'services':  [
         { 'StackServices': {'service_name': 'HDFS'}},
@@ -2558,10 +2559,17 @@ class TestHDP206StackAdvisor(TestCase):
     res_expected = [{'config-type': 'core-site',
                      'message': 'Your Ambari Server has not been configured to download LZO and install it. '
                                 'LZO is GPL software and requires you to accept a license prior to use. '
-                                'Please refer to this documentation to configure Ambari before proceeding.',
+                                'Please refer to the documentation to configure Ambari before proceeding.',
+                     'type': 'configuration',
+                     'config-name': 'io.compression.codecs',
+                     'level': 'NOT_APPLICABLE'},
+                    {'config-type': 'core-site',
+                     'message': 'Your Ambari Server has not been configured to download LZO and install it. '
+                                'LZO is GPL software and requires you to accept a license prior to use. '
+                                'Please refer to the documentation to configure Ambari before proceeding.',
                      'type': 'configuration',
                      'config-name': 'io.compression.codec.lzo.class',
-                     'level': 'ERROR'}]
+                     'level': 'NOT_APPLICABLE'}]
 
     res = self.stackAdvisor.validateHDFSConfigurationsCoreSite(properties, {}, configurations, services, hosts)
     self.assertEquals(res, res_expected)


[13/24] ambari git commit: AMBARI-22535 - Remove extjs From Oozie Installations By Default (jonathanhurley)

Posted by rl...@apache.org.
AMBARI-22535 - Remove extjs From Oozie Installations By Default (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/31a0f197
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/31a0f197
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/31a0f197

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 31a0f1971a2be44c5322e7817a57d9e8aa1b606c
Parents: 11d7a6a
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Nov 28 13:50:49 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Nov 28 15:27:27 2017 -0500

----------------------------------------------------------------------
 .../OOZIE/4.2.0.3.0/metainfo.xml                |  9 ++---
 .../stacks/HDP/2.6/services/OOZIE/metainfo.xml  | 37 ++++++++++++++++++++
 2 files changed, 39 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/31a0f197/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/metainfo.xml b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/metainfo.xml
index 47b3705..11ef853 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/metainfo.xml
@@ -25,7 +25,8 @@
         <enabled>true</enabled>
       </credential-store>
       <displayName>Oozie</displayName>
-      <comment>System for workflow coordination and execution of Apache Hadoop jobs.  This also includes the installation of the optional Oozie Web Console which relies on and will install the &lt;a target="_blank" href="http://www.sencha.com/legal/open-source-faq/"&gt;ExtJS&lt;/a&gt; Library.
+      <comment>
+        System for workflow coordination and execution of Apache Hadoop jobs.
       </comment>
       <version>4.2.0.3.0</version>
       <components>
@@ -125,9 +126,6 @@
             <package>
               <name>zip</name>
             </package>
-            <package>
-              <name>extjs</name>
-            </package>
           </packages>
         </osSpecific>
         <osSpecific>
@@ -150,9 +148,6 @@
             <package>
               <name>falcon-${stack_version}</name>
             </package>
-            <package>
-              <name>extjs</name>
-            </package>
           </packages>
         </osSpecific>
       </osSpecifics>

http://git-wip-us.apache.org/repos/asf/ambari/blob/31a0f197/ambari-server/src/main/resources/stacks/HDP/2.6/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/OOZIE/metainfo.xml
index c9dfaad..9b12d01 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/OOZIE/metainfo.xml
@@ -20,7 +20,44 @@
   <services>
     <service>
       <name>OOZIE</name>
+      <comment>
+        System for workflow coordination and execution of Apache Hadoop jobs.
+      </comment>      
       <version>4.2.0</version>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>zip</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,amazon2015,redhat6,redhat7,suse11,suse12</osFamily>
+          <packages>
+            <package>
+              <name>oozie_${stack_version}</name>
+            </package>
+            <package>
+              <name>falcon_${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>debian7,ubuntu12,ubuntu14,ubuntu16</osFamily>
+          <packages>
+            <package>
+              <name>oozie-${stack_version}</name>
+            </package>
+            <package>
+              <name>falcon-${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
       <configuration-dependencies>
         <config-type>application-properties</config-type>
       </configuration-dependencies>


[03/24] ambari git commit: AMBARI-22526. Log Feeder: handle multiple '=' (split char) in key/value filter (oleewere)

Posted by rl...@apache.org.
AMBARI-22526. Log Feeder: handle multiple '=' (split char) in key/value filter (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/33ee1a73
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/33ee1a73
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/33ee1a73

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 33ee1a7381237a6cd4a5159d0f8ddc601f342529
Parents: 530f1d5
Author: Oliver Szabo <ol...@gmail.com>
Authored: Tue Nov 28 15:08:51 2017 +0100
Committer: Oliver Szabo <ol...@gmail.com>
Committed: Tue Nov 28 15:17:31 2017 +0100

----------------------------------------------------------------------
 .../java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java  | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/33ee1a73/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
index 670b1c3..8e5aee8 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
@@ -128,14 +128,14 @@ public class FilterKeyValue extends Filter {
 
   private String[] getNameValue(String nv) {
     String splitPattern = Pattern.quote(valueSplit);
-    return nv.split(splitPattern);
+    return nv.split(splitPattern, 2);
   }
 
   private void logParseError(String inputStr) {
     errorMetric.value++;
     String logMessageKey = this.getClass().getSimpleName() + "_PARSEERROR";
     LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error parsing string. length=" + inputStr.length() + ", input=" +
-        input.getShortDescription() + ". First upto 100 characters=" + StringUtils.abbreviate(inputStr, 100), null, LOG,
+        input.getShortDescription() + ". First upto 200 characters=" + StringUtils.abbreviate(inputStr, 200), null, LOG,
         Level.ERROR);
   }
 
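The one-line fix matters because an unbounded String.split() fragments any value that itself contains the split character. A standalone sketch of the difference follows; the sample input string is made up for illustration.

  import java.util.Arrays;
  import java.util.regex.Pattern;

  public class SplitLimitSketch {
    public static void main(String[] args) {
      String valueSplit = "=";
      String nv = "url=jdbc:mysql://db-host:3306/hive?autoReconnect=true";  // value contains '='
      String splitPattern = Pattern.quote(valueSplit);
      // Unbounded split breaks the value apart: [url, jdbc:mysql://db-host:3306/hive?autoReconnect, true]
      System.out.println(Arrays.toString(nv.split(splitPattern)));
      // A limit of 2 keeps everything after the first '=' intact as the value.
      System.out.println(Arrays.toString(nv.split(splitPattern, 2)));
    }
  }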


[09/24] ambari git commit: AMBARI-22523. Service config changes on hdp + hdf cluster throws Consistency check failed. (mpapirkovskyy)

Posted by rl...@apache.org.
AMBARI-22523. Service config changes on hdp + hdf cluster throws Consistency check failed. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/020e1528
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/020e1528
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/020e1528

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 020e15281e6466ae436d228dcc7428c86560882a
Parents: 73d99d8
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Mon Nov 20 17:19:12 2017 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Tue Nov 28 18:31:29 2017 +0200

----------------------------------------------------------------------
 .../resources/stacks/HDP/2.6/services/stack_advisor.py   |  3 +++
 .../test/python/stacks/2.6/common/test_stack_advisor.py  | 11 +++++++++++
 2 files changed, 14 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/020e1528/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
index fc12d37..6951282 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
@@ -640,6 +640,9 @@ class HDP26StackAdvisor(HDP25StackAdvisor):
         else:
             druid_broker_host_port = "localhost:8083"
 
+        druid_metadata_uri = ""
+        druid_metadata_user = ""
+        druid_metadata_type = ""
         if 'druid-common' in services['configurations']:
             druid_metadata_uri = services['configurations']['druid-common']['properties']['druid.metadata.storage.connector.connectURI']
             druid_metadata_type = services['configurations']['druid-common']['properties']['druid.metadata.storage.type']

http://git-wip-us.apache.org/repos/asf/ambari/blob/020e1528/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
index f4c5508..b112ffb 100644
--- a/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
@@ -1949,6 +1949,17 @@ class TestHDP26StackAdvisor(TestCase):
     self.stackAdvisor.recommendHIVEConfigurations(recommendedConfigurations, clusterData, services, hosts)
     self.assertEquals(recommendedConfigurations, expected)
 
+    # case there are not druid-common configs present
+    del services['configurations']['druid-common']
+    expected['hive-interactive-site']['properties']['hive.druid.broker.address.default'] = 'c6401.ambari.apache.org:8083'
+    expected['hive-interactive-site']['properties']['hive.druid.metadata.uri'] = ''
+    expected['hive-interactive-site']['properties']['hive.druid.metadata.username'] = ''
+    expected['hive-interactive-site']['properties']['hive.druid.metadata.db.type'] = ''
+
+    recommendedConfigurations = {}
+    self.stackAdvisor.recommendHIVEConfigurations(recommendedConfigurations, clusterData, services, hosts)
+    self.assertEquals(recommendedConfigurations, expected)
+
 
   def test_recommendHBASEConfigurations(self):
     configurations = {


[19/24] ambari git commit: AMBARI-22480. Validate blueprint does not allow lzo enable without setup with license agreement. (mpapirkovskyy)

Posted by rl...@apache.org.
AMBARI-22480. Validate blueprint does not allow lzo enable without setup with license agreement. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9bbc0ef7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9bbc0ef7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9bbc0ef7

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 9bbc0ef720bc91ff39701955984bb9635d811c59
Parents: 41853a1
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Wed Nov 29 16:55:34 2017 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Wed Nov 29 18:30:08 2017 +0200

----------------------------------------------------------------------
 .../internal/BlueprintResourceProvider.java     |  3 +-
 .../ambari/server/topology/Blueprint.java       |  2 +-
 .../ambari/server/topology/BlueprintImpl.java   |  2 +-
 .../server/topology/BlueprintValidator.java     |  2 +-
 .../server/topology/BlueprintValidatorImpl.java | 21 +++++-
 .../server/topology/BlueprintImplTest.java      | 69 ++++++++++++++++++--
 .../topology/BlueprintValidatorImplTest.java    | 10 ++-
 7 files changed, 95 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9bbc0ef7/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
index 67f5448..8f4d62e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
@@ -56,6 +56,7 @@ import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.topology.Blueprint;
 import org.apache.ambari.server.topology.BlueprintFactory;
+import org.apache.ambari.server.topology.GPLLicenseNotAcceptedException;
 import org.apache.ambari.server.topology.InvalidTopologyException;
 import org.apache.ambari.server.topology.SecurityConfiguration;
 import org.apache.ambari.server.topology.SecurityConfigurationFactory;
@@ -519,7 +520,7 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
 
         try {
           blueprint.validateRequiredProperties();
-        } catch (InvalidTopologyException e) {
+        } catch (InvalidTopologyException | GPLLicenseNotAcceptedException e) {
           throw new IllegalArgumentException("Blueprint configuration validation failed: " + e.getMessage(), e);
         }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bbc0ef7/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java
index 779a02d..6ed38f8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java
@@ -152,7 +152,7 @@ public interface Blueprint {
    *
    * @throws InvalidTopologyException if the blueprint doesn't contain all required properties
    */
-  void validateRequiredProperties() throws InvalidTopologyException;
+  void validateRequiredProperties() throws InvalidTopologyException, GPLLicenseNotAcceptedException;
 
   /**
    *

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bbc0ef7/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
index 8c83ed6..6801e33 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
@@ -353,7 +353,7 @@ public class BlueprintImpl implements Blueprint {
    * @throws InvalidTopologyException if the blueprint configuration is invalid
    */
   @Override
-  public void validateRequiredProperties() throws InvalidTopologyException {
+  public void validateRequiredProperties() throws InvalidTopologyException, GPLLicenseNotAcceptedException {
     validator.validateRequiredProperties();
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bbc0ef7/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java
index be194df..156fe8c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java
@@ -37,5 +37,5 @@ public interface BlueprintValidator {
    *
    * @throws InvalidTopologyException if required properties are not set in blueprint
    */
-  void validateRequiredProperties() throws InvalidTopologyException;
+  void validateRequiredProperties() throws InvalidTopologyException, GPLLicenseNotAcceptedException;
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bbc0ef7/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
index 1a43b85..87b5936 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
@@ -26,6 +26,7 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.ambari.server.StaticallyInject;
 import org.apache.ambari.server.controller.internal.Stack;
 import org.apache.ambari.server.state.AutoDeployInfo;
 import org.apache.ambari.server.state.DependencyConditionInfo;
@@ -35,15 +36,24 @@ import org.apache.ambari.server.utils.VersionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.inject.Inject;
+
 /**
  * Default blueprint validator.
  */
+@StaticallyInject
 public class BlueprintValidatorImpl implements BlueprintValidator {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(BlueprintValidatorImpl.class);
   private final Blueprint blueprint;
   private final Stack stack;
 
+  public static final String LZO_CODEC_CLASS_PROPERTY_NAME = "io.compression.codec.lzo.class";
+  public static final String LZO_CODEC_CLASS = "com.hadoop.compression.lzo.LzoCodec";
+
+  @Inject
+  private static org.apache.ambari.server.configuration.Configuration configuration;
+
   public BlueprintValidatorImpl(Blueprint blueprint) {
     this.blueprint = blueprint;
     this.stack = blueprint.getStack();
@@ -84,13 +94,17 @@ public class BlueprintValidatorImpl implements BlueprintValidator {
   }
 
   @Override
-  public void validateRequiredProperties() throws InvalidTopologyException {
+  public void validateRequiredProperties() throws InvalidTopologyException, GPLLicenseNotAcceptedException {
 
     // we don't want to include default stack properties so we can't just use hostGroup full properties
     Map<String, Map<String, String>> clusterConfigurations = blueprint.getConfiguration().getProperties();
 
     // we need to have real passwords, not references
     if (clusterConfigurations != null) {
+
+      // need to reject blueprints that have LZO enabled if the Ambari Server hasn't been configured for it
+      boolean gplEnabled = configuration.getGplLicenseAccepted();
+
       StringBuilder errorMessage = new StringBuilder();
       boolean containsSecretReferences = false;
       for (Map.Entry<String, Map<String, String>> configEntry : clusterConfigurations.entrySet()) {
@@ -100,6 +114,11 @@ public class BlueprintValidatorImpl implements BlueprintValidator {
             String propertyName = propertyEntry.getKey();
             String propertyValue = propertyEntry.getValue();
             if (propertyValue != null) {
+              if (!gplEnabled && configType.equals("core-site") && propertyName.equals(LZO_CODEC_CLASS_PROPERTY_NAME)
+                  && propertyValue.contains(LZO_CODEC_CLASS)) {
+                throw new GPLLicenseNotAcceptedException("Your Ambari server has not been configured to download LZO GPL software. " +
+                    "Please refer to documentation to configure Ambari before proceeding.");
+              }
               if (SecretReference.isSecret(propertyValue)) {
                 errorMessage.append("  Config:" + configType + " Property:" + propertyName + "\n");
                 containsSecretReferences = true;

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bbc0ef7/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
index 6ac74a3..6d3179e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
@@ -21,11 +21,13 @@ package org.apache.ambari.server.topology;
 import static org.easymock.EasyMock.createMock;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.mock;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.verify;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.lang.reflect.Field;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -106,13 +108,14 @@ public class BlueprintImplTest {
 
   @Test
   public void testValidateConfigurations__basic_positive() throws Exception {
+    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(true);
     expect(group1.getCardinality()).andReturn("1").atLeastOnce();
     expect(group1.getComponents()).andReturn(Arrays.asList(new Component("c1"), new Component("c2"))).atLeastOnce();
     expect(group2.getCardinality()).andReturn("1").atLeastOnce();
     expect(group2.getComponents()).andReturn(Arrays.asList(new Component("c1"), new Component("c3"))).atLeastOnce();
     expect(group2.getConfiguration()).andReturn(EMPTY_CONFIGURATION).atLeastOnce();
 
-    replay(stack, group1, group2);
+    replay(stack, group1, group2, serverConfig);
 
     Map<String, String> category2Props = new HashMap<>();
     properties.put("category2", category2Props);
@@ -123,7 +126,7 @@ public class BlueprintImplTest {
     blueprint.validateRequiredProperties();
     BlueprintEntity entity = blueprint.toEntity();
 
-    verify(stack, group1, group2);
+    verify(stack, group1, group2, serverConfig);
     assertTrue(entity.getSecurityType() == SecurityType.KERBEROS);
     assertTrue(entity.getSecurityDescriptorReference().equals("testRef"));
   }
@@ -134,6 +137,8 @@ public class BlueprintImplTest {
     Map<String, String> group2Category2Props = new HashMap<>();
     group2Props.put("category2", group2Category2Props);
     group2Category2Props.put("prop2", "val");
+
+    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(true);
     // set config for group2 which contains a required property
     Configuration group2Configuration = new Configuration(group2Props, EMPTY_ATTRIBUTES, configuration);
     expect(group2.getConfiguration()).andReturn(group2Configuration).atLeastOnce();
@@ -155,11 +160,11 @@ public class BlueprintImplTest {
     properties.put("hadoop-env", hadoopProps);
     hadoopProps.put("dfs_ha_initial_namenode_active", "%HOSTGROUP:group1%");
     hadoopProps.put("dfs_ha_initial_namenode_standby", "%HOSTGROUP:group2%");
-    replay(stack, group1, group2);
+    replay(stack, group1, group2, serverConfig);
     Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null);
     blueprint.validateRequiredProperties();
     BlueprintEntity entity = blueprint.toEntity();
-    verify(stack, group1, group2);
+    verify(stack, group1, group2, serverConfig);
     assertTrue(entity.getSecurityType() == SecurityType.NONE);
     assertTrue(entity.getSecurityDescriptorReference() == null);
   }
@@ -277,17 +282,55 @@ public class BlueprintImplTest {
     verify(stack, group1, group2);
   }
   @Test(expected = InvalidTopologyException.class)
-  public void testValidateConfigurations__secretReference() throws InvalidTopologyException {
+  public void testValidateConfigurations__secretReference() throws InvalidTopologyException,
+      GPLLicenseNotAcceptedException, NoSuchFieldException, IllegalAccessException {
     Map<String, Map<String, String>> group2Props = new HashMap<>();
     Map<String, String> group2Category2Props = new HashMap<>();
+
+    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(true);
     group2Props.put("category2", group2Category2Props);
     group2Category2Props.put("prop2", "val");
     hdfsProps.put("secret", "SECRET:hdfs-site:1:test");
-    replay(stack, group1, group2);
+    replay(stack, group1, group2, serverConfig);
 
     Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null);
     blueprint.validateRequiredProperties();
-    verify(stack, group1, group2);
+    verify(stack, group1, group2, serverConfig);
+  }
+
+  @Test(expected = GPLLicenseNotAcceptedException.class)
+  public void testValidateConfigurations__gplIsNotAllowed() throws InvalidTopologyException,
+      GPLLicenseNotAcceptedException, NoSuchFieldException, IllegalAccessException {
+    Map<String, Map<String, String>> lzoProperties = new HashMap<>();
+    lzoProperties.put("core-site", new HashMap<String, String>(){{
+      put(BlueprintValidatorImpl.LZO_CODEC_CLASS_PROPERTY_NAME, BlueprintValidatorImpl.LZO_CODEC_CLASS);
+    }});
+    Configuration lzoUsageConfiguration = new Configuration(lzoProperties, EMPTY_ATTRIBUTES, EMPTY_CONFIGURATION);
+
+    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(false);
+    replay(stack, group1, group2, serverConfig);
+
+    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, lzoUsageConfiguration, null);
+    blueprint.validateRequiredProperties();
+    verify(stack, group1, group2, serverConfig);
+  }
+
+  @Test
+  public void testValidateConfigurations__gplISAllowed() throws InvalidTopologyException,
+      GPLLicenseNotAcceptedException, NoSuchFieldException, IllegalAccessException {
+    Map<String, Map<String, String>> lzoProperties = new HashMap<>();
+    lzoProperties.put("core-site", new HashMap<String, String>(){{
+      put(BlueprintValidatorImpl.LZO_CODEC_CLASS_PROPERTY_NAME, BlueprintValidatorImpl.LZO_CODEC_CLASS);
+    }});
+    Configuration lzoUsageConfiguration = new Configuration(lzoProperties, EMPTY_ATTRIBUTES, EMPTY_CONFIGURATION);
+
+    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(true);
+    expect(group2.getConfiguration()).andReturn(EMPTY_CONFIGURATION).atLeastOnce();
+    replay(stack, group1, group2, serverConfig);
+
+    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, lzoUsageConfiguration, null);
+    blueprint.validateRequiredProperties();
+    verify(stack, group1, group2, serverConfig);
   }
 
   @Test
@@ -314,6 +357,18 @@ public class BlueprintImplTest {
     verify(stack, setting);
   }
 
+  public static org.apache.ambari.server.configuration.Configuration setupConfigurationWithGPLLicense(boolean isGPLAllowed)
+      throws NoSuchFieldException, IllegalAccessException {
+    org.apache.ambari.server.configuration.Configuration serverConfig =
+        mock(org.apache.ambari.server.configuration.Configuration.class);
+    expect(serverConfig.getGplLicenseAccepted()).andReturn(isGPLAllowed).atLeastOnce();
+
+    Field field = BlueprintValidatorImpl.class.getDeclaredField("configuration");
+    field.setAccessible(true);
+    field.set(null, serverConfig);
+    return serverConfig;
+  }
+
   //todo: ensure coverage for these existing tests
 
   //  private void validateEntity(BlueprintEntity entity, boolean containsConfig) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bbc0ef7/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintValidatorImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintValidatorImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintValidatorImplTest.java
index a706428..75a9d6b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintValidatorImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintValidatorImplTest.java
@@ -228,6 +228,9 @@ public class BlueprintValidatorImplTest {
 
     services.addAll(Arrays.asList("HIVE"));
 
+    org.apache.ambari.server.configuration.Configuration serverConfig =
+        BlueprintImplTest.setupConfigurationWithGPLLicense(true);
+
     Configuration config = new Configuration(new HashMap<>(), new HashMap<>());
     expect(group1.getConfiguration()).andReturn(config).anyTimes();
 
@@ -237,7 +240,7 @@ public class BlueprintValidatorImplTest {
 
     expect(blueprint.getHostGroupsForComponent("HIVE_METASTORE")).andReturn(Collections.singleton(group1)).anyTimes();
 
-    replay(blueprint, stack, group1, group2, dependency1);
+    replay(blueprint, stack, group1, group2, dependency1, serverConfig);
     BlueprintValidator validator = new BlueprintValidatorImpl(blueprint);
     validator.validateRequiredProperties();
   }
@@ -252,6 +255,9 @@ public class BlueprintValidatorImplTest {
 
     services.addAll(Arrays.asList("OOZIE"));
 
+    org.apache.ambari.server.configuration.Configuration serverConfig =
+        BlueprintImplTest.setupConfigurationWithGPLLicense(true);
+
     Configuration config = new Configuration(new HashMap<>(), new HashMap<>());
     expect(group1.getConfiguration()).andReturn(config).anyTimes();
 
@@ -261,7 +267,7 @@ public class BlueprintValidatorImplTest {
 
     expect(blueprint.getHostGroupsForComponent("OOZIE_SERVER")).andReturn(Collections.singleton(group1)).anyTimes();
 
-    replay(blueprint, stack, group1, group2, dependency1);
+    replay(blueprint, stack, group1, group2, dependency1, serverConfig);
     BlueprintValidator validator = new BlueprintValidatorImpl(blueprint);
     validator.validateRequiredProperties();
   }
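
Stripped of the injection and test plumbing, the new validation boils down to one check over the blueprint's core-site configuration. Here is a minimal sketch of that check, using IllegalStateException in place of the real GPLLicenseNotAcceptedException so it stays self-contained.

  import java.util.Map;

  public class LzoGplCheckSketch {
    static final String LZO_CODEC_CLASS_PROPERTY_NAME = "io.compression.codec.lzo.class";
    static final String LZO_CODEC_CLASS = "com.hadoop.compression.lzo.LzoCodec";

    /** Rejects blueprints that enable the LZO codec when the GPL license has not been accepted. */
    static void checkLzoAllowed(Map<String, Map<String, String>> clusterConfigurations,
                                boolean gplLicenseAccepted) {
      if (gplLicenseAccepted || clusterConfigurations == null) {
        return;
      }
      Map<String, String> coreSite = clusterConfigurations.get("core-site");
      String value = coreSite == null ? null : coreSite.get(LZO_CODEC_CLASS_PROPERTY_NAME);
      if (value != null && value.contains(LZO_CODEC_CLASS)) {
        throw new IllegalStateException(
            "Ambari Server has not been configured to download LZO GPL software.");
      }
    }
  }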


[04/24] ambari git commit: AMBARI-22514. Initial implementation of Schedulable document deletion & archiving for Infra Solr (Krisztian Kasa via oleewere)

Posted by rl...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
new file mode 100644
index 0000000..4d5ebf1
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.regex.Matcher;
+
+import static org.apache.ambari.infra.job.archive.SolrQueryBuilder.PARAMETER_PATTERN;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.hamcrest.Matchers.hasSize;
+import static org.junit.Assert.assertThat;
+
+public class SolrQueryBuilderTest {
+  private static final Document DOCUMENT = new Document(new HashMap<String, String>() {{
+    put("logtime", "2017-10-02'T'10:00:11.634Z");
+    put("id", "1");
+  }});
+
+  @Test
+  public void testDefaultQuery() throws Exception {
+    SolrQuery solrQuery = new SolrQueryBuilder()
+            .build();
+    assertThat(solrQuery.getQuery(), is("*:*"));
+  }
+
+  @Test
+  public void testSetQuery() throws Exception {
+    SolrQuery solrQuery = new SolrQueryBuilder()
+            .setQueryText("logtime:[* TO \"${end}\"]")
+            .setEndValue("2017-11-27'T'10:12:11.372Z")
+            .build();
+    assertThat(solrQuery.getQuery(), is("logtime:[* TO \"2017-11-27'T'10:12:11.372Z\"]"));
+  }
+
+  @Test
+  public void testSetFilterQuery() throws Exception {
+    SolrQuery solrQuery = new SolrQueryBuilder()
+            .setFilterQueryText("(logtime:\"${logtime}\" AND id:{\"${id}\" TO *]) OR logtime:{\"${logtime}\" TO \"${end}\"]")
+            .setDocument(DOCUMENT)
+            .setEndValue("2017-11-27'T'10:12:11.372Z")
+            .build();
+    assertThat(solrQuery.getFilterQueries()[0], is("(logtime:\"2017-10-02'T'10:00:11.634Z\" AND id:{\"1\" TO *]) OR logtime:{\"2017-10-02'T'10:00:11.634Z\" TO \"2017-11-27'T'10:12:11.372Z\"]"));
+  }
+
+  @Test
+  public void testSetFilterQueryWhenDocumentIsNull() throws Exception {
+    SolrQuery solrQuery = new SolrQueryBuilder()
+            .setFilterQueryText("(logtime:\"${logtime}\" AND id:{\"${id}\" TO *]) OR logtime:{\"${logtime}\" TO \"${end}\"]")
+            .setEndValue("2017-11-27'T'10:12:11.372Z")
+            .build();
+    assertThat(solrQuery.getFilterQueries(), is(nullValue()));
+  }
+
+  @Test
+  public void testSetFilterQueryWhenEndValueIsNull() throws Exception {
+    SolrQuery solrQuery = new SolrQueryBuilder()
+            .setFilterQueryText("logtime:\"${logtime}\" AND id:{\"${id}\" TO *]")
+            .setDocument(DOCUMENT)
+            .build();
+    assertThat(solrQuery.getFilterQueries()[0], is("logtime:\"2017-10-02'T'10:00:11.634Z\" AND id:{\"1\" TO *]"));
+  }
+
+  @Test
+  public void testSetFilterQueryWhenQueryFilterIsNullButDocumentIsNot() throws Exception {
+    SolrQuery solrQuery = new SolrQueryBuilder()
+            .setDocument(DOCUMENT)
+            .build();
+    assertThat(solrQuery.getFilterQueries(), is(nullValue()));
+  }
+
+  @Test
+  public void testRegex() throws Exception {
+    Matcher matcher = PARAMETER_PATTERN.matcher("(logtime:\"${logtime}\" AND id:{\"${id}\" TO *]) OR logtime:{\"${logtime}\" TO \"${end}\"]");
+    List<String> parameters = new ArrayList<>();
+    while (matcher.find())
+      parameters.add(matcher.group());
+
+    assertThat(parameters, hasSize(4));
+    assertThat(parameters.get(0), is("${logtime}"));
+    assertThat(parameters.get(1), is("${id}"));
+    assertThat(parameters.get(2), is("${logtime}"));
+    assertThat(parameters.get(3), is("${end}"));
+  }
+
+  @Test
+  public void testSort() throws Exception {
+    SolrQuery solrQuery = new SolrQueryBuilder().addSort("logtime", "id").build();
+    assertThat(solrQuery.getSorts().get(0).getItem(), is("logtime"));
+    assertThat(solrQuery.getSorts().get(1).getItem(), is("id"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties b/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
index fb7ddf2..1ae57bb 100644
--- a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
+++ b/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
@@ -29,4 +29,4 @@ logfeeder.cache.key.field=log_message
 logfeeder.cache.dedup.interval=1000
 logfeeder.cache.last.dedup.enabled=true
 logsearch.config.zk_connect_string=localhost:9983
-logfeeder.include.default.level=FATAL,ERROR,WARN,INFO,DEBUG,TRACE,UNKNOWN
+logfeeder.include.default.level=FATAL,ERROR,WARN,INFO,DEBUG,TRACE,UNKNOWN
\ No newline at end of file


[05/24] ambari git commit: AMBARI-22514. Initial implementation of Schedulable document deletion & archiving for Infra Solr (Krisztian Kasa via oleewere)

Posted by rl...@apache.org.
AMBARI-22514. Initial implementation of Schedulable document deletion & archiving for Infra Solr (Krisztian Kasa via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/393fdb80
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/393fdb80
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/393fdb80

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 393fdb8048ff579e8a55cd1b477a23d1bf105576
Parents: 2bf3c8e
Author: Krisztian Kasa <ka...@gmail.com>
Authored: Tue Nov 28 15:45:22 2017 +0100
Committer: Oliver Szabo <ol...@gmail.com>
Committed: Tue Nov 28 15:47:59 2017 +0100

----------------------------------------------------------------------
 ambari-infra/ambari-infra-manager/pom.xml       |  11 ++
 .../infra/job/archive/CompositeFileAction.java  |  46 +++++
 .../ambari/infra/job/archive/Document.java      |  54 +++++
 .../infra/job/archive/DocumentDestination.java  |  23 +++
 .../archive/DocumentExportConfiguration.java    | 118 +++++++++++
 .../job/archive/DocumentExportJobListener.java  |  35 ++++
 .../job/archive/DocumentExportProperties.java   | 112 +++++++++++
 .../job/archive/DocumentExportStepListener.java |  47 +++++
 .../infra/job/archive/DocumentExporter.java     |  99 ++++++++++
 .../infra/job/archive/DocumentItemReader.java   | 135 +++++++++++++
 .../infra/job/archive/DocumentItemWriter.java   |  25 +++
 .../infra/job/archive/DocumentIterator.java     |  25 +++
 .../infra/job/archive/DocumentSource.java       |  24 +++
 .../ambari/infra/job/archive/FileAction.java    |  25 +++
 .../job/archive/LocalDocumentItemWriter.java    |  72 +++++++
 .../ambari/infra/job/archive/S3Properties.java  |  64 ++++++
 .../ambari/infra/job/archive/S3Uploader.java    |  51 +++++
 .../infra/job/archive/SolrDocumentIterator.java |  90 +++++++++
 .../infra/job/archive/SolrDocumentSource.java   |  68 +++++++
 .../infra/job/archive/SolrQueryBuilder.java     | 115 +++++++++++
 .../infra/job/archive/SolrQueryProperties.java  |  69 +++++++
 .../infra/job/archive/TarGzCompressor.java      |  50 +++++
 .../apache/ambari/infra/manager/JobManager.java |  21 +-
 .../src/main/resources/infra-manager.properties |  12 ++
 .../src/main/resources/log4j2.xml               |   2 +-
 .../infra/job/archive/DocumentExporterTest.java | 147 ++++++++++++++
 .../job/archive/DocumentItemReaderTest.java     | 197 +++++++++++++++++++
 .../archive/LocalDocumentItemWriterTest.java    |  98 +++++++++
 .../infra/job/archive/SolrQueryBuilderTest.java | 113 +++++++++++
 .../test-config/logfeeder/logfeeder.properties  |   2 +-
 30 files changed, 1940 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/pom.xml b/ambari-infra/ambari-infra-manager/pom.xml
index aa86da8..67bf7d1 100644
--- a/ambari-infra/ambari-infra-manager/pom.xml
+++ b/ambari-infra/ambari-infra-manager/pom.xml
@@ -141,6 +141,12 @@
       <version>3.4</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.hamcrest</groupId>
+      <artifactId>hamcrest-all</artifactId>
+      <version>1.3</version>
+      <scope>test</scope>
+    </dependency>
     <!-- Spring dependencies -->
     <dependency>
       <groupId>org.springframework</groupId>
@@ -417,6 +423,11 @@
       <groupId>com.google.guava</groupId>
       <version>20.0</version>
     </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-s3</artifactId>
+      <version>1.11.5</version>
+    </dependency>
   </dependencies>
 
 </project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/CompositeFileAction.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/CompositeFileAction.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/CompositeFileAction.java
new file mode 100644
index 0000000..84ce160
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/CompositeFileAction.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import static java.util.Arrays.asList;
+
+public class CompositeFileAction implements FileAction {
+
+  private final List<FileAction> actions;
+
+  public CompositeFileAction(FileAction... actions) {
+    // copy into a mutable list so that add(FileAction) can append further actions
+    this.actions = new ArrayList<>(asList(actions));
+  }
+
+  public void add(FileAction action) {
+    actions.add(action);
+  }
+
+  @Override
+  public File perform(File inputFile) {
+    File file = inputFile;
+    for (FileAction action : actions) {
+      file = action.perform(file);
+    }
+    return file;
+  }
+}
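
CompositeFileAction is what lets the export step chain post-processing of a written file, for example compress it and then ship it to S3 with the TarGzCompressor and S3Uploader added in this commit. A minimal sketch of such a chain follows; the credentials, bucket, key prefix and file path are placeholders, not values from this patch:

    S3Properties s3Properties = new S3Properties();
    s3Properties.setAccessKey("ACCESS_KEY");        // placeholder
    s3Properties.setSecretKey("SECRET_KEY");        // placeholder
    s3Properties.setBucketName("infra-archive");    // hypothetical bucket
    s3Properties.setKeyPrefix("solr_export/");

    // first compress the exported JSON file, then upload the compressed file
    FileAction archiveAndUpload =
        new CompositeFileAction(new TarGzCompressor(), new S3Uploader(s3Properties));
    File uploaded = archiveAndUpload.perform(new File("/tmp/solr_export/hadoop_logs_-_1.json"));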

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
new file mode 100644
index 0000000..84f5ece
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static java.util.Collections.unmodifiableMap;
+
+// TODO: create entities for each solr collections
+public class Document {
+  private final Map<String, String> fieldMap;
+
+  private Document() {
+    fieldMap = new HashMap<>();
+  }
+
+  public Document(Map<String, String> fieldMap) {
+    this.fieldMap = unmodifiableMap(fieldMap);
+  }
+
+  public String get(String key) {
+    return fieldMap.get(key);
+  }
+
+  @JsonAnyGetter
+  private Map<String, String> getFieldMap() {
+    return fieldMap;
+  }
+
+  @JsonAnySetter
+  private void put(String key, String value) {
+    fieldMap.put(key, value);
+  }
+}
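
Document is the unit that flows through the whole pipeline: an unmodifiable field map with Jackson any-getter/any-setter hooks so writers can serialize it as one JSON object per line. A short sketch of building and serializing one, the same way LocalDocumentItemWriter does (field values are illustrative):

    Map<String, String> fields = new HashMap<>();
    fields.put("id", "1");
    fields.put("logtime", "2017-10-02'T'10:00:11.634Z");

    Document document = new Document(fields);
    String line = new ObjectMapper().writeValueAsString(document);
    // line == {"id":"1","logtime":"2017-10-02'T'10:00:11.634Z"} (key order may differ)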

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentDestination.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentDestination.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentDestination.java
new file mode 100644
index 0000000..f647a36
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentDestination.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+public interface DocumentDestination {
+  DocumentItemWriter open(Document firstDocument);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportConfiguration.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportConfiguration.java
new file mode 100644
index 0000000..69f41d3
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportConfiguration.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.Step;
+import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
+import org.springframework.batch.core.configuration.annotation.JobScope;
+import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
+import org.springframework.batch.core.configuration.annotation.StepScope;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import javax.inject.Inject;
+import java.io.File;
+import java.nio.file.Paths;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeFormatter;
+
+import static org.apache.ambari.infra.job.archive.SolrDocumentSource.SOLR_DATETIME_FORMATTER;
+import static org.apache.commons.lang.StringUtils.isBlank;
+
+@Configuration
+public class DocumentExportConfiguration {
+  private static final Logger LOG = LoggerFactory.getLogger(DocumentExportConfiguration.class);
+  private static final DateTimeFormatter FILENAME_DATETIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH_mm_ss.SSSX");
+
+  @Inject
+  private DocumentExportProperties properties;
+
+  @Inject
+  private StepBuilderFactory steps;
+
+  @Inject
+  private JobBuilderFactory jobs;
+
+
+
+  @Bean
+  public Job logExportJob(@Qualifier("exportStep") Step logExportStep) {
+    return jobs.get("solr_data_export").listener(new DocumentExportJobListener()).start(logExportStep).build();
+  }
+
+  @Bean
+  @JobScope
+  public Step exportStep(DocumentExporter documentExporter) {
+    return steps.get("export")
+            .tasklet(documentExporter)
+            .listener(new DocumentExportStepListener(properties))
+            .build();
+  }
+
+  @Bean
+  @StepScope
+  public DocumentExporter getDocumentExporter(DocumentItemReader documentItemReader,
+                                              @Value("#{stepExecution.jobExecution.id}") String jobId) {
+    File path = Paths.get(
+            properties.getDestinationDirectoryPath(),
+            String.format("%s_%s", properties.getQuery().getCollection(), jobId)).toFile(); // TODO: add end date
+    LOG.info("Destination directory path={}", path);
+    if (!path.exists()) {
+      if (!path.mkdirs()) {
+        LOG.warn("Unable to create directory {}", path);
+      }
+    }
+
+    CompositeFileAction fileAction = new CompositeFileAction(new TarGzCompressor());
+
+    return new DocumentExporter(
+            documentItemReader,
+            firstDocument -> new LocalDocumentItemWriter(
+                    new File(path, String.format("%s_-_%s.json",
+                            properties.getQuery().getCollection(),
+                            firstDocument.get(properties.getFileNameSuffixColumn()))),
+                    fileAction),
+            properties.getWriteBlockSize());
+  }
+
+  @Bean
+  @StepScope
+  public DocumentItemReader reader(DocumentSource documentSource) {
+    return new DocumentItemReader(documentSource, properties.getReadBlockSize());
+  }
+
+  @Bean
+  @StepScope
+  public DocumentSource logSource(@Value("#{jobParameters[endDate]}") String endDateText) {
+    OffsetDateTime endDate = OffsetDateTime.now(ZoneOffset.UTC);
+    if (!isBlank(endDateText))
+      endDate = OffsetDateTime.parse(endDateText);
+
+    return new SolrDocumentSource(
+            properties.getZooKeeperSocket(),
+            properties.getQuery(),
+            SOLR_DATETIME_FORMATTER.format(endDate));
+  }
+}
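
Two details of this configuration are easy to miss: the export directory is created per run under destinationDirectoryPath from the collection name and the job execution id, and the optional endDate job parameter must be an ISO-8601 offset date-time because it goes through OffsetDateTime.parse before being re-rendered with SOLR_DATETIME_FORMATTER. A quick illustration of that conversion (the value is arbitrary):

    OffsetDateTime endDate = OffsetDateTime.parse("2017-11-28T12:00:00Z");
    String solrEndValue = SolrDocumentSource.SOLR_DATETIME_FORMATTER.format(endDate);
    // solrEndValue == "2017-11-28T12:00:00.000Z", the value later substituted for ${end}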

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportJobListener.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportJobListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportJobListener.java
new file mode 100644
index 0000000..f1df46c
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportJobListener.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.springframework.batch.core.ExitStatus;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobExecutionListener;
+
+public class DocumentExportJobListener implements JobExecutionListener {
+  @Override
+  public void beforeJob(JobExecution jobExecution) {
+
+  }
+
+  @Override
+  public void afterJob(JobExecution jobExecution) {
+    jobExecution.setExitStatus(new ExitStatus(ExitStatus.COMPLETED.getExitCode()));
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportProperties.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportProperties.java
new file mode 100644
index 0000000..d6301c0
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportProperties.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.hibernate.validator.constraints.NotBlank;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.PropertySource;
+
+import javax.validation.constraints.Min;
+
+import static org.apache.commons.lang.StringUtils.isBlank;
+
+@Configuration
+@PropertySource(value = {"classpath:infra-manager.properties"})
+@ConfigurationProperties(prefix = "infra-manager.jobs.solr_data_export")
+public class DocumentExportProperties {
+  @NotBlank
+  private String zooKeeperSocket;
+  @Min(1)
+  private int readBlockSize;
+  @Min(1)
+  private int writeBlockSize;
+  @NotBlank
+  private String destinationDirectoryPath;
+  @NotBlank
+  private String fileNameSuffixColumn;
+  private SolrQueryProperties query;
+
+  public String getZooKeeperSocket() {
+    return zooKeeperSocket;
+  }
+
+  public void setZooKeeperSocket(String zooKeeperSocket) {
+    this.zooKeeperSocket = zooKeeperSocket;
+  }
+
+  public int getReadBlockSize() {
+    return readBlockSize;
+  }
+
+  public void setReadBlockSize(int readBlockSize) {
+    this.readBlockSize = readBlockSize;
+  }
+
+  public int getWriteBlockSize() {
+    return writeBlockSize;
+  }
+
+  public void setWriteBlockSize(int writeBlockSize) {
+    this.writeBlockSize = writeBlockSize;
+  }
+
+  public String getDestinationDirectoryPath() {
+    return destinationDirectoryPath;
+  }
+
+  public void setDestinationDirectoryPath(String destinationDirectoryPath) {
+    this.destinationDirectoryPath = destinationDirectoryPath;
+  }
+
+  public void apply(JobParameters jobParameters) {
+    // TODO: solr query params
+    zooKeeperSocket = jobParameters.getString("zooKeeperSocket", zooKeeperSocket);
+    readBlockSize = getIntJobParameter(jobParameters, "readBlockSize", readBlockSize);
+    writeBlockSize = getIntJobParameter(jobParameters, "writeBlockSize", writeBlockSize);
+    destinationDirectoryPath = jobParameters.getString("destinationDirectoryPath", destinationDirectoryPath);
+    query.setCollection(jobParameters.getString("collection", query.getCollection()));
+    query.setQueryText(jobParameters.getString("queryText", query.getQueryText()));
+    query.setFilterQueryText(jobParameters.getString("filterQueryText", query.getFilterQueryText()));
+  }
+
+  private int getIntJobParameter(JobParameters jobParameters, String parameterName, int defaultValue) {
+    String parameterText = jobParameters.getString(parameterName);
+    if (isBlank(parameterText))
+      return defaultValue;
+    return Integer.parseInt(parameterText);
+  }
+
+  public String getFileNameSuffixColumn() {
+    return fileNameSuffixColumn;
+  }
+
+  public void setFileNameSuffixColumn(String fileNameSuffixColumn) {
+    this.fileNameSuffixColumn = fileNameSuffixColumn;
+  }
+
+  public SolrQueryProperties getQuery() {
+    return query;
+  }
+
+  public void setQuery(SolrQueryProperties query) {
+    this.query = query;
+  }
+}
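
Because DocumentExportStepListener calls apply() before the step starts, most of the values above can be overridden per execution through Spring Batch job parameters of the same names. A hedged sketch of launching the export that way; the jobLauncher and logExportJob beans are assumed to be injected, and the parameter values are illustrative:

    JobParameters parameters = new JobParametersBuilder()
        .addString("collection", "hadoop_logs")                  // overrides query.collection
        .addString("destinationDirectoryPath", "/tmp/solr_export")
        .addString("writeBlockSize", "10000")                     // parsed with Integer.parseInt
        .addString("endDate", "2017-11-28T12:00:00Z")             // consumed by logSource(...)
        .toJobParameters();

    JobExecution execution = jobLauncher.run(logExportJob, parameters);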

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportStepListener.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportStepListener.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportStepListener.java
new file mode 100644
index 0000000..3bab6d5
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExportStepListener.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.core.ExitStatus;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.StepExecutionListener;
+
+public class DocumentExportStepListener implements StepExecutionListener {
+  private static final Logger LOG = LoggerFactory.getLogger(DocumentExportStepListener.class);
+
+  private final DocumentExportProperties properties;
+
+  public DocumentExportStepListener(DocumentExportProperties properties) {
+    this.properties = properties;
+  }
+
+  @Override
+  public void beforeStep(StepExecution stepExecution) {
+    properties.apply(stepExecution.getJobParameters());
+    LOG.info("LogExport step - before step execution");
+  }
+
+  @Override
+  public ExitStatus afterStep(StepExecution stepExecution) {
+    LOG.info("LogExport step - after step execution");
+    return stepExecution.getExitStatus();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
new file mode 100644
index 0000000..6106c20
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentExporter.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.springframework.batch.core.ExitStatus;
+import org.springframework.batch.core.StepContribution;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.StepExecutionListener;
+import org.springframework.batch.core.scope.context.ChunkContext;
+import org.springframework.batch.core.step.tasklet.Tasklet;
+import org.springframework.batch.item.ExecutionContext;
+import org.springframework.batch.item.ItemStreamReader;
+import org.springframework.batch.repeat.RepeatStatus;
+
+public class DocumentExporter implements Tasklet, StepExecutionListener {
+
+  private boolean complete = false;
+  private final ItemStreamReader<Document> documentReader;
+  private final DocumentDestination documentDestination;
+  private final int writeBlockSize;
+
+  public DocumentExporter(ItemStreamReader<Document> documentReader, DocumentDestination documentDestination, int writeBlockSize) {
+    this.documentReader = documentReader;
+    this.documentDestination = documentDestination;
+    this.writeBlockSize = writeBlockSize;
+  }
+
+  @Override
+  public void beforeStep(StepExecution stepExecution) {
+
+  }
+
+  @Override
+  public ExitStatus afterStep(StepExecution stepExecution) {
+    if (complete) {
+      return ExitStatus.COMPLETED;
+    }
+    else {
+      return ExitStatus.FAILED;
+    }
+  }
+
+  @Override
+  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+    ExecutionContext executionContext = chunkContext.getStepContext().getStepExecution().getExecutionContext();
+    documentReader.open(executionContext);
+
+    DocumentItemWriter writer = null;
+    int writtenCount = 0;
+    try {
+      Document document;
+      while ((document = documentReader.read()) != null) {
+        if (writer != null && writtenCount >= writeBlockSize) {
+          writer.close();
+          writer = null;
+          writtenCount = 0;
+          documentReader.update(executionContext);
+        }
+
+        if (writer == null)
+          writer = documentDestination.open(document);
+
+        writer.write(document);
+        ++writtenCount;
+      }
+    }
+    catch (Exception e) {
+      if (writer != null) {
+        writer.revert();
+        writer = null;
+      }
+      throw e;
+    }
+    finally {
+      if (writer != null)
+        writer.close();
+      documentReader.close();
+    }
+
+    complete = true;
+    return RepeatStatus.FINISHED;
+  }
+}
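
DocumentExporter drains the reader, rolls over to a fresh DocumentItemWriter from the destination every writeBlockSize documents, and checkpoints the reader position in the step's ExecutionContext between blocks; on any exception the current writer is reverted so a partially written file is discarded. A minimal sketch of wiring it against a throw-away destination, e.g. in a unit test (names are illustrative):

    DocumentDestination countingDestination = firstDocument -> new DocumentItemWriter() {
      private int written = 0;

      @Override public void write(Document document) { ++written; }
      @Override public void revert() { written = 0; }
      @Override public void close() { System.out.println("wrote " + written + " documents"); }
    };

    DocumentExporter exporter = new DocumentExporter(documentItemReader, countingDestination, 1000);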

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemReader.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemReader.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemReader.java
new file mode 100644
index 0000000..a4378a4
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemReader.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.springframework.batch.item.ExecutionContext;
+import org.springframework.batch.item.ItemStreamException;
+import org.springframework.batch.item.support.AbstractItemStreamItemReader;
+import org.springframework.batch.repeat.CompletionPolicy;
+import org.springframework.batch.repeat.RepeatContext;
+import org.springframework.batch.repeat.RepeatStatus;
+import org.springframework.batch.repeat.context.RepeatContextSupport;
+import org.springframework.util.ClassUtils;
+
+public class DocumentItemReader extends AbstractItemStreamItemReader<Document> implements CompletionPolicy {
+
+  public final static String POSITION = "last-read";
+
+  private final DocumentSource documentSource;
+  private final int readBlockSize;
+
+  private DocumentIterator documentIterator = null;
+  private int count = 0;
+  private boolean eof = false;
+  private Document current = null;
+  private Document previous = null;
+
+  public DocumentItemReader(DocumentSource documentSource, int readBlockSize) {
+    this.documentSource = documentSource;
+    this.readBlockSize = readBlockSize;
+    setName(ClassUtils.getShortName(DocumentItemReader.class));
+  }
+
+  @Override
+  public Document read() throws Exception {
+    if (documentIterator == null)
+      openStream();
+    Document next = getNext();
+    if (next == null && count > readBlockSize) {
+      openStream();
+      next = getNext();
+    }
+    eof = next == null;
+    if (eof && documentIterator != null)
+      documentIterator.close();
+
+    previous = current;
+    current = next;
+    return current;
+  }
+
+  private Document getNext() {
+    ++count;
+    return documentIterator.next();
+  }
+
+  private void openStream() {
+    closeStream();
+    documentIterator = documentSource.open(current, readBlockSize);
+    count = 0;
+  }
+
+  private void closeStream() {
+    if (documentIterator == null)
+      return;
+    try {
+      documentIterator.close();
+    }
+    catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+    documentIterator = null;
+  }
+
+  @Override
+  public void open(ExecutionContext executionContext) {
+    super.open(executionContext);
+    current = null;
+    previous = null;
+    eof = false;
+    documentIterator = null;
+    if (!executionContext.containsKey(POSITION))
+      return;
+
+    current = (Document) executionContext.get(POSITION);
+  }
+
+  @Override
+  public void update(ExecutionContext executionContext) throws ItemStreamException {
+    super.update(executionContext);
+    if (previous != null)
+      executionContext.put(POSITION, previous);
+  }
+
+  @Override
+  public void close() {
+    closeStream();
+  }
+
+  @Override
+  public boolean isComplete(RepeatContext context, RepeatStatus result) {
+    return eof;
+  }
+
+  @Override
+  public boolean isComplete(RepeatContext context) {
+    return eof;
+  }
+
+  @Override
+  public RepeatContext start(RepeatContext parent) {
+    return new RepeatContextSupport(parent);
+  }
+
+  @Override
+  public void update(RepeatContext context) {
+    if (eof)
+      context.setCompleteOnly();
+  }
+}
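
From a caller's point of view the reader behaves like any ItemStreamReader: open it with an ExecutionContext, call read() until it returns null, and call update() whenever the position should be checkpointed under the "last-read" key. Internally it re-opens the DocumentSource after each readBlockSize-sized block, passing the last document so the next query can continue after it. A minimal consumption sketch (the reader instance is assumed to exist):

    ExecutionContext executionContext = new ExecutionContext();
    reader.open(executionContext);          // restores the "last-read" Document if one was stored
    try {
      Document document;
      while ((document = reader.read()) != null) {
        // hand the document to a DocumentItemWriter, then checkpoint
        reader.update(executionContext);    // stores the previously returned document
      }
    } finally {
      reader.close();
    }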

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemWriter.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemWriter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemWriter.java
new file mode 100644
index 0000000..e96f6f1
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentItemWriter.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+public interface DocumentItemWriter {
+  void write(Document document);
+  void revert();
+  void close();
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentIterator.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentIterator.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentIterator.java
new file mode 100644
index 0000000..6232cfc
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentIterator.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import java.util.Iterator;
+
+// TODO: generic closeable iterator
+public interface DocumentIterator extends Iterator<Document>, AutoCloseable {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentSource.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentSource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentSource.java
new file mode 100644
index 0000000..c9871a3
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentSource.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+// TODO: generic object source
+public interface DocumentSource {
+  DocumentIterator open(Document current, int rows);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileAction.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileAction.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileAction.java
new file mode 100644
index 0000000..26a8c63
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileAction.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import java.io.File;
+
+public interface FileAction {
+  File perform(File inputFile);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
new file mode 100644
index 0000000..02d898d
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriter.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.commons.io.IOUtils;
+
+import java.io.*;
+
+public class LocalDocumentItemWriter implements DocumentItemWriter {
+  private static final ObjectMapper json = new ObjectMapper();
+  private static final String ENCODING = "UTF-8";
+
+  private final File outFile;
+  private final BufferedWriter bufferedWriter;
+  private final FileAction fileAction;
+
+  public LocalDocumentItemWriter(File outFile, FileAction fileAction) {
+    this.fileAction = fileAction;
+    this.outFile = outFile;
+    try {
+      this.bufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outFile), ENCODING));
+    } catch (UnsupportedEncodingException e) {
+      throw new RuntimeException(e);
+    } catch (FileNotFoundException e) {
+      throw new UncheckedIOException(e);
+    }
+  }
+
+  @Override
+  public void write(Document document) {
+    try {
+      bufferedWriter.write(json.writeValueAsString(document));
+      bufferedWriter.newLine();
+    }
+    catch (IOException e) {
+      throw new UncheckedIOException(e);
+    }
+  }
+
+  @Override
+  public void revert() {
+    IOUtils.closeQuietly(bufferedWriter);
+    outFile.delete();
+  }
+
+  @Override
+  public void close() {
+    try {
+      bufferedWriter.close();
+      fileAction.perform(outFile);
+    } catch (IOException e) {
+      throw new UncheckedIOException(e);
+    }
+  }
+}
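
LocalDocumentItemWriter appends one JSON document per line and only runs the supplied FileAction when close() succeeds; revert() instead closes the stream quietly and deletes the partial file. A short usage sketch (the path and the document variable are placeholders):

    File outFile = new File("/tmp/solr_export/hadoop_logs_-_1.json");
    DocumentItemWriter writer = new LocalDocumentItemWriter(outFile, new TarGzCompressor());
    try {
      writer.write(document);   // repeated for every document of the current block
      writer.close();           // flushes the file, then compresses it via the FileAction
    } catch (RuntimeException e) {
      writer.revert();          // drop the partially written file
      throw e;
    }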

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java
new file mode 100644
index 0000000..495401d
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Properties.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.hibernate.validator.constraints.NotBlank;
+
+public class S3Properties {
+  @NotBlank
+  private String accessKey;
+  @NotBlank
+  private String secretKey;
+  @NotBlank
+  private String keyPrefix;
+  @NotBlank
+  private String bucketName;
+
+  public String getAccessKey() {
+    return accessKey;
+  }
+
+  public String getSecretKey() {
+    return secretKey;
+  }
+
+  public String getKeyPrefix() {
+    return keyPrefix;
+  }
+
+  public String getBucketName() {
+    return bucketName;
+  }
+
+  public void setAccessKey(String accessKey) {
+    this.accessKey = accessKey;
+  }
+
+  public void setSecretKey(String secretKey) {
+    this.secretKey = secretKey;
+  }
+
+  public void setKeyPrefix(String keyPrefix) {
+    this.keyPrefix = keyPrefix;
+  }
+
+  public void setBucketName(String bucketName) {
+    this.bucketName = bucketName;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
new file mode 100644
index 0000000..3214e50
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/S3Uploader.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.services.s3.AmazonS3Client;
+
+import java.io.File;
+
+public class S3Uploader implements FileAction {
+
+  private final AmazonS3Client client;
+  private final String keyPrefix;
+  private final String bucketName;
+
+  public S3Uploader(S3Properties s3Properties) {
+    this.keyPrefix = s3Properties.getKeyPrefix();
+    this.bucketName = s3Properties.getBucketName();
+    BasicAWSCredentials credentials = new BasicAWSCredentials(s3Properties.getAccessKey(), s3Properties.getSecretKey());
+    client = new AmazonS3Client(credentials);
+  }
+
+  @Override
+  public File perform(File inputFile) {
+    String key = keyPrefix + inputFile.getName();
+
+    if (client.doesObjectExist(bucketName, key)) {
+      System.out.println("Object '" + key + "' already exists");
+      System.exit(0);
+    }
+
+    client.putObject(bucketName, key, inputFile);
+    return inputFile;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
new file mode 100644
index 0000000..db4069b
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.TimeZone;
+
+public class SolrDocumentIterator implements DocumentIterator {
+
+  private static final DateFormat SOLR_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");
+
+  static {
+    SOLR_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
+  }
+
+  private final Iterator<SolrDocument> documentIterator;
+  private final CloudSolrClient client;
+
+
+  public SolrDocumentIterator(QueryResponse response, CloudSolrClient client) {
+    documentIterator = response.getResults().iterator();
+    this.client = client;
+  }
+
+  @Override
+  public Document next() {
+    if (!documentIterator.hasNext())
+      return null;
+    
+    SolrDocument document = documentIterator.next();
+    HashMap<String, String> fieldMap = new HashMap<>();
+    for (String key : document.getFieldNames()) {
+      fieldMap.put(key, toString(document.get(key)));
+    }
+
+    return new Document(fieldMap);
+  }
+
+  private String toString(Object value) {
+    if (value == null) {
+      return null;
+    }
+    else if (value instanceof Date) {
+      return SOLR_DATE_FORMAT.format(value);
+    }
+    else {
+      return value.toString();
+    }
+  }
+
+  @Override
+  public void close() {
+    try {
+      client.close();
+    } catch (IOException e) {
+      throw new UncheckedIOException(e);
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    return documentIterator.hasNext();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentSource.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentSource.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentSource.java
new file mode 100644
index 0000000..2181ba3
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentSource.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.time.format.DateTimeFormatter;
+
+public class SolrDocumentSource implements DocumentSource {
+  public static final DateTimeFormatter SOLR_DATETIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSX");
+  private static final Logger LOG = LoggerFactory.getLogger(SolrDocumentSource.class);
+
+  private final String zkHost;
+  private final SolrQueryProperties properties;
+  private final String endValue;
+
+  public SolrDocumentSource(String zkHost, SolrQueryProperties properties, String endValue) {
+    this.zkHost = zkHost;
+    this.properties = properties;
+    this.endValue = endValue;
+  }
+
+  @Override
+  public DocumentIterator open(Document current, int rows) {
+    CloudSolrClient client = new CloudSolrClient.Builder().withZkHost(zkHost).build();
+    client.setDefaultCollection(properties.getCollection());
+
+    SolrQuery query = properties.toQueryBuilder()
+            .setEndValue(endValue)
+            .setDocument(current)
+            .build();
+    query.setRows(rows);
+
+    LOG.info("Executing solr query {}", query.toLocalParamsString());
+
+    try {
+      QueryResponse response = client.query(query);
+      return new SolrDocumentIterator(response, client);
+    } catch (SolrServerException e) {
+      throw new RuntimeException(e);
+    } catch (IOException e) {
+      throw new UncheckedIOException(e);
+    }
+  }
+}
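
A minimal end-to-end sketch of the DocumentSource contract implemented above, assuming a reachable Solr cluster; the ZooKeeper address, collection and end value are placeholders, not values taken from this commit:

  package org.apache.ambari.infra.job.archive;

  public class SolrDocumentSourceSketch {
    public static void main(String[] args) throws Exception {
      SolrQueryProperties properties = new SolrQueryProperties();
      properties.setCollection("hadoop_logs");                  // placeholder collection
      properties.setQueryText("logtime:[* TO \"${end}\"]");
      properties.setSort(new String[] {"logtime", "id"});

      // Export every document logged before the given end value.
      DocumentSource source = new SolrDocumentSource("zookeeper:2181", properties, "2017-11-29T00:00:00.000Z");

      DocumentIterator iterator = source.open(null, 100);       // no previous document, 100 rows per page
      try {
        Document document;
        while ((document = iterator.next()) != null) {
          System.out.println(document.get("id"));
        }
      } finally {
        iterator.close();
      }
    }
  }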

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
new file mode 100644
index 0000000..d0f6d40
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.apache.solr.client.solrj.SolrQuery;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static org.apache.solr.client.solrj.SolrQuery.ORDER.asc;
+
+public class SolrQueryBuilder {
+
+  public static final Pattern PARAMETER_PATTERN = Pattern.compile("\\$\\{[a-z]+\\}");
+
+  private String queryText;
+  private String endValue;
+  private String filterQueryText;
+  private Document document;
+  private String[] sortFields;
+
+  public SolrQueryBuilder() {
+    this.queryText = "*:*";
+  }
+
+  public SolrQueryBuilder setQueryText(String queryText) {
+    this.queryText = queryText;
+    return this;
+  }
+
+  public SolrQueryBuilder setEndValue(String endValue) {
+    this.endValue = endValue;
+    return this;
+  }
+
+  public SolrQueryBuilder setFilterQueryText(String filterQueryText) {
+    this.filterQueryText = filterQueryText;
+    return this;
+  }
+
+
+  public SolrQueryBuilder setDocument(Document document) {
+    this.document = document;
+    return this;
+  }
+
+  public SolrQueryBuilder addSort(String... sortBy) {
+    this.sortFields = sortBy;
+    return this;
+  }
+
+  public SolrQuery build() {
+    SolrQuery solrQuery = new SolrQuery();
+
+    String query = queryText;
+    query = setEndValueOn(query);
+
+    solrQuery.setQuery(query);
+
+    if (filterQueryText != null) {
+      String filterQuery = filterQueryText;
+      filterQuery = setEndValueOn(filterQuery);
+
+      Set<String> paramNames = collectParamNames(filterQuery);
+      if (document != null) {
+        for (String parameter : paramNames) {
+          if (document.get(parameter) != null)
+            filterQuery = filterQuery.replace(String.format("${%s}", parameter), document.get(parameter));
+        }
+      }
+
+      // Apply the filter query only when it is usable as-is: either there is no current
+      // document and the filter contains no ${...} placeholders, or a document is
+      // available to substitute the placeholders it does contain.
+      if ((document == null && paramNames.isEmpty()) || (document != null && !paramNames.isEmpty()))
+        solrQuery.setFilterQueries(filterQuery);
+    }
+
+    if (sortFields != null) {
+      for (String field : sortFields)
+        solrQuery.addSort(field, asc);
+    }
+
+    return solrQuery;
+  }
+
+  private String setEndValueOn(String query) {
+    if (endValue != null)
+      query = query.replace("${end}", endValue);
+    return query;
+  }
+
+  private Set<String> collectParamNames(String filterQuery) {
+    Matcher matcher = PARAMETER_PATTERN.matcher(filterQuery);
+    Set<String> parameters = new HashSet<>();
+    while (matcher.find())
+      parameters.add(matcher.group().replace("${", "").replace("}", ""));
+    return parameters;
+  }
+}
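
To make the ${...} substitution in build() above concrete, a small sketch that feeds in the same query strings used by infra-manager.properties further down; the logtime and id values of the "current" document are made up:

  package org.apache.ambari.infra.job.archive;

  import java.util.HashMap;

  import org.apache.solr.client.solrj.SolrQuery;

  public class SolrQueryBuilderSketch {
    public static void main(String[] args) {
      HashMap<String, String> fields = new HashMap<>();
      fields.put("logtime", "2017-11-28T10:00:00.000Z");   // made-up values of the last document read
      fields.put("id", "doc-42");

      SolrQuery query = new SolrQueryBuilder()
              .setQueryText("logtime:[* TO \"${end}\"]")
              .setFilterQueryText("(logtime:\"${logtime}\" AND id:{\"${id}\" TO *]) OR logtime:{\"${logtime}\" TO \"${end}\"]")
              .setEndValue("2017-11-29T00:00:00.000Z")
              .setDocument(new Document(fields))
              .addSort("logtime", "id")
              .build();

      System.out.println(query.getQuery());
      // logtime:[* TO "2017-11-29T00:00:00.000Z"]
      System.out.println(query.getFilterQueries()[0]);
      // (logtime:"2017-11-28T10:00:00.000Z" AND id:{"doc-42" TO *])
      //   OR logtime:{"2017-11-28T10:00:00.000Z" TO "2017-11-29T00:00:00.000Z"]
    }
  }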

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryProperties.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryProperties.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryProperties.java
new file mode 100644
index 0000000..444a15b
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryProperties.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.hibernate.validator.constraints.NotBlank;
+
+public class SolrQueryProperties {
+  @NotBlank
+  private String collection;
+  @NotBlank
+  private String queryText;
+  private String filterQueryText;
+  private String[] sort;
+
+  public String getCollection() {
+    return collection;
+  }
+
+  public void setCollection(String collection) {
+    this.collection = collection;
+  }
+
+  public String getQueryText() {
+    return queryText;
+  }
+
+  public void setQueryText(String queryText) {
+    this.queryText = queryText;
+  }
+
+  public String getFilterQueryText() {
+    return filterQueryText;
+  }
+
+  public void setFilterQueryText(String filterQueryText) {
+    this.filterQueryText = filterQueryText;
+  }
+
+  public String[] getSort() {
+    return sort;
+  }
+
+  public void setSort(String[] sort) {
+    this.sort = sort;
+  }
+
+  public SolrQueryBuilder toQueryBuilder() {
+    return new SolrQueryBuilder()
+            .setQueryText(queryText)
+            .setFilterQueryText(filterQueryText)
+            .addSort(sort);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/TarGzCompressor.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/TarGzCompressor.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/TarGzCompressor.java
new file mode 100644
index 0000000..8e34ca9
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/TarGzCompressor.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
+import org.apache.commons.io.IOUtils;
+
+import java.io.*;
+
+public class TarGzCompressor implements FileAction {
+  @Override
+  public File perform(File inputFile) {
+    File tarGzFile = new File(inputFile.getParent(), inputFile.getName() + ".tar.gz");
+    try (TarArchiveOutputStream tarArchiveOutputStream = new TarArchiveOutputStream(
+            new GzipCompressorOutputStream(new FileOutputStream(tarGzFile)))) {
+      TarArchiveEntry archiveEntry = new TarArchiveEntry(inputFile.getName());
+      archiveEntry.setSize(inputFile.length());
+      tarArchiveOutputStream.putArchiveEntry(archiveEntry);
+
+      try (FileInputStream fileInputStream = new FileInputStream(inputFile)) {
+        IOUtils.copy(fileInputStream, tarArchiveOutputStream);
+      }
+
+      tarArchiveOutputStream.closeArchiveEntry();
+    }
+    catch (IOException ex) {
+      throw new UncheckedIOException(ex);
+    }
+
+    return tarGzFile;
+  }
+}
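
A short usage sketch for the compressor above; the input path is hypothetical and has to point at an existing file for the copy to succeed:

  package org.apache.ambari.infra.job.archive;

  import java.io.File;

  public class TarGzCompressorSketch {
    public static void main(String[] args) {
      // Hypothetical export file; any existing regular file is handled the same way.
      File exported = new File("/tmp/ambariInfraManager/hadoop_logs.json");

      FileAction compressor = new TarGzCompressor();
      File archive = compressor.perform(exported);

      // The archive is written next to the input as <name>.tar.gz
      System.out.println(archive.getAbsolutePath());   // /tmp/ambariInfraManager/hadoop_logs.json.tar.gz
    }
  }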

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
index fc0a4f7..862119a 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/manager/JobManager.java
@@ -18,6 +18,7 @@
  */
 package org.apache.ambari.infra.manager;
 
+import com.google.common.base.Splitter;
 import com.google.common.collect.Lists;
 import org.apache.ambari.infra.model.ExecutionContextResponse;
 import org.apache.ambari.infra.model.JobDetailsResponse;
@@ -28,16 +29,14 @@ import org.apache.ambari.infra.model.JobOperationParams;
 import org.apache.ambari.infra.model.StepExecutionContextResponse;
 import org.apache.ambari.infra.model.StepExecutionInfoResponse;
 import org.apache.ambari.infra.model.StepExecutionProgressResponse;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.batch.admin.history.StepExecutionHistory;
 import org.springframework.batch.admin.service.JobService;
 import org.springframework.batch.admin.service.NoSuchStepExecutionException;
 import org.springframework.batch.admin.web.JobInfo;
 import org.springframework.batch.admin.web.StepExecutionProgress;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobInstance;
-import org.springframework.batch.core.JobParametersBuilder;
-import org.springframework.batch.core.JobParametersInvalidException;
-import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.*;
 import org.springframework.batch.core.launch.JobExecutionNotRunningException;
 import org.springframework.batch.core.launch.JobInstanceAlreadyExistsException;
 import org.springframework.batch.core.launch.JobOperator;
@@ -54,7 +53,6 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
-import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -64,6 +62,8 @@ import java.util.TimeZone;
 @Named
 public class JobManager {
 
+  private static final Logger LOG = LoggerFactory.getLogger(JobManager.class);
+
   @Inject
   private JobService jobService;
 
@@ -83,9 +83,14 @@ public class JobManager {
   public JobExecutionInfoResponse launchJob(String jobName, String params)
     throws JobParametersInvalidException, JobInstanceAlreadyExistsException, NoSuchJobException,
     JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException {
-    // TODO: handle params
     JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
-    jobParametersBuilder.addDate("date", new Date());
+    if (params != null) {
+      LOG.info("Parsing parameters of job {} '{}'", jobName, params);
+      Splitter.on(',')
+              .trimResults()
+              .withKeyValueSeparator(Splitter.on('=').limit(2).trimResults())
+              .split(params).entrySet().forEach(entry -> jobParametersBuilder.addString(entry.getKey(), entry.getValue()));
+    }
     return new JobExecutionInfoResponse(jobService.launch(jobName, jobParametersBuilder.toJobParameters()), timeZone);
   }
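
The Splitter chain above replaces the old hard-coded date parameter and expects params to be a comma-separated list of key=value pairs. A standalone sketch of the same parsing, using a purely illustrative parameter string (the key names are not taken from this commit):

  import java.util.Map;

  import com.google.common.base.Splitter;

  public class JobParamsParsingSketch {
    public static void main(String[] args) {
      // Illustrative value of the 'params' argument passed to launchJob(jobName, params).
      String params = "end=2017-11-29T00:00:00.000Z, exportName=nightly";

      Map<String, String> parsed = Splitter.on(',')
              .trimResults()
              .withKeyValueSeparator(Splitter.on('=').limit(2).trimResults())
              .split(params);

      // Prints {end=2017-11-29T00:00:00.000Z, exportName=nightly}; JobManager adds each
      // entry to the JobParametersBuilder as a String job parameter.
      System.out.println(parsed);
    }
  }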
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
index 8162376..7ef70aa 100644
--- a/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/infra-manager.properties
@@ -18,3 +18,15 @@ infra-manager.batch.db.username=admin
 infra-manager.batch.db.password=admin
 management.security.enabled=false
 management.health.solr.enabled=false
+infra-manager.server.data.folder=/tmp
+
+infra-manager.jobs.solr_data_export.zoo_keeper_socket=zookeeper:2181
+infra-manager.jobs.solr_data_export.read_block_size=100
+infra-manager.jobs.solr_data_export.write_block_size=150
+infra-manager.jobs.solr_data_export.file_name_suffix_column=logtime
+infra-manager.jobs.solr_data_export.destination_directory_path=/tmp/ambariInfraManager
+infra-manager.jobs.solr_data_export.query.collection=hadoop_logs
+infra-manager.jobs.solr_data_export.query.query_text=logtime:[* TO "${end}"]
+infra-manager.jobs.solr_data_export.query.filter_query_text=(logtime:"${logtime}" AND id:{"${id}" TO *]) OR logtime:{"${logtime}" TO "${end}"]
+infra-manager.jobs.solr_data_export.query.sort[0]=logtime
+infra-manager.jobs.solr_data_export.query.sort[1]=id
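
These new keys presumably bind to the DocumentExportProperties and SolrQueryProperties classes added in this commit through Spring's relaxed property binding. A rough, inferred sketch of such a binding; the prefix, field names and annotation usage below are assumptions based on the key names, not copied from the actual DocumentExportProperties class:

  package org.apache.ambari.infra.job.archive;

  import org.springframework.boot.context.properties.ConfigurationProperties;

  // Inferred sketch only; the real DocumentExportProperties in this commit may differ.
  @ConfigurationProperties(prefix = "infra-manager.jobs.solr_data_export")
  public class DocumentExportPropertiesSketch {
    private String zooKeeperSocket;            // zoo_keeper_socket
    private int readBlockSize;                 // read_block_size
    private int writeBlockSize;                // write_block_size
    private String fileNameSuffixColumn;       // file_name_suffix_column
    private String destinationDirectoryPath;   // destination_directory_path
    private SolrQueryProperties query = new SolrQueryProperties();  // query.* keys, including sort[0] and sort[1]

    public String getZooKeeperSocket() { return zooKeeperSocket; }
    public void setZooKeeperSocket(String zooKeeperSocket) { this.zooKeeperSocket = zooKeeperSocket; }
    public SolrQueryProperties getQuery() { return query; }
    public void setQuery(SolrQueryProperties query) { this.query = query; }
    // Remaining getters and setters omitted for brevity.
  }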

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml b/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml
index ad1adcd..9737554 100644
--- a/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/log4j2.xml
@@ -17,7 +17,7 @@
 -->
 <Configuration monitorinterval="30" status="info" strict="true">
   <Properties>
-    <Property name="logging.file">out/infra-manager.log</Property>
+    <Property name="logging.file">target/log/infra-manager.log</Property>
   </Properties>
   <Appenders>
     <Appender type="Console" name="Console">

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
new file mode 100644
index 0000000..88fbff0
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.infra.job.archive;
+
+import org.easymock.EasyMockRunner;
+import org.easymock.EasyMockSupport;
+import org.easymock.Mock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.scope.context.ChunkContext;
+import org.springframework.batch.core.scope.context.StepContext;
+import org.springframework.batch.item.ExecutionContext;
+import org.springframework.batch.item.ItemStreamReader;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.HashMap;
+
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+
+@RunWith(EasyMockRunner.class)
+public class DocumentExporterTest extends EasyMockSupport {
+
+  private DocumentExporter documentExporter;
+  @Mock
+  private ItemStreamReader<Document> reader;
+  @Mock
+  private DocumentDestination documentDestination;
+  @Mock
+  private DocumentItemWriter documentItemWriter;
+  @Mock
+  private DocumentItemWriter documentItemWriter2;
+
+  private ExecutionContext executionContext;
+  private ChunkContext chunkContext;
+  private static final Document DOCUMENT = new Document(new HashMap<String, String>() {{ put("id", "1"); }});
+
+  @Before
+  public void setUp() throws Exception {
+    StepExecution stepExecution = new StepExecution("exportDoc", new JobExecution(1L));
+    chunkContext = new ChunkContext(new StepContext(stepExecution));
+    executionContext = stepExecution.getExecutionContext();
+    documentExporter = new DocumentExporter(reader, documentDestination, 2);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    verifyAll();
+  }
+
+  @Test
+  public void testNothingToRead() throws Exception {
+    reader.open(executionContext); expectLastCall();
+    expect(reader.read()).andReturn(null);
+    reader.close(); expectLastCall();
+    replayAll();
+
+    documentExporter.execute(null, chunkContext);
+  }
+
+  @Test
+  public void testWriteLessDocumentsThanWriteBlockSize() throws Exception {
+    reader.open(executionContext); expectLastCall();
+    expect(reader.read()).andReturn(DOCUMENT);
+    expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter);
+    documentItemWriter.write(DOCUMENT); expectLastCall();
+    expect(reader.read()).andReturn(null);
+    reader.close(); expectLastCall();
+    documentItemWriter.close(); expectLastCall();
+    replayAll();
+
+    documentExporter.execute(null, chunkContext);
+  }
+
+  @Test
+  public void testWriteMoreDocumentsThanWriteBlockSize() throws Exception {
+    Document document2 = new Document(new HashMap<String, String>() {{ put("id", "2"); }});
+    Document document3 = new Document(new HashMap<String, String>() {{ put("id", "3"); }});
+
+    reader.open(executionContext); expectLastCall();
+    expect(reader.read()).andReturn(DOCUMENT);
+    expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter);
+    documentItemWriter.write(DOCUMENT); expectLastCall();
+    expect(reader.read()).andReturn(document2);
+    documentItemWriter.write(document2); expectLastCall();
+    expect(reader.read()).andReturn(document3);
+    documentItemWriter.close(); expectLastCall();
+    expect(documentDestination.open(document3)).andReturn(documentItemWriter2);
+    documentItemWriter2.write(document3); expectLastCall();
+    expect(reader.read()).andReturn(null);
+    reader.update(executionContext);
+    reader.close(); expectLastCall();
+    documentItemWriter2.close(); expectLastCall();
+    replayAll();
+
+    documentExporter.execute(null, chunkContext);
+  }
+
+  @Test(expected = IOException.class)
+  public void testReadError() throws Exception {
+    reader.open(executionContext); expectLastCall();
+    expect(reader.read()).andReturn(DOCUMENT);
+    expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter);
+    documentItemWriter.write(DOCUMENT); expectLastCall();
+    expect(reader.read()).andThrow(new IOException("TEST"));
+    documentItemWriter.revert(); expectLastCall();
+    reader.close(); expectLastCall();
+    replayAll();
+
+    documentExporter.execute(null, chunkContext);
+  }
+
+  @Test(expected = UncheckedIOException.class)
+  public void testWriteError() throws Exception {
+    reader.open(executionContext); expectLastCall();
+    expect(reader.read()).andReturn(DOCUMENT);
+    expect(documentDestination.open(DOCUMENT)).andReturn(documentItemWriter);
+    documentItemWriter.write(DOCUMENT); expectLastCall().andThrow(new UncheckedIOException(new IOException("TEST")));
+    documentItemWriter.revert(); expectLastCall();
+    reader.close(); expectLastCall();
+    replayAll();
+
+    documentExporter.execute(null, chunkContext);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
new file mode 100644
index 0000000..942713f
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
@@ -0,0 +1,197 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.infra.job.archive;
+
+import org.easymock.EasyMockRunner;
+import org.easymock.EasyMockSupport;
+import org.easymock.Mock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.batch.item.ExecutionContext;
+
+import java.util.HashMap;
+
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNull.nullValue;
+import static org.junit.Assert.assertThat;
+
+@RunWith(EasyMockRunner.class)
+public class DocumentItemReaderTest extends EasyMockSupport {
+  private static final Document DOCUMENT = new Document(new HashMap<String, String>() {{ put("id", "1"); }});
+  private static final Document DOCUMENT_2 = new Document(new HashMap<String, String>() {{ put("id", "2"); }});
+  private static final Document DOCUMENT_3 = new Document(new HashMap<String, String>() {{ put("id", "3"); }});
+  private static final int READ_BLOCK_SIZE = 2;
+
+  private DocumentItemReader documentItemReader;
+  @Mock
+  private DocumentSource documentSource;
+  @Mock
+  private DocumentIterator documentIterator;
+  @Mock
+  private DocumentIterator documentIterator2;
+
+  @Before
+  public void setUp() throws Exception {
+    documentItemReader = new DocumentItemReader(documentSource, READ_BLOCK_SIZE);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    verifyAll();
+  }
+
+  @Test
+  public void testReadWhenCollectionIsEmpty() throws Exception {
+    expect(documentSource.open(null, 2)).andReturn(documentIterator);
+    expect(documentIterator.next()).andReturn(null);
+    documentIterator.close(); expectLastCall();
+    replayAll();
+
+    assertThat(documentItemReader.read(), is(nullValue()));
+    assertThat(documentItemReader.isComplete(null), is(true));
+    assertThat(documentItemReader.isComplete(null, null), is(true));
+  }
+
+  @Test
+  public void testReadWhenCollectionContainsLessElementsThanReadBlockSize() throws Exception {
+    expect(documentSource.open(null, 2)).andReturn(documentIterator);
+    expect(documentIterator.next()).andReturn(DOCUMENT);
+    expect(documentIterator.next()).andReturn(null);
+    documentIterator.close(); expectLastCall();
+    replayAll();
+
+    assertThat(documentItemReader.read(), is(DOCUMENT));
+    assertThat(documentItemReader.isComplete(null), is(false));
+    assertThat(documentItemReader.isComplete(null, null), is(false));
+    assertThat(documentItemReader.read(), is(nullValue()));
+    assertThat(documentItemReader.isComplete(null), is(true));
+    assertThat(documentItemReader.isComplete(null, null), is(true));
+  }
+
+  @Test
+  public void testReadWhenCollectionContainsExactlySameCountElementsAsReadBlockSize() throws Exception {
+    expect(documentSource.open(null, 2)).andReturn(documentIterator);
+    expect(documentSource.open(DOCUMENT_2, 2)).andReturn(documentIterator2);
+    expect(documentIterator.next()).andReturn(DOCUMENT);
+    expect(documentIterator.next()).andReturn(DOCUMENT_2);
+    expect(documentIterator.next()).andReturn(null);
+    documentIterator.close(); expectLastCall();
+
+    expect(documentIterator2.next()).andReturn(null);
+    documentIterator2.close(); expectLastCall();
+    replayAll();
+
+    assertThat(documentItemReader.read(), is(DOCUMENT));
+    assertThat(documentItemReader.isComplete(null), is(false));
+    assertThat(documentItemReader.isComplete(null, null), is(false));
+    assertThat(documentItemReader.read(), is(DOCUMENT_2));
+    assertThat(documentItemReader.isComplete(null), is(false));
+    assertThat(documentItemReader.isComplete(null, null), is(false));
+    assertThat(documentItemReader.read(), is(nullValue()));
+    assertThat(documentItemReader.isComplete(null), is(true));
+    assertThat(documentItemReader.isComplete(null, null), is(true));
+  }
+
+  @Test
+  public void testReadWhenCollectionContainsMoreElementsThanReadBlockSize() throws Exception {
+    Document document3 = new Document(new HashMap<String, String>() {{ put("id", "3"); }});
+
+    expect(documentSource.open(null, 2)).andReturn(documentIterator);
+    expect(documentSource.open(DOCUMENT_2, 2)).andReturn(documentIterator2);
+    expect(documentIterator.next()).andReturn(DOCUMENT);
+    expect(documentIterator.next()).andReturn(DOCUMENT_2);
+    expect(documentIterator.next()).andReturn(null);
+    documentIterator.close(); expectLastCall();
+    expect(documentIterator2.next()).andReturn(document3);
+    expect(documentIterator2.next()).andReturn(null);
+    documentIterator2.close(); expectLastCall();
+
+    replayAll();
+
+    assertThat(documentItemReader.read(), is(DOCUMENT));
+    assertThat(documentItemReader.isComplete(null), is(false));
+    assertThat(documentItemReader.isComplete(null, null), is(false));
+
+    assertThat(documentItemReader.read(), is(DOCUMENT_2));
+    assertThat(documentItemReader.isComplete(null), is(false));
+    assertThat(documentItemReader.isComplete(null, null), is(false));
+
+    assertThat(documentItemReader.read(), is(document3));
+    assertThat(documentItemReader.isComplete(null), is(false));
+    assertThat(documentItemReader.isComplete(null, null), is(false));
+
+    assertThat(documentItemReader.read(), is(nullValue()));
+    assertThat(documentItemReader.isComplete(null), is(true));
+    assertThat(documentItemReader.isComplete(null, null), is(true));
+  }
+
+  @Test
+  public void testContinueWhenOnlyFirstElementWasRead() throws Exception {
+    expect(documentSource.open(null, 2)).andReturn(documentIterator);
+    expect(documentIterator.next()).andReturn(DOCUMENT);
+    documentIterator.close(); expectLastCall();
+    expect(documentSource.open(null, 2)).andReturn(documentIterator2);
+    expect(documentIterator2.next()).andReturn(DOCUMENT);
+    documentIterator2.close(); expectLastCall();
+    replayAll();
+
+    ExecutionContext executionContext = new ExecutionContext();
+    documentItemReader.open(executionContext);
+    assertThat(documentItemReader.read(), is(DOCUMENT));
+    documentItemReader.update(executionContext);
+    assertThat(executionContext.containsKey(DocumentItemReader.POSITION), is(false));
+    documentItemReader.close();
+
+    documentItemReader.open(executionContext);
+    assertThat(documentItemReader.read(), is(DOCUMENT));
+    documentItemReader.close();
+  }
+
+  @Test
+  public void testContinueWhenMoreThanOneElementWasRead() throws Exception {
+    expect(documentSource.open(null, 2)).andReturn(documentIterator);
+    expect(documentIterator.next()).andReturn(DOCUMENT);
+    expect(documentIterator.next()).andReturn(DOCUMENT_2);
+    documentIterator.close(); expectLastCall();
+    expect(documentSource.open(DOCUMENT, 2)).andReturn(documentIterator2);
+    expect(documentIterator2.next()).andReturn(DOCUMENT_2);
+    expect(documentIterator2.next()).andReturn(DOCUMENT_3);
+    documentIterator2.close(); expectLastCall();
+
+    replayAll();
+
+    ExecutionContext executionContext = new ExecutionContext();
+    documentItemReader.open(executionContext);
+    assertThat(documentItemReader.read(), is(DOCUMENT));
+    assertThat(documentItemReader.read(), is(DOCUMENT_2));
+    documentItemReader.update(executionContext);
+    assertThat(executionContext.get(DocumentItemReader.POSITION), is(DOCUMENT));
+    documentItemReader.close();
+
+    documentItemReader.open(executionContext);
+    assertThat(documentItemReader.read(), is(DOCUMENT_2));
+    assertThat(documentItemReader.read(), is(DOCUMENT_3));
+    documentItemReader.close();
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/393fdb80/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
new file mode 100644
index 0000000..6411ff1
--- /dev/null
+++ b/ambari-infra/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.infra.job.archive;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.commons.io.FileUtils;
+import org.easymock.EasyMockRunner;
+import org.easymock.EasyMockSupport;
+import org.easymock.Mock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import static org.easymock.EasyMock.expect;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+@RunWith(EasyMockRunner.class)
+public class LocalDocumentItemWriterTest extends EasyMockSupport {
+
+  private static final Document DOCUMENT = new Document(new HashMap<String, String>() {{ put("id", "1"); }});
+  private static final Document DOCUMENT2 = new Document(new HashMap<String, String>() {{ put("id", "2"); }});
+  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+  private LocalDocumentItemWriter localDocumentItemWriter;
+  private File outFile;
+  @Mock
+  private FileAction fileAction;
+
+  @Before
+  public void setUp() throws Exception {
+    outFile = File.createTempFile("LocalDocumentItemWriterTest", "json.tmp");
+    localDocumentItemWriter = new LocalDocumentItemWriter(outFile, fileAction);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    outFile.delete();
+    verifyAll();
+  }
+
+  @Test
+  public void testWrite() throws Exception {
+    expect(fileAction.perform(outFile)).andReturn(outFile);
+    replayAll();
+
+    localDocumentItemWriter.write(DOCUMENT);
+    localDocumentItemWriter.write(DOCUMENT2);
+    localDocumentItemWriter.close();
+
+    List<Document> documentList = readBack(outFile);
+    assertThat(documentList.size(), is(2));
+    assertThat(documentList.get(0).get("id"), is(DOCUMENT.get("id")));
+    assertThat(documentList.get(1).get("id"), is(DOCUMENT2.get("id")));
+  }
+
+  private List<Document> readBack(File file) throws IOException {
+    List<Document> documentList = new ArrayList<>();
+    for (String line : FileUtils.readLines(file)) {
+      documentList.add(OBJECT_MAPPER.readValue(line, Document.class));
+    }
+    return documentList;
+  }
+
+  @Test
+  public void testRevert() throws Exception {
+    replayAll();
+
+    localDocumentItemWriter.write(DOCUMENT);
+    localDocumentItemWriter.revert();
+
+    assertThat(outFile.exists(), is(false));
+  }
+}
\ No newline at end of file


[24/24] ambari git commit: Merge branch 'trunk' into branch-feature-AMBARI-20859

Posted by rl...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-20859


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/deb18003
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/deb18003
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/deb18003

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: deb18003619a3aade99dc6fa39e4da57762f2d60
Parents: 167b482 0d7f609
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Nov 29 11:58:04 2017 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Wed Nov 29 11:58:04 2017 -0500

----------------------------------------------------------------------
 .../libraries/functions/component_version.py    |   9 +-
 ambari-infra/ambari-infra-manager/pom.xml       |  11 ++
 .../infra/job/archive/CompositeFileAction.java  |  46 +++++
 .../ambari/infra/job/archive/Document.java      |  54 +++++
 .../infra/job/archive/DocumentDestination.java  |  23 +++
 .../archive/DocumentExportConfiguration.java    | 118 +++++++++++
 .../job/archive/DocumentExportJobListener.java  |  35 ++++
 .../job/archive/DocumentExportProperties.java   | 112 +++++++++++
 .../job/archive/DocumentExportStepListener.java |  47 +++++
 .../infra/job/archive/DocumentExporter.java     |  99 ++++++++++
 .../infra/job/archive/DocumentItemReader.java   | 135 +++++++++++++
 .../infra/job/archive/DocumentItemWriter.java   |  25 +++
 .../infra/job/archive/DocumentIterator.java     |  25 +++
 .../infra/job/archive/DocumentSource.java       |  24 +++
 .../ambari/infra/job/archive/FileAction.java    |  25 +++
 .../job/archive/LocalDocumentItemWriter.java    |  72 +++++++
 .../ambari/infra/job/archive/S3Properties.java  |  64 ++++++
 .../ambari/infra/job/archive/S3Uploader.java    |  51 +++++
 .../infra/job/archive/SolrDocumentIterator.java |  90 +++++++++
 .../infra/job/archive/SolrDocumentSource.java   |  68 +++++++
 .../infra/job/archive/SolrQueryBuilder.java     | 115 +++++++++++
 .../infra/job/archive/SolrQueryProperties.java  |  69 +++++++
 .../infra/job/archive/TarGzCompressor.java      |  50 +++++
 .../apache/ambari/infra/manager/JobManager.java |  21 +-
 .../src/main/resources/infra-manager.properties |  12 ++
 .../src/main/resources/log4j2.xml               |   2 +-
 .../infra/job/archive/DocumentExporterTest.java | 147 ++++++++++++++
 .../job/archive/DocumentItemReaderTest.java     | 197 +++++++++++++++++++
 .../archive/LocalDocumentItemWriterTest.java    |  98 +++++++++
 .../infra/job/archive/SolrQueryBuilderTest.java | 113 +++++++++++
 .../ambari/logfeeder/filter/FilterKeyValue.java |   4 +-
 .../LogsearchAuthenticationEntryPoint.java      |  15 +-
 .../src/app/classes/filtering.ts                |  22 ++-
 .../src/app/classes/models/app-state.ts         |   3 +-
 .../classes/queries/audit-logs-query-params.ts  |   3 +-
 .../service-logs-truncated-query-params.ts      |   3 +-
 .../src/app/classes/string.ts                   |  25 +++
 .../filters-panel/filters-panel.component.html  |   8 +-
 .../filters-panel.component.spec.ts             |   1 +
 .../filters-panel/filters-panel.component.ts    |  99 +++++-----
 .../logs-container.component.html               |   2 +-
 .../logs-container/logs-container.component.ts  |   5 +-
 .../src/app/components/mixins.less              |   2 +-
 .../search-box/search-box.component.html        |  24 ++-
 .../search-box/search-box.component.less        |  23 ++-
 .../search-box/search-box.component.ts          | 122 +++++++++---
 .../src/app/components/variables.less           |   6 +-
 .../services/component-actions.service.spec.ts  |   4 +-
 .../app/services/component-actions.service.ts   |   2 +-
 .../component-generator.service.spec.ts         |   4 +-
 .../app/services/logs-container.service.spec.ts |   4 +-
 .../src/app/services/logs-container.service.ts  |  69 +++++--
 .../src/app/services/utils.service.ts           |   4 +
 .../test-config/logfeeder/logfeeder.properties  |   2 +-
 .../stackadvisor/StackAdvisorRequest.java       |  19 ++
 .../commands/StackAdvisorCommand.java           |   2 +
 .../ambari/server/controller/AmbariServer.java  |   3 +-
 .../internal/BlueprintResourceProvider.java     |   3 +-
 .../internal/StackAdvisorResourceProvider.java  |   9 +-
 .../internal/UpgradeResourceProvider.java       |   8 -
 .../dispatchers/AmbariSNMPDispatcher.java       |  10 +-
 .../apache/ambari/server/state/ServiceInfo.java |   2 +-
 .../ambari/server/topology/Blueprint.java       |   2 +-
 .../ambari/server/topology/BlueprintImpl.java   |   3 +-
 .../server/topology/BlueprintValidator.java     |   3 +-
 .../server/topology/BlueprintValidatorImpl.java |  23 ++-
 .../GPLLicenseNotAcceptedException.java         |  28 +++
 .../OOZIE/4.2.0.3.0/metainfo.xml                |   9 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |   6 +
 .../3.0.0.3.0/package/scripts/params_linux.py   |   6 +
 .../custom_actions/scripts/ru_execute_tasks.py  |   5 +-
 .../HDP/2.0.6/properties/stack_packages.json    |   4 +-
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |  35 ++++
 .../stacks/HDP/2.5/services/FALCON/metainfo.xml |   4 +-
 .../services/YARN/configuration/yarn-site.xml   |   4 +-
 .../stacks/HDP/2.5/services/stack_advisor.py    |   2 +-
 .../stacks/HDP/2.6/services/FALCON/metainfo.xml |  26 +++
 .../stacks/HDP/2.6/services/OOZIE/metainfo.xml  |  37 ++++
 .../stacks/HDP/2.6/services/stack_advisor.py    |   3 +
 .../stacks/HDP/2.6/upgrades/config-upgrade.xml  |  14 +-
 .../HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml |  10 +-
 .../stacks/HDP/2.6/upgrades/upgrade-2.6.xml     |   4 +
 .../HDP/3.0/properties/stack_packages.json      |   4 +-
 .../services/YARN/configuration/yarn-site.xml   |   4 +-
 .../PERF/1.0/configuration/cluster-env.xml      |  26 +++
 .../1.0/hooks/before-INSTALL/scripts/hook.py    |   7 +
 .../PERF/1.0/properties/stack_packages.json     |   8 +
 .../FAKEYARN/configuration/yarn-site.xml        |   4 +-
 .../src/main/resources/stacks/stack_advisor.py  |   9 +
 .../ValidationResourceProviderTest.java         |   4 +-
 .../dispatchers/AmbariSNMPDispatcherTest.java   |  23 +--
 .../AlertNoticeDispatchServiceTest.java         |  53 ++++-
 .../server/topology/BlueprintImplTest.java      |  87 +++++++-
 .../topology/BlueprintValidatorImplTest.java    |  10 +-
 .../custom_actions/test_ru_execute_tasks.py     |   6 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   |  59 +++++-
 .../stacks/2.5/common/test_stack_advisor.py     |   2 +-
 .../stacks/2.6/common/test_stack_advisor.py     |  11 ++
 .../files/src/main/resources/ui/bower.json      |   4 +-
 .../src/main/resources/ui/hive-web/bower.json   |   4 +-
 .../scripts/containers/ComponentDetailView.jsx  |   2 +-
 .../storm/src/main/resources/ui/package.json    |  20 +-
 102 files changed, 2793 insertions(+), 251 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/deb18003/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------


[12/24] ambari git commit: AMBARI-22527 : Error while viewing topology details - Storm View (Sanket Shah via mradhakrishnan)

Posted by rl...@apache.org.
AMBARI-22527 : Error while viewing topology details - Storm View (Sanket Shah via mradhakrishnan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/11d7a6ad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/11d7a6ad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/11d7a6ad

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 11d7a6ad7a1c7070e87069d5183acfeb68fe6ab9
Parents: aa652af
Author: Madhuvanthi Radhakrishnan <mr...@hortonworks.com>
Authored: Tue Nov 28 10:34:26 2017 -0800
Committer: Madhuvanthi Radhakrishnan <mr...@hortonworks.com>
Committed: Tue Nov 28 10:34:26 2017 -0800

----------------------------------------------------------------------
 .../scripts/containers/ComponentDetailView.jsx  |  2 +-
 .../storm/src/main/resources/ui/package.json    | 20 ++++++++++----------
 2 files changed, 11 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/11d7a6ad/contrib/views/storm/src/main/resources/ui/app/scripts/containers/ComponentDetailView.jsx
----------------------------------------------------------------------
diff --git a/contrib/views/storm/src/main/resources/ui/app/scripts/containers/ComponentDetailView.jsx b/contrib/views/storm/src/main/resources/ui/app/scripts/containers/ComponentDetailView.jsx
index 2b3ed69..a5b356b 100644
--- a/contrib/views/storm/src/main/resources/ui/app/scripts/containers/ComponentDetailView.jsx
+++ b/contrib/views/storm/src/main/resources/ui/app/scripts/containers/ComponentDetailView.jsx
@@ -574,7 +574,7 @@ export default class ComponentDetailView extends Component {
               <div className="form-group">
                 <label className="col-sm-4 control-label">ID:</label>
                 <div className="col-sm-8">
-                  <p className="form-control-static" style={{'word-wrap' : 'break-word'}}>{componentDetail.id}</p>
+                  <p className="form-control-static" style={{'wordWrap' : 'break-word'}}>{componentDetail.id}</p>
                 </div>
               </div>
               <div className="form-group">

http://git-wip-us.apache.org/repos/asf/ambari/blob/11d7a6ad/contrib/views/storm/src/main/resources/ui/package.json
----------------------------------------------------------------------
diff --git a/contrib/views/storm/src/main/resources/ui/package.json b/contrib/views/storm/src/main/resources/ui/package.json
index f9e029b..3b68936 100644
--- a/contrib/views/storm/src/main/resources/ui/package.json
+++ b/contrib/views/storm/src/main/resources/ui/package.json
@@ -66,22 +66,22 @@
     "postcss-loader": "^1.2.2",
     "prop-types": "^15.6.0",
     "react": "15.6.2",
-    "react-bootstrap": "^0.31.3",
-    "react-bootstrap-switch": "^3.4.5",
-    "react-breadcrumbs": "^1.3.16",
-    "react-codemirror": "^0.2.6",
-    "react-datetime": "^2.5.0",
+    "react-bootstrap": "0.31.3",
+    "react-bootstrap-switch": "3.4.5",
+    "react-breadcrumbs": "1.3.16",
+    "react-codemirror": "0.2.6",
+    "react-datetime": "2.5.0",
     "react-dom": "15.6.2",
     "react-hot-loader": "^3.0.0-beta.3",
     "react-onclickoutside": "^5.3.2",
-    "react-router": "^2.6.0",
-    "react-router-bootstrap": "^0.23.1",
-    "react-select": "^1.0.0-beta14",
-    "react-toastr": "^2.8.0",
+    "react-router": "2.6.0",
+    "react-router-bootstrap": "0.23.1",
+    "react-select": "1.0.0-rc.10",
+    "react-toastr": "2.8.0",
     "react-treebeard": "^1.1.4",
     "react-twitter-typeahead": "^1.1.12",
     "react-utils": "^1.0.0",
-    "reactable": "^0.14.0",
+    "reactable": "0.14.0",
     "require-dir": "^0.3.0",
     "run-sequence": "^1.2.2",
     "style-loader": "^0.13.1",


[06/24] ambari git commit: AMBARI-22531 Log Search UI: refine search box. (ababiichuk)

Posted by rl...@apache.org.
AMBARI-22531 Log Search UI: refine search box. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2bf3c8ed
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2bf3c8ed
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2bf3c8ed

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 2bf3c8edb26a6f0aac43699483e5ebee89dcc533
Parents: 33ee1a7
Author: ababiichuk <ab...@hortonworks.com>
Authored: Tue Nov 28 16:13:51 2017 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Tue Nov 28 16:49:30 2017 +0200

----------------------------------------------------------------------
 .../src/app/classes/filtering.ts                |  22 +++-
 .../src/app/classes/models/app-state.ts         |   3 +-
 .../classes/queries/audit-logs-query-params.ts  |   3 +-
 .../service-logs-truncated-query-params.ts      |   3 +-
 .../src/app/classes/string.ts                   |  25 ++++
 .../filters-panel/filters-panel.component.html  |   8 +-
 .../filters-panel.component.spec.ts             |   1 +
 .../filters-panel/filters-panel.component.ts    |  99 ++++++++-------
 .../logs-container.component.html               |   2 +-
 .../logs-container/logs-container.component.ts  |   5 +-
 .../src/app/components/mixins.less              |   2 +-
 .../search-box/search-box.component.html        |  24 ++--
 .../search-box/search-box.component.less        |  23 ++--
 .../search-box/search-box.component.ts          | 122 ++++++++++++++-----
 .../src/app/components/variables.less           |   6 +-
 .../services/component-actions.service.spec.ts  |   4 +-
 .../app/services/component-actions.service.ts   |   2 +-
 .../component-generator.service.spec.ts         |   4 +-
 .../app/services/logs-container.service.spec.ts |   4 +-
 .../src/app/services/logs-container.service.ts  |  69 ++++++++---
 .../src/app/services/utils.service.ts           |   4 +
 21 files changed, 303 insertions(+), 132 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/classes/filtering.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/filtering.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/filtering.ts
index 2a7205f..d92dd41 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/filtering.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/classes/filtering.ts
@@ -18,9 +18,10 @@
 
 import {Moment, unitOfTime} from 'moment';
 import {ListItem} from '@app/classes/list-item';
+import {TimeRangeType, SortingType} from '@app/classes/string';
 
 export interface TimeUnit {
-  type: 'CURRENT' | 'LAST' | 'PAST';
+  type: TimeRangeType;
   unit: unitOfTime.DurationConstructor;
   interval?: number;
 }
@@ -33,7 +34,7 @@ export interface CustomTimeRange {
 
 export interface SortingConditions {
   key: string;
-  type: 'asc' | 'desc';
+  type: SortingType;
 }
 
 export interface TimeUnitListItem extends ListItem {
@@ -49,4 +50,21 @@ export interface FilterCondition {
   options?: (ListItem | TimeUnitListItem[])[];
   defaultSelection?: ListItem | ListItem[] | number;
   iconClass?: string;
+  fieldName?: string;
+}
+
+export interface SearchBoxParameter {
+  name: string;
+  value: string;
+  isExclude: boolean;
+}
+
+export interface SearchBoxParameterProcessed extends SearchBoxParameter {
+  id: number;
+  label: string;
+}
+
+export interface SearchBoxParameterTriggered {
+  value: string;
+  isExclude: boolean;
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/app-state.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/app-state.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/app-state.ts
index afed497..c3279ce 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/app-state.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/classes/models/app-state.ts
@@ -17,12 +17,13 @@
  */
 
 import {ActiveServiceLogEntry} from '@app/classes/active-service-log-entry';
+import {LogsType} from '@app/classes/string';
 
 export interface AppState {
   isAuthorized: boolean;
   isInitialLoading: boolean;
   isLoginInProgress: boolean;
-  activeLogsType?: string;
+  activeLogsType?: LogsType;
   isServiceLogsFileView: boolean;
   isServiceLogContextView: boolean;
   activeLog: ActiveServiceLogEntry | null;

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.ts
index 509fa04..3b38a03 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.ts
@@ -17,6 +17,7 @@
  */
 
 import {QueryParams} from '@app/classes/queries/query-params';
+import {SortingType} from '@app/classes/string';
 
 export const defaultParams = {
   page: '0',
@@ -35,7 +36,7 @@ export class AuditLogsQueryParams extends QueryParams {
   pageSize: string;
   startIndex: string;
   sortBy?: string;
-  sortType?: 'asc' | 'desc';
+  sortType?: SortingType;
   clusters?: string;
   mustBe?: string;
   mustNot?: string;

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.ts
index 6f9de16..3b08e11 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.ts
@@ -17,6 +17,7 @@
  */
 
 import {QueryParams} from '@app/classes/queries/query-params';
+import {ScrollType} from '@app/classes/string';
 
 export const defaultParams = {
   numberRows: '10',
@@ -32,5 +33,5 @@ export class ServiceLogsTruncatedQueryParams extends QueryParams {
   host_name: string;
   component_name: string;
   numberRows: string;
-  scrollType: 'before' | 'after' | '';
+  scrollType: ScrollType;
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/classes/string.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/string.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/string.ts
new file mode 100644
index 0000000..21ff4ca
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/classes/string.ts
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export type LogsType = 'auditLogs' | 'serviceLogs';
+
+export type TimeRangeType = 'CURRENT' | 'LAST' | 'PAST';
+
+export type SortingType = 'asc' | 'desc';
+
+export type ScrollType = 'before' | 'after' | '';

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html
index 2d327a6..4fe169d 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html
@@ -20,9 +20,9 @@
     <filter-dropdown *ngIf="isFilterConditionDisplayed('clusters')" [label]="filters.clusters.label"
                      formControlName="clusters" [options]="filters.clusters.options" [isMultipleChoice]="true"
                      class="filter-input"></filter-dropdown>
-    <search-box formControlName="query" [items]="searchBoxItemsTranslated" class="filter-input"
-                [parameterNameChangeSubject]="queryParameterNameChange"
-                [parameterAddSubject]="queryParameterAdd"></search-box>
+    <search-box [parameterAddSubject]="queryParameterAdd" [parameterNameChangeSubject]="queryParameterNameChange"
+                formControlName="query" [items]="searchBoxItemsTranslated" [itemsOptions]="options"
+                class="filter-input"></search-box>
     <time-range-picker *ngIf="isFilterConditionDisplayed('timeRange')" formControlName="timeRange"
                        class="filter-input"></time-range-picker>
     <timezone-picker class="filter-input"></timezone-picker>
@@ -31,7 +31,7 @@
     </button-->
   </div>
   <div class="filter-buttons col-md-4">
-    <dropdown-button [options]="searchBoxItems" iconClass="fa fa-search-minus" label="filter.excluded"
+    <dropdown-button [options]="searchBoxItems | async" iconClass="fa fa-search-minus" label="filter.excluded"
                      [hideCaret]="true" [showSelectedValue]="false" action="proceedWithExclude"></dropdown-button>
     <filter-button *ngIf="isFilterConditionDisplayed('hosts')" formControlName="hosts"
                    label="{{filters.hosts.label | translate}}" [iconClass]="filters.hosts.iconClass"
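
The dropdown-button binding switches to "searchBoxItems | async" because searchBoxItems becomes an Observable on the component (see the filters-panel.component.ts diff further below). A rough sketch of that pattern, using the same RxJS 5 import style as the rest of this app; the component and property names here are illustrative, not the real FiltersPanelComponent:

  // Sketch: the async pipe subscribes to an Observable-typed property for the
  // template and unsubscribes automatically; no manual subscription in the class.
  import {Component} from '@angular/core';
  import {Observable} from 'rxjs/Observable';
  import 'rxjs/add/observable/of';

  @Component({
    selector: 'columns-dropdown',
    template: '<ul><li *ngFor="let item of items | async">{{item}}</li></ul>'
  })
  export class ColumnsDropdownComponent {
    // Values are rendered as they arrive.
    items: Observable<string[]> = Observable.of(['level', 'type', 'host']);
  }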

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
index 1f7e8db..c9f9b52 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
@@ -106,6 +106,7 @@ describe('FiltersPanelComponent', () => {
     component.filtersForm = new FormGroup({
       control: new FormControl()
     });
+    component.logsType = 'auditLogs';
     fixture.detectChanges();
   });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts
index b41f7cd..01a8932 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts
@@ -16,74 +16,83 @@
  * limitations under the License.
  */
 
-import {Component, Input} from '@angular/core';
+import {Component, OnChanges, SimpleChanges, Input} from '@angular/core';
 import {FormGroup} from '@angular/forms';
+import {Observable} from 'rxjs/Observable';
 import {Subject} from 'rxjs/Subject';
-import {TranslateService} from '@ngx-translate/core';
+import {FilterCondition} from '@app/classes/filtering';
 import {ListItem} from '@app/classes/list-item';
+import {LogsType} from '@app/classes/string';
 import {CommonEntry} from '@app/classes/models/common-entry';
-import {LogField} from '@app/classes/models/log-field';
 import {LogsContainerService} from '@app/services/logs-container.service';
-import {AppStateService} from '@app/services/storage/app-state.service';
 
 @Component({
   selector: 'filters-panel',
   templateUrl: './filters-panel.component.html',
   styleUrls: ['./filters-panel.component.less']
 })
-export class FiltersPanelComponent {
+export class FiltersPanelComponent implements OnChanges {
 
-  constructor(
-    private translate: TranslateService, private logsContainer: LogsContainerService,
-    private appState: AppStateService
-  ) {
-    appState.getParameter('activeLogsType').subscribe(value => {
-      this.logsType = value;
-      logsContainer.logsTypeMap[value].fieldsModel.getAll().subscribe((fields: LogField[]): void => {
-        if (fields.length) {
-          const items = fields.filter((field: LogField): boolean => {
-              return this.excludedParameters.indexOf(field.name) === -1;
-            }).map((field: LogField): CommonEntry => {
-              return {
-                name: field.displayName || field.name,
-                value: field.name
-              };
-            }),
-            labelKeys = items.map((item: CommonEntry): string => item.name);
-          this.searchBoxItems = items.map((item: CommonEntry): ListItem => {
-            return {
-              label: item.name,
-              value: item.value
-            };
-          });
-          translate.get(labelKeys).first().subscribe((translation: {[key: string]: string}): void => {
-            this.searchBoxItemsTranslated = items.map((item: CommonEntry): CommonEntry => {
-              return {
-                name: translation[item.name],
-                value: item.value
-              };
-            })
-          });
-        }
-      })
-    });
+  constructor(private logsContainer: LogsContainerService) {
+  }
+
+  ngOnChanges(changes: SimpleChanges): void {
+    if (changes.hasOwnProperty('logsType')) {
+      let result;
+      switch (changes.logsType.currentValue) {
+        case 'auditLogs':
+          result = this.logsContainer.auditLogsColumns;
+          break;
+        case 'serviceLogs':
+          result = this.logsContainer.serviceLogsColumns;
+          break;
+      }
+      this.searchBoxItems = result;
+    }
   }
 
   @Input()
   filtersForm: FormGroup;
 
-  private readonly excludedParameters = ['cluster', 'host', 'level', 'type', 'logtime'];
-
-  private logsType: string;
+  @Input()
+  logsType: LogsType;
 
-  searchBoxItems: ListItem[] = [];
+  searchBoxItems: Observable<ListItem[]>;
 
-  searchBoxItemsTranslated: CommonEntry[] = [];
+  get searchBoxItemsTranslated(): CommonEntry[] {
+    switch (this.logsType) {
+      case 'auditLogs':
+        return this.logsContainer.auditLogsColumnsTranslated;
+      case 'serviceLogs':
+        return this.logsContainer.serviceLogsColumnsTranslated;
+    }
+  }
 
-  get filters(): any {
+  get filters(): {[key: string]: FilterCondition} {
     return this.logsContainer.filters;
   }
 
+  /**
+   * Object with options for search box parameter values
+   * @returns {[key: string]: CommonEntry[]}
+   */
+  get options(): {[key: string]: CommonEntry[]} {
+    return Object.keys(this.filters).filter((key: string): boolean => {
+      const condition = this.filters[key];
+      return Boolean(condition.fieldName && condition.options);
+    }).reduce((currentValue, currentKey) => {
+      const condition = this.filters[currentKey];
+      return Object.assign(currentValue, {
+        [condition.fieldName]: condition.options.map((option: ListItem): CommonEntry => {
+          return {
+            name: option.value,
+            value: option.value
+          }
+        })
+      });
+    }, {});
+  }
+
   get queryParameterNameChange(): Subject<any> {
     return this.logsContainer.queryParameterNameChange;
   }
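
The component above now derives its search-box items from the logsType @Input in ngOnChanges instead of subscribing to the app state in the constructor. A small self-contained sketch of that OnChanges pattern, with illustrative component and field names (not the actual FiltersPanelComponent):

  // Sketch: react to an @Input change via OnChanges rather than a
  // constructor-time subscription.
  import {Component, Input, OnChanges, SimpleChanges} from '@angular/core';

  @Component({
    selector: 'columns-summary',
    template: '<span>{{columns.length}} columns</span>'
  })
  export class ColumnsSummaryComponent implements OnChanges {

    @Input()
    logsType: 'auditLogs' | 'serviceLogs';

    columns: string[] = [];

    ngOnChanges(changes: SimpleChanges): void {
      // Recompute only when the bound input actually changed.
      if (changes.hasOwnProperty('logsType')) {
        this.columns = changes.logsType.currentValue === 'auditLogs'
          ? ['reqUser', 'repo'] : ['level', 'type'];
      }
    }

  }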

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html
index f34dd15..13911bd 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html
@@ -25,7 +25,7 @@
   </div>
 </div>
 <div class="container-fluid">
-  <filters-panel class="row" [filtersForm]="filtersForm"></filters-panel>
+  <filters-panel class="row" [filtersForm]="filtersForm" [logsType]="logsType"></filters-panel>
   <div class="row">
     <div *ngIf="autoRefreshRemainingSeconds" class="col-md-12">
       <div class="auto-refresh-message pull-right">

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts
index b06cfa4..86709fb 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts
@@ -30,6 +30,7 @@ import {BarGraph} from '@app/classes/models/bar-graph';
 import {ActiveServiceLogEntry} from '@app/classes/active-service-log-entry';
 import {HistogramOptions} from '@app/classes/histogram-options';
 import {ListItem} from '@app/classes/list-item';
+import {LogsType} from '@app/classes/string';
 
 @Component({
   selector: 'logs-container',
@@ -43,7 +44,7 @@ export class LogsContainerComponent {
     private tabsStorage: TabsService, private logsContainer: LogsContainerService
   ) {
     this.logsContainer.loadColumnsNames();
-    appState.getParameter('activeLogsType').subscribe((value: string) => this.logsType = value);
+    appState.getParameter('activeLogsType').subscribe((value: LogsType) => this.logsType = value);
     serviceLogsHistogramStorage.getAll().subscribe((data: BarGraph[]): void => {
       this.histogramData = this.logsContainer.getHistogramData(data);
     });
@@ -56,7 +57,7 @@ export class LogsContainerComponent {
     return this.logsContainer.filtersForm;
   };
 
-  private logsType: string;
+  private logsType: LogsType;
 
   get totalCount(): number {
     return this.logsContainer.totalCount;

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/components/mixins.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/mixins.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/mixins.less
index 5fa265b..4460821 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/mixins.less
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/mixins.less
@@ -80,7 +80,7 @@
 
 .dropdown-list-default {
   line-height: 1;
-  border-radius: 2px;
+  border-radius: @dropdown-border-radius;
   font-size: 14px;
   min-width: @dropdown-min-width;
   background: #FFF;

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html
index 92f9520..5bffdc5 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html
@@ -16,17 +16,25 @@
 -->
 
 <label class="parameter-label" *ngFor="let parameter of parameters">
-  <span *ngIf="parameter.isExclude" class="fa fa-search-minus exclude-icon"></span>
+  <span *ngIf="parameter.isExclude" class="fa fa-search-minus"></span>
   {{parameter.label | translate}}:
   <span class="parameter-value">{{parameter.value}}</span>
   <span class="fa fa-times remove-parameter" (click)="removeParameter($event, parameter.id)"></span>
 </label>
 <span class="active-parameter-label" *ngIf="isActive && activeItem">{{activeItem.name | translate}}:</span>
 <div [ngClass]="{'search-item-container': true, 'active': isActive, 'value': isValueInput}">
-  <input #parameterInput auto-complete [(ngModel)]="currentValue" [source]="items" [list-formatter]="itemsListFormatter"
-         display-property-name="name" (valueChanged)="changeParameterName({item: $event, isExclude: false})"
-         class="search-item-input parameter-input form-control">
-  <input #valueInput type="text" [(ngModel)]="currentValue" class="search-item-input value-input form-control"
-         (keyup)="onParameterValueChange($event)">
-  <div class="search-item-text" [innerHTML]="currentValue"></div>
-</div>
\ No newline at end of file
+  <span class="parameter-input-wrapper">
+    <input #parameterInput auto-complete class="search-item-input parameter-input form-control"
+           [(ngModel)]="currentValue" [source]="items" display-property-name="name"
+           [list-formatter]="itemsListFormatter" [value-formatter]="itemsValueFormatter" [match-formatted]="true"
+           (valueChanged)="changeParameterName({value: $event.value, isExclude: false})"
+           (keyup)="onParameterKeyUp($event)">
+  </span>
+  <span [ngClass]="{'no-value-options': !activeItemValueOptions.length}">
+    <input #valueInput auto-complete [(ngModel)]="currentValue" [source]="activeItemValueOptions"
+           [list-formatter]="itemsListFormatter" [value-formatter]="itemsValueFormatter" [match-formatted]="true"
+           (valueChanged)="onParameterValueChange($event.value)" (keydown)="onParameterValueKeyDown($event)"
+           (keyup)="onParameterValueKeyUp($event)" class="search-item-input value-input form-control">
+  </span>
+  <div class="search-item-text">{{currentValue}}</div>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less
index f0a5ce0..eac3bd6 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less
@@ -35,16 +35,13 @@
   cursor: text;
 
   .parameter-label {
-    // TODO implement actual styles
     margin: @label-margin;
-    padding: @label-margin;
-    background-color: @main-background-color;
+    border-radius: @dropdown-border-radius;
+    padding: @search-parameter-padding;
+    background-color: @search-parameter-background-color;
+    color: @base-font-color;
     font-size: 0.8em;
 
-    .exclude-icon {
-      color: @exclude-color;
-    }
-
     .parameter-value {
       font-weight: normal;
     }
@@ -94,8 +91,16 @@
       }
 
       &.value {
-        /deep/ .ng2-auto-complete-wrapper, .parameter-input {
-          display: none;
+        .parameter-input-wrapper {
+          /deep/ .ng2-auto-complete-wrapper {
+            display: none;
+          }
+        }
+
+        .no-value-options {
+          /deep/ .ng2-auto-complete {
+            display: none;
+          }
         }
 
         .value-input {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts
index 18ff715..14cc89b 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts
@@ -19,6 +19,7 @@
 import {Component, OnInit, OnDestroy, Input, ViewChild, ElementRef, forwardRef} from '@angular/core';
 import {ControlValueAccessor, NG_VALUE_ACCESSOR} from '@angular/forms';
 import {Subject} from 'rxjs/Subject';
+import {SearchBoxParameter, SearchBoxParameterProcessed, SearchBoxParameterTriggered} from '@app/classes/filtering';
 import {CommonEntry} from '@app/classes/models/common-entry';
 import {UtilsService} from '@app/services/utils.service';
 
@@ -42,7 +43,7 @@ export class SearchBoxComponent implements OnInit, OnDestroy, ControlValueAccess
     this.rootElement.addEventListener('keydown', this.onRootKeyDown);
   }
 
-  ngOnInit() {
+  ngOnInit(): void {
     this.parameterInput = this.parameterInputRef.nativeElement;
     this.valueInput = this.valueInputRef.nativeElement;
     this.parameterInput.addEventListener('focus', this.onParameterInputFocus);
@@ -52,7 +53,7 @@ export class SearchBoxComponent implements OnInit, OnDestroy, ControlValueAccess
     this.parameterAddSubject.subscribe(this.onParameterAdd);
   }
 
-  ngOnDestroy() {
+  ngOnDestroy(): void {
     this.rootElement.removeEventListener('click', this.onRootClick);
     this.rootElement.removeEventListener('keydown', this.onRootKeyDown);
     this.parameterInput.removeEventListener('focus', this.onParameterInputFocus);
@@ -62,6 +63,8 @@ export class SearchBoxComponent implements OnInit, OnDestroy, ControlValueAccess
     this.parameterAddSubject.unsubscribe();
   }
 
+  private readonly messageParameterName: string = 'log_message';
+
   private currentId: number = 0;
 
   private isExclude: boolean = false;
@@ -80,10 +83,13 @@ export class SearchBoxComponent implements OnInit, OnDestroy, ControlValueAccess
   items: CommonEntry[] = [];
 
   @Input()
-  parameterNameChangeSubject: Subject<any> = this.defaultSubject;
+  itemsOptions: {[key: string]: CommonEntry[]};
+
+  @Input()
+  parameterNameChangeSubject: Subject<SearchBoxParameterTriggered> = this.defaultSubject;
 
   @Input()
-  parameterAddSubject: Subject<any> = this.defaultSubject;
+  parameterAddSubject: Subject<SearchBoxParameter> = this.defaultSubject;
 
   @ViewChild('parameterInput')
   parameterInputRef: ElementRef;
@@ -93,13 +99,18 @@ export class SearchBoxComponent implements OnInit, OnDestroy, ControlValueAccess
 
   private rootElement: HTMLElement;
 
-  private parameterInput: HTMLElement;
+  private parameterInput: HTMLInputElement;
+
+  private valueInput: HTMLInputElement;
 
-  private valueInput: HTMLElement;
+  activeItem: CommonEntry | null = null;
 
-  activeItem?: any;
+  parameters: SearchBoxParameterProcessed[] = [];
 
-  parameters: any[] = [];
+  get activeItemValueOptions(): CommonEntry[] {
+    return this.itemsOptions && this.activeItem && this.itemsOptions[this.activeItem.value] ?
+      this.itemsOptions[this.activeItem.value] : [];
+  }
 
   private onChange: (fn: any) => void;
 
@@ -133,52 +144,80 @@ export class SearchBoxComponent implements OnInit, OnDestroy, ControlValueAccess
     }
   };
 
-  private getItem(name: string): CommonEntry {
-    return this.items.find(field => field.value === name);
+  private switchToParameterInput = (): void => {
+    this.activeItem = null;
+    this.isValueInput = false;
+    setTimeout(() => this.parameterInput.focus());
+  };
+
+  private getItemByValue(name: string): CommonEntry {
+    return this.items.find((field: CommonEntry): boolean => field.value === name);
+  }
+
+  private getItemByName(name: string): CommonEntry {
+    return this.items.find((field: CommonEntry): boolean => field.name === name);
   }
 
   clear(): void {
     this.isActive = false;
     this.activeItem = null;
-    this.currentValue = null;
+    this.currentValue = '';
+    this.parameterInput.value = '';
+    this.valueInput.value = '';
   }
 
   itemsListFormatter(item: CommonEntry): string {
     return item.name;
   }
 
-  changeParameterName(item: any): void {
-    this.parameterNameChangeSubject.next(item);
+  itemsValueFormatter(item: CommonEntry): string {
+    return item.value;
   }
 
-  onParameterNameChange = (options: any): void => {
-    this.activeItem = typeof options.item === 'string' ? this.getItem(options.item) : options.item;
-    this.isExclude = options.isExclude;
-    this.isActive = true;
-    this.isParameterInput = false;
-    this.isValueInput = true;
-    this.currentValue = '';
-    setTimeout(() => this.valueInput.focus(), 0);
+  changeParameterName(options: SearchBoxParameterTriggered): void {
+    this.parameterNameChangeSubject.next(options);
+  }
+
+  onParameterNameChange = (options: SearchBoxParameterTriggered): void => {
+    if (options.value) {
+      this.activeItem = this.getItemByValue(options.value);
+      this.isExclude = options.isExclude;
+      this.isActive = true;
+      this.isParameterInput = false;
+      this.isValueInput = true;
+      this.currentValue = '';
+      setTimeout(() => this.valueInput.focus(), 0);
+    }
   };
 
-  onParameterValueChange(event: KeyboardEvent): void {
+  onParameterValueKeyDown(event: KeyboardEvent): void {
+    if (this.utils.isBackSpacePressed(event) && !this.currentValue) {
+      this.switchToParameterInput();
+    }
+  }
+
+  onParameterValueKeyUp(event: KeyboardEvent): void {
     if (this.utils.isEnterPressed(event) && this.currentValue) {
+      this.onParameterValueChange(this.currentValue);
+    }
+  }
+
+  onParameterValueChange(value: string): void {
+    if (value) {
       this.parameters.push({
         id: this.currentId++,
         name: this.activeItem.value,
         label: this.activeItem.name,
-        value: this.currentValue,
+        value: value,
         isExclude: this.isExclude
       });
-      this.currentValue = '';
-      this.activeItem = null;
-      this.isValueInput = false;
       this.updateValue();
     }
+    this.switchToParameterInput();
   }
 
-  onParameterAdd = (options: any): void => {
-    const item = this.getItem(options.name);
+  onParameterAdd = (options: SearchBoxParameter): void => {
+    const item = this.getItemByValue(options.name);
     this.parameters.push({
       id: this.currentId++,
       name: options.name,
@@ -189,19 +228,38 @@ export class SearchBoxComponent implements OnInit, OnDestroy, ControlValueAccess
     this.updateValue();
   };
 
+  onParameterKeyUp = (event: KeyboardEvent): void => {
+    if (this.utils.isEnterPressed(event) && this.currentValue) {
+      const existingItem = this.getItemByName(this.currentValue);
+      if (existingItem) {
+        this.changeParameterName({
+          value: this.currentValue,
+          isExclude: false
+        });
+      } else {
+        this.parameterAddSubject.next({
+          name: this.messageParameterName,
+          value: this.currentValue,
+          isExclude: false
+        });
+      }
+    }
+  };
+
   removeParameter(event: MouseEvent, id: number): void {
-    this.parameters = this.parameters.filter(parameter => parameter.id !== id);
+    this.parameters = this.parameters.filter((parameter: SearchBoxParameterProcessed): boolean => parameter.id !== id);
     this.updateValue();
     event.stopPropagation();
   }
 
-  updateValue() {
+  updateValue(): void {
+    this.currentValue = '';
     if (this.onChange) {
       this.onChange(this.parameters);
     }
   }
 
-  writeValue(parameters: any [] = []) {
+  writeValue(parameters: SearchBoxParameterProcessed[] = []): void {
     this.parameters = parameters;
     this.updateValue();
   }
@@ -210,7 +268,7 @@ export class SearchBoxComponent implements OnInit, OnDestroy, ControlValueAccess
     this.onChange = callback;
   }
 
-  registerOnTouched() {
+  registerOnTouched(): void {
   }
 
 }
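
The new activeItemValueOptions getter above resolves autocomplete options for the value input from the itemsOptions map, keyed by the active parameter's value. A simplified standalone sketch of that lookup, using made-up sample data:

  // Sketch: resolve value autocomplete options from a map keyed by the active
  // parameter; fall back to an empty list.
  interface CommonEntry {
    name: string;
    value: string;
  }

  const itemsOptions: {[key: string]: CommonEntry[]} = {
    level: [{name: 'ERROR', value: 'ERROR'}, {name: 'WARN', value: 'WARN'}]
  };

  function activeItemValueOptions(activeItem: CommonEntry | null): CommonEntry[] {
    // Empty list when no parameter is active or the field has no known values.
    return activeItem && itemsOptions[activeItem.value] ? itemsOptions[activeItem.value] : [];
  }

  console.log(activeItemValueOptions({name: 'Level', value: 'level'})); // ERROR, WARN
  console.log(activeItemValueOptions(null));                            // []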

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
index 7b7fcae..18268ad 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
@@ -23,7 +23,6 @@
 @button-border-radius: 4px;
 @input-border-width: 1px;
 @input-border: @input-border-width solid #CFD3D7;
-@button-border-radius: 4px;
 @input-group-addon-padding: 6px 12px 6px 0;
 @block-margin-top: 20px;
 @link-color: #1491C1;
@@ -37,9 +36,11 @@
 @checkbox-top: 4px;
 @dropdown-min-width: 160px;
 @dropdown-max-height: 500px; // TODO get rid of magic number, base on actual design
+@dropdown-border-radius: 2px;
 @input-height: 34px;
 @input-padding: 10px;
 @col-padding: 15px;
+@search-parameter-padding: 5px 2px;
 
 @fatal-color: #830A0A;
 @error-color: #E81D1D;
@@ -51,6 +52,7 @@
 @submit-color: #5CB85C;
 @submit-hover-color: #449D44;
 @exclude-color: #EF6162;
+@search-parameter-background-color: #DDD;
 
 // Panels
 @panel-heading: rgba(255, 255, 255, 1);
@@ -63,4 +65,4 @@
 @icon-padding: 5px;
 
 // Table
-@table-border-color: #EEEEEE;
+@table-border-color: #EEE;

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.spec.ts
index 6d43ff1..c2cee8d 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.spec.ts
@@ -17,6 +17,7 @@
  */
 
 import {TestBed, inject} from '@angular/core/testing';
+import {TranslationModules} from '@app/test-config.spec';
 import {StoreModule} from '@ngrx/store';
 import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
 import {AppStateService, appState} from '@app/services/storage/app-state.service';
@@ -62,7 +63,8 @@ describe('ComponentActionsService', () => {
           serviceLogsHistogramData,
           serviceLogsTruncated,
           tabs
-        })
+        }),
+        ...TranslationModules
       ],
       providers: [
         ComponentActionsService,

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.ts
index e796183..0fc9fde 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.ts
@@ -132,7 +132,7 @@ export class ComponentActionsService {
   }
 
   proceedWithExclude = (item: string): void => this.logsContainer.queryParameterNameChange.next({
-    item: item,
+    value: item,
     isExclude: true
   });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts
index a161190..3f65cd1 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts
@@ -17,6 +17,7 @@
  */
 
 import {TestBed, inject} from '@angular/core/testing';
+import {TranslationModules} from '@app/test-config.spec';
 import {StoreModule} from '@ngrx/store';
 import {HostsService, hosts} from '@app/services/storage/hosts.service';
 import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service';
@@ -60,7 +61,8 @@ describe('ComponentGeneratorService', () => {
           components,
           serviceLogsTruncated,
           tabs
-        })
+        }),
+        ...TranslationModules
       ],
       providers: [
         ComponentGeneratorService,

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts
index 47cb25d..870058b 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts
@@ -17,6 +17,7 @@
  */
 
 import {TestBed, inject} from '@angular/core/testing';
+import {TranslationModules} from '@app/test-config.spec';
 import {StoreModule} from '@ngrx/store';
 import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service';
 import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service';
@@ -61,7 +62,8 @@ describe('LogsContainerService', () => {
           hosts,
           serviceLogsTruncated,
           tabs
-        })
+        }),
+        ...TranslationModules
       ],
       providers: [
         AuditLogsService,

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
index a715adc..64b14b8 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
@@ -27,6 +27,7 @@ import 'rxjs/add/operator/first';
 import 'rxjs/add/operator/map';
 import 'rxjs/add/operator/takeUntil';
 import * as moment from 'moment-timezone';
+import {TranslateService} from '@ngx-translate/core';
 import {HttpClientService} from '@app/services/http-client.service';
 import {AuditLogsService} from '@app/services/storage/audit-logs.service';
 import {AuditLogsFieldsService} from '@app/services/storage/audit-logs-fields.service';
@@ -41,8 +42,11 @@ import {ClustersService} from '@app/services/storage/clusters.service';
 import {ComponentsService} from '@app/services/storage/components.service';
 import {HostsService} from '@app/services/storage/hosts.service';
 import {ActiveServiceLogEntry} from '@app/classes/active-service-log-entry';
-import {FilterCondition, TimeUnitListItem, SortingListItem} from '@app/classes/filtering';
+import {
+  FilterCondition, TimeUnitListItem, SortingListItem, SearchBoxParameter, SearchBoxParameterTriggered
+} from '@app/classes/filtering';
 import {ListItem} from '@app/classes/list-item';
+import {LogsType, ScrollType, SortingType} from '@app/classes/string';
 import {Tab} from '@app/classes/models/tab';
 import {LogField} from '@app/classes/models/log-field';
 import {AuditLog} from '@app/classes/models/audit-log';
@@ -51,14 +55,15 @@ import {ServiceLog} from '@app/classes/models/service-log';
 import {ServiceLogField} from '@app/classes/models/service-log-field';
 import {BarGraph} from '@app/classes/models/bar-graph';
 import {NodeItem} from '@app/classes/models/node-item';
+import {CommonEntry} from '@app/classes/models/common-entry';
 
 @Injectable()
 export class LogsContainerService {
 
   constructor(
-    private httpClient: HttpClientService, private auditLogsStorage: AuditLogsService,
-    private auditLogsFieldsStorage: AuditLogsFieldsService, private serviceLogsStorage: ServiceLogsService,
-    private serviceLogsFieldsStorage: ServiceLogsFieldsService,
+    private translate: TranslateService, private httpClient: HttpClientService,
+    private auditLogsStorage: AuditLogsService, private auditLogsFieldsStorage: AuditLogsFieldsService,
+    private serviceLogsStorage: ServiceLogsService, private serviceLogsFieldsStorage: ServiceLogsFieldsService,
     private serviceLogsHistogramStorage: ServiceLogsHistogramDataService,
     private serviceLogsTruncatedStorage: ServiceLogsTruncatedService, private appState: AppStateService,
     private appSettings: AppSettingsService, private tabsStorage: TabsService, private clustersStorage: ClustersService,
@@ -78,7 +83,7 @@ export class LogsContainerService {
     this.loadHosts();
     appState.getParameter('activeLog').subscribe((value: ActiveServiceLogEntry | null) => this.activeLog = value);
     appState.getParameter('isServiceLogsFileView').subscribe((value: boolean) => this.isServiceLogsFileView = value);
-    appState.getParameter('activeLogsType').subscribe((value: string) => this.activeLogsType = value);
+    appState.getParameter('activeLogsType').subscribe((value: LogsType) => this.activeLogsType = value);
     appSettings.getParameter('timeZone').subscribe((value: string) => this.timeZone = value || this.defaultTimeZone);
     tabsStorage.mapCollection((tab: Tab): Tab => {
       let currentAppState = tab.appState || {};
@@ -111,6 +116,8 @@ export class LogsContainerService {
         this.loadLogs();
       });
     });
+    this.auditLogsColumns.subscribe(this.getTranslationKeysSubscriber('auditLogsColumnsTranslated'));
+    this.serviceLogsColumns.subscribe(this.getTranslationKeysSubscriber('serviceLogsColumnsTranslated'));
   }
 
   private readonly paginationOptions: string[] = ['10', '25', '50', '100'];
@@ -119,7 +126,8 @@ export class LogsContainerService {
     clusters: {
       label: 'filter.clusters',
       options: [],
-      defaultSelection: []
+      defaultSelection: [],
+      fieldName: 'cluster'
     },
     timeRange: {
       options: [
@@ -346,7 +354,8 @@ export class LogsContainerService {
       label: 'filter.components',
       iconClass: 'fa fa-cubes',
       options: [],
-      defaultSelection: []
+      defaultSelection: [],
+      fieldName: 'type'
     },
     levels: {
       label: 'filter.levels',
@@ -381,13 +390,15 @@ export class LogsContainerService {
           value: 'UNKNOWN'
         }
       ],
-      defaultSelection: []
+      defaultSelection: [],
+      fieldName: 'level'
     },
     hosts: {
       label: 'filter.hosts',
       iconClass: 'fa fa-server',
       options: [],
-      defaultSelection: []
+      defaultSelection: [],
+      fieldName: 'host'
     },
     auditLogsSorting: {
       label: 'sorting.title',
@@ -533,7 +544,7 @@ export class LogsContainerService {
 
   activeLog: ActiveServiceLogEntry | null = null;
 
-  activeLogsType: string;
+  activeLogsType: LogsType;
 
   private filtersFormChange: Subject<any> = new Subject();
 
@@ -561,10 +572,30 @@ export class LogsContainerService {
     }
   }
 
+  private getTranslationKeysSubscriber = (propertyName: string): (items: ListItem[]) => void  => {
+    return (items: ListItem[]): void => {
+      const keys = items.map((item: ListItem): string => item.label);
+      if (keys.length) {
+        this.translate.get(keys).first().subscribe((translation: {[key: string]: string}): void => {
+          this[propertyName] = items.map((item: ListItem): CommonEntry => {
+            return {
+              name: translation[item.label],
+              value: item.value
+            };
+          });
+        });
+      }
+    };
+  };
+
   auditLogsColumns: Observable<ListItem[]> = this.auditLogsFieldsStorage.getAll().map(this.columnsMapper);
 
+  auditLogsColumnsTranslated: CommonEntry[] = [];
+
   serviceLogsColumns: Observable<ListItem[]> = this.serviceLogsFieldsStorage.getAll().map(this.columnsMapper);
 
+  serviceLogsColumnsTranslated: CommonEntry[] = [];
+
   serviceLogs: Observable<ServiceLog[]> = Observable.combineLatest(this.serviceLogsStorage.getAll(), this.serviceLogsColumns).map(this.logsMapper);
 
   auditLogs: Observable<AuditLog[]> = Observable.combineLatest(this.auditLogsStorage.getAll(), this.auditLogsColumns).map(this.logsMapper);
@@ -593,9 +624,9 @@ export class LogsContainerService {
     };
   }
 
-  queryParameterNameChange: Subject<any> = new Subject();
+  queryParameterNameChange: Subject<SearchBoxParameterTriggered> = new Subject();
 
-  queryParameterAdd: Subject<any> = new Subject();
+  queryParameterAdd: Subject<SearchBoxParameter> = new Subject();
 
   private stopTimer: Subject<any> = new Subject();
 
@@ -611,7 +642,7 @@ export class LogsContainerService {
 
   private stopCaptureTime: number;
 
-  loadLogs = (logsType: string = this.activeLogsType): void => {
+  loadLogs = (logsType: LogsType = this.activeLogsType): void => {
     this.httpClient.get(logsType, this.getParams('listFilters')).subscribe((response: Response): void => {
       const jsonResponse = response.json(),
         model = this.logsTypeMap[logsType].logsModel;
@@ -640,7 +671,7 @@ export class LogsContainerService {
     }
   };
 
-  loadLogContext(id: string, hostName: string, componentName: string, scrollType: 'before' | 'after' | '' = ''): void {
+  loadLogContext(id: string, hostName: string, componentName: string, scrollType: ScrollType = ''): void {
     const params = {
       id: id,
       host_name: hostName,
@@ -671,7 +702,7 @@ export class LogsContainerService {
     });
   }
 
-  private getParams(filtersMapName: string, logsType: string = this.activeLogsType): {[key: string]: string} {
+  private getParams(filtersMapName: string, logsType: LogsType = this.activeLogsType): {[key: string]: string} {
     let params = {};
     this.logsTypeMap[logsType][filtersMapName].forEach((key: string): void => {
       const inputValue = this.filtersForm.getRawValue()[key],
@@ -787,11 +818,11 @@ export class LogsContainerService {
     return endMoment ? endMoment.toISOString() : '';
   };
 
-  private getQuery(isExclude: boolean): (value: any[]) => string {
-    return (value: any[]): string => {
+  private getQuery(isExclude: boolean): (value: SearchBoxParameter[]) => string {
+    return (value: SearchBoxParameter[]): string => {
       let parameters;
       if (value && value.length) {
-        parameters = value.filter(item => item.isExclude === isExclude).map(parameter => {
+        parameters = value.filter((item: SearchBoxParameter): boolean => item.isExclude === isExclude).map((parameter: SearchBoxParameter): {[key: string]: string} => {
           return {
             [parameter.name]: parameter.value.replace(/\s/g, '+')
           };
@@ -801,7 +832,7 @@ export class LogsContainerService {
     }
   }
 
-  private getSortType(selection: SortingListItem[] = []): 'asc' | 'desc' {
+  private getSortType(selection: SortingListItem[] = []): SortingType {
     return selection[0] && selection[0].value ? selection[0].value.type : 'desc';
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bf3c8ed/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts
index 175b585..dd9075c 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts
@@ -77,6 +77,10 @@ export class UtilsService {
     return event.keyCode === 13;
   }
 
+  isBackSpacePressed(event: KeyboardEvent): boolean {
+    return event.keyCode === 8;
+  }
+
   isDifferentDates(dateA, dateB, timeZone): boolean {
     const momentA = moment(dateA).tz(timeZone),
       momentB = moment(dateB).tz(timeZone);
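
The added isBackSpacePressed check complements the existing isEnterPressed one (keyCode 8 versus 13). A trivial sketch of how such checks are typically wired into a keyup listener; the handler bodies are placeholders:

  // Sketch: keyCode 13 is Enter, keyCode 8 is Backspace, matching UtilsService.
  function isEnterPressed(event: KeyboardEvent): boolean {
    return event.keyCode === 13;
  }

  function isBackSpacePressed(event: KeyboardEvent): boolean {
    return event.keyCode === 8;
  }

  document.addEventListener('keyup', (event: KeyboardEvent): void => {
    if (isEnterPressed(event)) {
      console.log('commit the current value');        // placeholder action
    } else if (isBackSpacePressed(event)) {
      console.log('switch back to parameter input');  // placeholder action
    }
  });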


[10/24] ambari git commit: AMBARI-22465 Post-Upgrade Tasks Use the Wrong Repository and Hooks Folders (dgrinenko)

Posted by rl...@apache.org.
AMBARI-22465 Post-Upgrade Tasks Use the Wrong Repository and Hooks Folders (dgrinenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/82af6fb6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/82af6fb6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/82af6fb6

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 82af6fb6d3f11476df203d9871cc978091891cfe
Parents: 020e152
Author: Dmytro Grinenko <ha...@apache.org>
Authored: Tue Nov 28 19:23:28 2017 +0200
Committer: Dmytro Grinenko <ha...@apache.org>
Committed: Tue Nov 28 19:25:28 2017 +0200

----------------------------------------------------------------------
 .../server/controller/internal/UpgradeResourceProvider.java  | 8 --------
 .../resources/custom_actions/scripts/ru_execute_tasks.py     | 5 +++--
 .../src/test/python/custom_actions/test_ru_execute_tasks.py  | 6 +++---
 3 files changed, 6 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/82af6fb6/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index c5303cc..bab5369 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -1053,10 +1053,6 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     // Apply additional parameters to the command that come from the stage.
     applyAdditionalParameters(wrapper, params);
 
-    // the ru_execute_tasks invokes scripts - it needs information about where
-    // the scripts live and for that it should always use the target repository
-    // stack
-    applyRepositoryAssociatedParameters(wrapper, effectiveRepositoryVersion.getStackId(), params);
 
     // add each host to this stage
     RequestResourceFilter filter = new RequestResourceFilter(serviceName, componentName,
@@ -1204,10 +1200,6 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     // Apply additional parameters to the command that come from the stage.
     applyAdditionalParameters(wrapper, commandParams);
 
-    // add things like hooks and service folders based on effective repo
-    applyRepositoryAssociatedParameters(wrapper, effectiveRepositoryVersion.getStackId(),
-        commandParams);
-
     ActionExecutionContext actionContext = new ActionExecutionContext(cluster.getClusterName(),
         "SERVICE_CHECK", filters, commandParams);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/82af6fb6/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py b/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
index c0f0d41..dff4ee1 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
@@ -102,8 +102,8 @@ class ExecuteUpgradeTasks(Script):
     version = default('/roleParams/version', None)
 
     # These 2 variables are optional
-    service_package_folder = default('/roleParams/service_package_folder', None)
-    hooks_folder = default('/roleParams/hooks_folder', None)
+    service_package_folder = default('/commandParams/service_package_folder', None)
+    hooks_folder = default('/commandParams/hooks_folder', None)
 
     tasks = json.loads(config['roleParams']['tasks'])
     if tasks:
@@ -154,5 +154,6 @@ class ExecuteUpgradeTasks(Script):
           task.command = replace_variables(task.command, host_name, version)
           shell.checked_call(task.command, logoutput=True, quiet=True)
 
+
 if __name__ == "__main__":
   ExecuteUpgradeTasks().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/82af6fb6/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py b/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
index 0d12a91..17d5e77 100644
--- a/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
+++ b/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
@@ -146,8 +146,8 @@ class TestRUExecuteTasks(RMFTestCase):
     with open(json_file_path, "r") as json_file:
       json_payload = json.load(json_file)
 
-    del json_payload['roleParams']['service_package_folder']
-    del json_payload['roleParams']['hooks_folder']
+    del json_payload['commandParams']['service_package_folder']
+    del json_payload['commandParams']['hooks_folder']
 
     config_dict = ConfigDictionary(json_payload)
 
@@ -166,7 +166,7 @@ class TestRUExecuteTasks(RMFTestCase):
     # Ensure that the json file was actually read.
     stack_name = default("/hostLevelParams/stack_name", None)
     stack_version = default("/hostLevelParams/stack_version", None)
-    service_package_folder = default('/roleParams/service_package_folder', None)
+    service_package_folder = default('/commandParams/service_package_folder', None)
 
     self.assertEqual(stack_name, "HDP")
     self.assertEqual(stack_version, '2.2')


[11/24] ambari git commit: AMBARI-22458 - SNMP Trap should contain sysuptime field filled (unit test fixes) (jonathanhurley)

Posted by rl...@apache.org.
AMBARI-22458 - SNMP Trap should contain sysuptime field filled (unit test fixes) (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/aa652afe
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/aa652afe
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/aa652afe

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: aa652afe18c26ebbb52ffa1d8ecfad4270a093d7
Parents: 82af6fb
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Nov 28 12:23:31 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Nov 28 12:31:13 2017 -0500

----------------------------------------------------------------------
 .../dispatchers/AmbariSNMPDispatcherTest.java   | 23 +++++----
 .../AlertNoticeDispatchServiceTest.java         | 53 +++++++++++++++++---
 2 files changed, 59 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/aa652afe/ambari-server/src/test/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcherTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcherTest.java b/ambari-server/src/test/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcherTest.java
index 0b299f6..14b6216 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcherTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/notifications/dispatchers/AmbariSNMPDispatcherTest.java
@@ -21,11 +21,11 @@ package org.apache.ambari.server.notifications.dispatchers;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.any;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.doNothing;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.eq;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.spy;
@@ -36,6 +36,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 import org.apache.ambari.server.notifications.DispatchCallback;
@@ -90,7 +91,7 @@ public class AmbariSNMPDispatcherTest {
         AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv1;
         Notification notification = mock(AlertNotification.class);
         notification.Callback = mock(DispatchCallback.class);
-        notification.CallbackIds = new ArrayList<>();
+        notification.CallbackIds = mock(List.class);
         Map<String, String> properties = new HashMap<>();
         properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "3");
         properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "4");
@@ -109,7 +110,7 @@ public class AmbariSNMPDispatcherTest {
         AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
         Notification notification = mock(AlertNotification.class);
         notification.Callback = mock(DispatchCallback.class);
-        notification.CallbackIds = new ArrayList<>();
+        notification.CallbackIds = mock(List.class);
         notification.DispatchProperties = new HashMap<>();
         dispatcher.dispatch(notification);
         verify(notification.Callback).onFailure(notification.CallbackIds);
@@ -121,7 +122,7 @@ public class AmbariSNMPDispatcherTest {
         AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
         Notification notification = getAlertNotification(true);
         notification.Callback = mock(DispatchCallback.class);
-        notification.CallbackIds = new ArrayList<>();
+        notification.CallbackIds = mock(List.class);
         Map<String, String> properties = new HashMap<>();
         properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "3");
         properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "4");
@@ -137,7 +138,7 @@ public class AmbariSNMPDispatcherTest {
         AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
         Notification notification = getAlertNotification(true);
         notification.Callback = mock(DispatchCallback.class);
-        notification.CallbackIds = new ArrayList<>();
+        notification.CallbackIds = mock(List.class);
         Map<String, String> properties = new HashMap<>();
         properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "3");
         properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "4");
@@ -209,7 +210,7 @@ public class AmbariSNMPDispatcherTest {
         AmbariSNMPDispatcher.SnmpVersion snmpVersion = AmbariSNMPDispatcher.SnmpVersion.SNMPv2c;
         Notification notification = mock(AlertNotification.class);
         notification.Callback = mock(DispatchCallback.class);
-        notification.CallbackIds = new ArrayList<>();
+        notification.CallbackIds = mock(List.class);
         Map<String, String> properties = new HashMap<>();
         properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "3");
         properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "4");
@@ -227,7 +228,7 @@ public class AmbariSNMPDispatcherTest {
         AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(DEFAULT_SNMP_PORT);
         Notification notification = getAlertNotification(true);
         notification.Callback = mock(DispatchCallback.class);
-        notification.CallbackIds = new ArrayList<>();
+        notification.CallbackIds = mock(List.class);
         Map<String, String> properties = new HashMap<>();
         properties.put(AmbariSNMPDispatcher.PORT_PROPERTY, "162");
         properties.put(AmbariSNMPDispatcher.COMMUNITY_PROPERTY, "public");
@@ -256,7 +257,7 @@ public class AmbariSNMPDispatcherTest {
         for (VariableBinding variableBinding : pdu.toArray()) {
             variableBindings.put(variableBinding.getOid().toString(), variableBinding);
         }
-        assertEquals(10, variableBindings.size());
+    assertEquals(11, variableBindings.size());
         assertEquals(AmbariSNMPDispatcher.AMBARI_ALERT_TRAP_OID, variableBindings.get(SnmpConstants.snmpTrapOID.toString()).toValueString());
         assertTrue(variableBindings.get(SnmpConstants.snmpTrapOID.toString()).getVariable() instanceof OID);
         assertEquals(String.valueOf(DEFINITION_ID), variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_ID_OID).toValueString());
@@ -288,7 +289,7 @@ public class AmbariSNMPDispatcherTest {
         for (VariableBinding variableBinding : pdu.toArray()) {
             variableBindings.put(variableBinding.getOid().toString(), variableBinding);
         }
-        assertEquals(10, variableBindings.size());
+    assertEquals(11, variableBindings.size());
         assertEquals("null", variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_COMPONENT_NAME_OID).toValueString());
     }
 
@@ -305,7 +306,7 @@ public class AmbariSNMPDispatcherTest {
             variableBindings.put(variableBinding.getOid().toString(), variableBinding);
         }
 
-        assertEquals(10, variableBindings.size());
+    assertEquals(11, variableBindings.size());
         assertEquals(AmbariSNMPDispatcher.AMBARI_ALERT_TRAP_OID, variableBindings.get(SnmpConstants.snmpTrapOID.toString()).toValueString());
         assertEquals(String.valueOf(DEFINITION_ID), variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_ID_OID).toValueString());
         assertEquals(DEFINITION_NAME, variableBindings.get(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_NAME_OID).toValueString());

http://git-wip-us.apache.org/repos/asf/ambari/blob/aa652afe/ambari-server/src/test/java/org/apache/ambari/server/state/services/AlertNoticeDispatchServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/services/AlertNoticeDispatchServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/services/AlertNoticeDispatchServiceTest.java
index 0bb118a..c86c51d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/services/AlertNoticeDispatchServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/services/AlertNoticeDispatchServiceTest.java
@@ -19,11 +19,15 @@ package org.apache.ambari.server.state.services;
 
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
+import java.lang.management.ManagementFactory;
+import java.lang.management.RuntimeMXBean;
 import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.Calendar;
@@ -36,27 +40,39 @@ import java.util.UUID;
 import java.util.Vector;
 import java.util.concurrent.Executor;
 
+import javax.persistence.EntityManager;
+
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.notifications.DispatchFactory;
 import org.apache.ambari.server.notifications.Notification;
 import org.apache.ambari.server.notifications.NotificationDispatcher;
 import org.apache.ambari.server.notifications.TargetConfigurationResult;
 import org.apache.ambari.server.notifications.dispatchers.AmbariSNMPDispatcher;
-import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.AlertDispatchDAO;
+import org.apache.ambari.server.orm.dao.AlertsDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.AlertHistoryEntity;
 import org.apache.ambari.server.orm.entities.AlertNoticeEntity;
 import org.apache.ambari.server.orm.entities.AlertTargetEntity;
+import org.apache.ambari.server.stack.StackManagerFactory;
 import org.apache.ambari.server.state.AlertState;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.NotificationState;
 import org.apache.ambari.server.state.alert.Scope;
 import org.apache.ambari.server.state.alert.SourceType;
 import org.apache.ambari.server.state.alert.TargetType;
+import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.EasyMock;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.api.easymock.PowerMock;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 import org.snmp4j.CommandResponder;
 import org.snmp4j.CommandResponderEvent;
 import org.snmp4j.PDU;
@@ -68,6 +84,7 @@ import org.snmp4j.smi.GenericAddress;
 import org.snmp4j.smi.Integer32;
 import org.snmp4j.smi.OID;
 import org.snmp4j.smi.OctetString;
+import org.snmp4j.smi.TimeTicks;
 import org.snmp4j.smi.VariableBinding;
 import org.snmp4j.transport.DefaultUdpTransportMapping;
 
@@ -75,11 +92,12 @@ import com.google.inject.Binder;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
 import com.google.inject.Module;
-import com.google.inject.util.Modules;
 
 /**
  * Tests the {@link AlertNoticeDispatchService}.
  */
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ AmbariSNMPDispatcher.class, ManagementFactory.class })
 public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
 
   final static String ALERT_NOTICE_UUID_1 = UUID.randomUUID().toString();
@@ -92,6 +110,7 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
   private DispatchFactory m_dispatchFactory = null;
   private AlertDispatchDAO m_dao = null;
   private Injector m_injector;
+  private RuntimeMXBean m_runtimeMXBean;
 
   List<AlertDefinitionEntity> m_definitions = new ArrayList<>();
   List<AlertHistoryEntity> m_histories = new ArrayList<>();
@@ -103,8 +122,7 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
     m_metaInfo = createNiceMock(AmbariMetaInfo.class);
 
     // create an injector which will inject the mocks
-    m_injector = Guice.createInjector(Modules.override(
-        new InMemoryDefaultTestModule()).with(new MockModule()));
+    m_injector = Guice.createInjector(new MockModule());
 
     Assert.assertNotNull(m_injector);
 
@@ -150,7 +168,17 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
         m_histories.add(history);
       }
     }
-  }
+
+    // mock out the uptime to a fixed value (since most tests are not
+    // verifying system uptime)
+    m_runtimeMXBean = EasyMock.createNiceMock(RuntimeMXBean.class);
+    PowerMock.mockStatic(ManagementFactory.class);
+    expect(ManagementFactory.getRuntimeMXBean()).andReturn(m_runtimeMXBean).atLeastOnce();
+    PowerMock.replay(ManagementFactory.class);
+    expect(m_runtimeMXBean.getUptime()).andReturn(360000L).atLeastOnce();
+
+    replay(m_runtimeMXBean);
+  }
 
   /**
    * Tests the parsing of the {@link AlertHistoryEntity} list into
@@ -381,7 +409,8 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
 
     List<Vector> expectedTrapVectors = new LinkedList<>();
     Vector firstVector = new Vector();
-    firstVector.add(new VariableBinding(SnmpConstants.snmpTrapOID, new OID(AmbariSNMPDispatcher.AMBARI_ALERT_TRAP_OID)));
+    firstVector.add(new VariableBinding(SnmpConstants.sysUpTime, new TimeTicks(360000L)));
+    firstVector.add(new VariableBinding(SnmpConstants.snmpTrapOID, new OID(AmbariSNMPDispatcher.AMBARI_ALERT_TRAP_OID)));
     firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_ID_OID), new Integer32(new BigDecimal(1L).intValueExact())));
     firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_NAME_OID), new OctetString("alert-definition-1")));
     firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_HASH_OID), new OctetString("1")));
@@ -762,9 +791,20 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
      */
     @Override
     public void configure(Binder binder) {
+      Cluster cluster = EasyMock.createNiceMock(Cluster.class);
       binder.bind(AlertDispatchDAO.class).toInstance(m_dao);
       binder.bind(DispatchFactory.class).toInstance(m_dispatchFactory);
+      binder.bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
       binder.bind(AmbariMetaInfo.class).toInstance(m_metaInfo);
+      binder.bind(Clusters.class).toInstance(createNiceMock(Clusters.class));
+      binder.bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+      binder.bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+      binder.bind(Cluster.class).toInstance(cluster);
+      binder.bind(AlertDefinitionDAO.class).toInstance(createNiceMock(AlertDefinitionDAO.class));
+      binder.bind(AlertsDAO.class).toInstance(createNiceMock(AlertsDAO.class));
+      binder.bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
+
+      binder.bind(AlertNoticeDispatchService.class).toInstance(new AlertNoticeDispatchService());
 
       EasyMock.expect(m_metaInfo.getServerVersion()).andReturn("2.0.0").anyTimes();
       EasyMock.replay(m_metaInfo);
@@ -782,6 +822,7 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
       receivedTrapsVectors = new LinkedList<>();
 
       CommandResponder trapPrinter = new CommandResponder() {
+        @Override
         public synchronized void processPdu(CommandResponderEvent e){
           PDU command = e.getPDU();
           if (command != null) {

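For reference, the SNMPv2 trap format requires sysUpTime.0 and snmpTrapOID.0 as the first two variable bindings of the trap PDU, which is what the added TimeTicks binding and the PowerMock'ed RuntimeMXBean uptime above exercise. A minimal Python sketch of that ordering check (illustration only, not Ambari code; the OIDs are the standard sysUpTime.0 and snmpTrapOID.0 values):

# Illustration only (not Ambari code): verify that a trap's variable bindings
# start with sysUpTime.0 and snmpTrapOID.0, as the test above now expects.
SYS_UPTIME_OID = "1.3.6.1.2.1.1.3.0"
SNMP_TRAP_OID = "1.3.6.1.6.3.1.1.4.1.0"

def trap_varbinds_are_well_formed(varbinds):
    # varbinds is a list of (oid, value) tuples in wire order
    return (len(varbinds) >= 2
            and varbinds[0][0] == SYS_UPTIME_OID
            and varbinds[1][0] == SNMP_TRAP_OID)

bindings = [
    (SYS_UPTIME_OID, 360000),                      # uptime mocked via RuntimeMXBean above
    (SNMP_TRAP_OID, "<ambari alert trap OID>"),    # placeholder for the Ambari trap OID
    ("<ambari alert definition id OID>", 1),       # placeholder for the payload bindings
]
assert trap_varbinds_are_well_formed(bindings)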

[16/24] ambari git commit: AMBARI-22545 - Tie MapReduce to Hive and Tez For Patch Upgrades (jonathanhurley)

Posted by rl...@apache.org.
AMBARI-22545 - Tie MapReduce to Hive and Tez For Patch Upgrades (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ff3eca0e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ff3eca0e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ff3eca0e

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: ff3eca0ef9f33c45c162669a94a61a6a172ece3a
Parents: 0cf8276
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Nov 29 09:47:46 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Nov 29 10:59:02 2017 -0500

----------------------------------------------------------------------
 .../resources/stacks/HDP/2.0.6/properties/stack_packages.json    | 4 +++-
 .../main/resources/stacks/HDP/3.0/properties/stack_packages.json | 4 +++-
 2 files changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ff3eca0e/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
index 794d2b0..1783655 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
@@ -1277,7 +1277,9 @@
       }
     },
     "upgrade-dependencies" : {
-      "HIVE": ["TEZ"],
+      "HIVE": ["TEZ", "MAPREDUCE2"],
+      "TEZ": ["HIVE"],
+      "MAPREDUCE2": ["HIVE"],
       "MAHOUT": ["MAPREDUCE2"],
       "OOZIE": ["MAPREDUCE2"]
     }    

http://git-wip-us.apache.org/repos/asf/ambari/blob/ff3eca0e/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json
index 5fb61c8..4b4d0bb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json
@@ -1154,7 +1154,9 @@
       }
     },
     "upgrade-dependencies" : {
-      "HIVE": ["TEZ"],
+      "HIVE": ["TEZ", "MAPREDUCE2"],
+      "TEZ": ["HIVE"],
+      "MAPREDUCE2": ["HIVE"],
       "MAHOUT": ["MAPREDUCE2"],
       "OOZIE": ["MAPREDUCE2"]
     }    
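
The upgrade-dependencies map above declares which services must be upgraded together during a patch upgrade; with the new entries, a patch that targets any of HIVE, TEZ, or MAPREDUCE2 ties the three together, directly or through HIVE. A minimal Python sketch of expanding a selection through that map, assuming it is applied transitively (an illustration of how such a map can be consumed, not the actual Ambari server logic):

# Sketch only: expand selected services through the upgrade-dependencies map.
UPGRADE_DEPENDENCIES = {
    "HIVE": ["TEZ", "MAPREDUCE2"],
    "TEZ": ["HIVE"],
    "MAPREDUCE2": ["HIVE"],
    "MAHOUT": ["MAPREDUCE2"],
    "OOZIE": ["MAPREDUCE2"],
}

def services_for_patch(selected):
    # transitive closure of the selected services over the dependency map
    result, stack = set(), list(selected)
    while stack:
        service = stack.pop()
        if service in result:
            continue
        result.add(service)
        stack.extend(UPGRADE_DEPENDENCIES.get(service, []))
    return sorted(result)

print(services_for_patch(["TEZ"]))   # ['HIVE', 'MAPREDUCE2', 'TEZ']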


[21/24] ambari git commit: AMBARI-22480. Validate blueprint does not allow lzo enable without setup with license agreement. JavaDocs update. (mpapirkovskyy)

Posted by rl...@apache.org.
AMBARI-22480. Validate blueprint does not allow lzo enable without setup with license agreement. JavaDocs update. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/82692bd2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/82692bd2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/82692bd2

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 82692bd29d6cbc4f1eba0ae4e442e1d691843cb2
Parents: 9bbc0ef
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Tue Nov 21 17:16:51 2017 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Wed Nov 29 18:30:08 2017 +0200

----------------------------------------------------------------------
 .../main/java/org/apache/ambari/server/topology/BlueprintImpl.java  | 1 +
 .../java/org/apache/ambari/server/topology/BlueprintValidator.java  | 1 +
 2 files changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/82692bd2/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
index 6801e33..be3547a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
@@ -351,6 +351,7 @@ public class BlueprintImpl implements Blueprint {
    * Validate blueprint configuration.
    *
    * @throws InvalidTopologyException if the blueprint configuration is invalid
+   * @throws GPLLicenseNotAcceptedException if Ambari is configured to use GPL software but the GPL license has not been accepted
    */
   @Override
   public void validateRequiredProperties() throws InvalidTopologyException, GPLLicenseNotAcceptedException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/82692bd2/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java
index 156fe8c..0f83168 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java
@@ -36,6 +36,7 @@ public interface BlueprintValidator {
    * This doesn't include password properties.
    *
    * @throws InvalidTopologyException if required properties are not set in blueprint
+   * @throws GPLLicenseNotAcceptedException if Ambari is configured to use GPL software but the GPL license has not been accepted
    */
   void validateRequiredProperties() throws InvalidTopologyException, GPLLicenseNotAcceptedException;
 }


[15/24] ambari git commit: AMBARI-22534 Package install fails with HDP-2.5.3/2.6.4.0 version at atlas falcon plugin install (dgrinenko)

Posted by rl...@apache.org.
AMBARI-22534 Package install fails with HDP-2.5.3/2.6.4.0 version at atlas falcon plugin install (dgrinenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0cf82767
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0cf82767
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0cf82767

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 0cf82767c8f90ff858b678dde98c3eaba7d1fad4
Parents: 7fd404a
Author: Dmytro Grinenko <ha...@apache.org>
Authored: Wed Nov 29 11:04:54 2017 +0200
Committer: Dmytro Grinenko <ha...@apache.org>
Committed: Wed Nov 29 11:07:26 2017 +0200

----------------------------------------------------------------------
 .../stacks/HDP/2.5/services/FALCON/metainfo.xml |  4 +--
 .../stacks/HDP/2.6/services/FALCON/metainfo.xml | 26 ++++++++++++++++++++
 2 files changed, 28 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0cf82767/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml
index 1066f6c..79296c3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml
@@ -30,7 +30,7 @@
               <name>falcon_${stack_version}</name>
             </package>
             <package>
-              <name>atlas-metadata_${stack_version}-falcon-plugin</name>
+              <name>atlas-metadata_${stack_version}-hive-plugin</name>
               <condition>should_install_falcon_atlas_hook</condition>
             </package>
           </packages>
@@ -42,7 +42,7 @@
               <name>falcon-${stack_version}</name>
             </package>
             <package>
-              <name>atlas-metadata-${stack_version}-falcon-plugin</name>
+              <name>atlas-metadata-${stack_version}-hive-plugin</name>
               <condition>should_install_falcon_atlas_hook</condition>
             </package>
           </packages>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0cf82767/ambari-server/src/main/resources/stacks/HDP/2.6/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/FALCON/metainfo.xml
index fdeca9b..c4a0c98 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/FALCON/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/FALCON/metainfo.xml
@@ -21,6 +21,32 @@
     <service>
       <name>FALCON</name>
       <version>0.10.0</version>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>redhat7,amazon2015,redhat6,suse11,suse12</osFamily>
+          <packages>
+            <package>
+              <name>falcon_${stack_version}</name>
+            </package>
+            <package>
+              <name>atlas-metadata_${stack_version}-falcon-plugin</name>
+              <condition>should_install_falcon_atlas_hook</condition>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>debian7,ubuntu12,ubuntu14,ubuntu16</osFamily>
+          <packages>
+            <package>
+              <name>falcon-${stack_version}</name>
+            </package>
+            <package>
+              <name>atlas-metadata-${stack_version}-falcon-plugin</name>
+              <condition>should_install_falcon_atlas_hook</condition>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
     </service>
   </services>
 </metainfo>
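
The osSpecifics block added for HDP 2.6 selects package names per OS family and installs the Atlas hook package only when the should_install_falcon_atlas_hook condition holds. A small Python sketch of that selection (illustration only; the package names are the templates from the metainfo above, and the condition is modeled here as a simple boolean flag rather than Ambari's real condition evaluation):

# Sketch only: pick FALCON packages for an OS family, honoring the hook condition.
OS_PACKAGES = {
    "redhat7": ["falcon_${stack_version}",
                ("atlas-metadata_${stack_version}-falcon-plugin", "should_install_falcon_atlas_hook")],
    "ubuntu16": ["falcon-${stack_version}",
                 ("atlas-metadata-${stack_version}-falcon-plugin", "should_install_falcon_atlas_hook")],
}

def packages_to_install(os_family, conditions):
    selected = []
    for entry in OS_PACKAGES.get(os_family, []):
        if isinstance(entry, tuple):
            name, condition = entry
            if conditions.get(condition):
                selected.append(name)
        else:
            selected.append(entry)
    return selected

print(packages_to_install("redhat7", {"should_install_falcon_atlas_hook": False}))
# ['falcon_${stack_version}']  -> the Atlas hook package is skipped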


[17/24] ambari git commit: AMBARI-22542. Upgrade Ember and jquery version for Ambari Views (pallavkul)

Posted by rl...@apache.org.
AMBARI-22542. Upgrade Ember and jquery version for Ambari Views (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/77503985
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/77503985
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/77503985

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 7750398508b7e4dc223379be107610b5a0e792fd
Parents: ff3eca0
Author: pallavkul <pa...@gmail.com>
Authored: Wed Nov 29 21:53:49 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Wed Nov 29 21:53:49 2017 +0530

----------------------------------------------------------------------
 contrib/views/files/src/main/resources/ui/bower.json             | 4 ++--
 .../views/hive-next/src/main/resources/ui/hive-web/bower.json    | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/77503985/contrib/views/files/src/main/resources/ui/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/bower.json b/contrib/views/files/src/main/resources/ui/bower.json
index f1472ee..dacb523 100644
--- a/contrib/views/files/src/main/resources/ui/bower.json
+++ b/contrib/views/files/src/main/resources/ui/bower.json
@@ -1,12 +1,12 @@
 {
   "name": "files-view",
   "dependencies": {
-    "ember": "2.2.0",
+    "ember": "2.2.1",
     "ember-cli-shims": "0.1.0",
     "ember-cli-test-loader": "0.2.2",
     "ember-load-initializers": "0.1.7",
     "ember-qunit-notifications": "0.1.0",
-    "jquery": "1.11.3",
+    "jquery": "2.x",
     "loader.js": "^3.5.0",
     "qunit": "~1.20.0",
     "bootstrap": "~3.3.6",

http://git-wip-us.apache.org/repos/asf/ambari/blob/77503985/contrib/views/hive-next/src/main/resources/ui/hive-web/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/bower.json b/contrib/views/hive-next/src/main/resources/ui/hive-web/bower.json
index 62861b2..e467a59 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/bower.json
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/bower.json
@@ -1,8 +1,8 @@
 {
   "name": "hive",
   "dependencies": {
-    "jquery": "1.11.3",
-    "ember": "1.10.0",
+    "jquery": "2.x",
+    "ember": "1.11.4",
     "ember-data": "1.0.0-beta.16.1",
     "ember-resolver": "~0.1.12",
     "loader.js": "stefanpenner/loader.js#3.2.0",


[07/24] ambari git commit: AMBARI-22525 - ATS start failed during patch upgrade due to CNF SparkATSPlugin (jonathanhurley)

Posted by rl...@apache.org.
AMBARI-22525 - ATS start failed during patch upgrade due to CNF SparkATSPlugin (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3fa1289e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3fa1289e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3fa1289e

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 3fa1289eaaf9c30985bcf5a50725080a9eb9b368
Parents: 393fdb8
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Nov 27 13:58:38 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Nov 28 10:24:31 2017 -0500

----------------------------------------------------------------------
 .../libraries/functions/component_version.py          |  9 +++++----
 .../YARN/2.1.0.2.0/package/scripts/params_linux.py    |  6 ++++++
 .../YARN/3.0.0.3.0/package/scripts/params_linux.py    |  6 ++++++
 .../HDP/2.5/services/YARN/configuration/yarn-site.xml |  4 ++--
 .../stacks/HDP/2.5/services/stack_advisor.py          |  2 +-
 .../stacks/HDP/2.6/upgrades/config-upgrade.xml        | 14 ++++++++++++--
 .../HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml       | 10 ++++++++--
 .../resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml |  4 ++++
 .../HDP/3.0/services/YARN/configuration/yarn-site.xml |  4 ++--
 .../1.0/services/FAKEYARN/configuration/yarn-site.xml |  4 ++--
 .../python/stacks/2.5/common/test_stack_advisor.py    |  2 +-
 11 files changed, 49 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3fa1289e/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py b/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
index 169b339..fd18232 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
@@ -20,7 +20,7 @@ limitations under the License.
 
 from resource_management.libraries.script.script import Script
 
-def get_component_repository_version(service_name = None, component_name = None):
+def get_component_repository_version(service_name = None, component_name = None, default_value = None):
   """
   Gets the version associated with the specified component from the structure in the command.
   Every command should contain a mapping of service/component to the desired repository it's set
@@ -28,22 +28,23 @@ def get_component_repository_version(service_name = None, component_name = None)
 
   :service_name: the name of the service
   :component_name: the name of the component
+  :default_value: the value to return if either the service or the component is not found
   """
   config = Script.get_config()
 
   versions = _get_component_repositories(config)
   if versions is None:
-    return None
+    return default_value
 
   if service_name is None:
     service_name = config['serviceName'] if config is not None and 'serviceName' in config else None
 
   if service_name is None or service_name not in versions:
-    return None
+    return default_value
 
   component_versions = versions[service_name]
   if len(component_versions) == 0:
-    return None
+    return default_value
 
   if component_name is None:
     component_name = config["role"] if config is not None and "role" in config else None

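With the new default_value parameter, callers can fall back to another version when the service or component is missing from the command's repository mapping, as the params_linux.py hunks below do for Spark. A short usage sketch (the version string is a hypothetical example value):

# Sketch of the new default_value fallback (mirrors the params_linux.py hunks below).
from resource_management.libraries.functions import component_version

version = "2.6.4.0-91"   # hypothetical value of default("/commandParams/version", None)
spark_version = component_version.get_component_repository_version(
    "SPARK", "SPARK_CLIENT", default_value=version)
# If SPARK/SPARK_CLIENT is absent from the command's repository mapping,
# spark_version now falls back to the YARN version instead of None.
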
http://git-wip-us.apache.org/repos/asf/ambari/blob/3fa1289e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 3e4504d..b244af0 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -84,6 +84,12 @@ stack_supports_timeline_state_store = check_stack_feature(StackFeature.TIMELINE_
 # It cannot be used during the initial Cluser Install because the version is not yet known.
 version = default("/commandParams/version", None)
 
+# these are used to render the classpath for picking up Spark classes
+# in the event that spark is not installed, then we must default to the vesrion of YARN installed
+# since it will still load classes from its own spark version
+spark_version = component_version.get_component_repository_version("SPARK", "SPARK_CLIENT", default_value = version)
+spark2_version = component_version.get_component_repository_version("SPARK2", "SPARK2_CLIENT", default_value = version)
+
 stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
 stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/3fa1289e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
index 617dc3b..372ae85 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
@@ -84,6 +84,12 @@ stack_supports_timeline_state_store = check_stack_feature(StackFeature.TIMELINE_
 # It cannot be used during the initial Cluser Install because the version is not yet known.
 version = default("/commandParams/version", None)
 
+# these are used to render the classpath for picking up Spark classes
+# in the event that spark is not installed, then we must default to the vesrion of YARN installed
+# since it will still load classes from its own spark version
+spark_version = component_version.get_component_repository_version("SPARK", "SPARK_CLIENT", default_value = version)
+spark2_version = component_version.get_component_repository_version("SPARK2", "SPARK2_CLIENT", default_value = version)
+
 stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
 stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/3fa1289e/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/configuration/yarn-site.xml
index b6fadcb..29833fb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/configuration/yarn-site.xml
@@ -31,13 +31,13 @@
   </property>
   <property>
     <name>yarn.nodemanager.aux-services.spark_shuffle.classpath</name>
-    <value>{{stack_root}}/${hdp.version}/spark/aux/*</value>
+    <value>{{stack_root}}/{{spark_version}}/spark/aux/*</value>
     <description>The auxiliary service classpath to use for Spark</description>
     <on-ambari-upgrade add="false"/>
   </property>
   <property>
     <name>yarn.nodemanager.aux-services.spark2_shuffle.classpath</name>
-    <value>{{stack_root}}/${hdp.version}/spark2/aux/*</value>
+    <value>{{stack_root}}/{{spark2_version}}/spark2/aux/*</value>
     <description>The auxiliary service classpath to use for Spark 2</description>
     <on-ambari-upgrade add="false"/>
   </property>

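These classpath values are {{param}} templates rendered against the stack scripts' params (the new spark_version/spark2_version variables above), so {{spark_version}} now expands to the Spark repository version, or its YARN fallback, instead of relying on the ${hdp.version} Java property. A minimal sketch of the substitution (illustration only; the version strings are hypothetical):

# Sketch only: how the templated classpath resolves once spark_version comes from params.
def render(value, params):
    for key, val in params.items():
        value = value.replace("{{%s}}" % key, val)
    return value

params = {"stack_root": "/usr/hdp", "spark_version": "2.6.3.0-235"}  # hypothetical values
print(render("{{stack_root}}/{{spark_version}}/spark/aux/*", params))
# /usr/hdp/2.6.3.0-235/spark/aux/*  -> a concrete versioned path instead of ${hdp.version}
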
http://git-wip-us.apache.org/repos/asf/ambari/blob/3fa1289e/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index b6f2478..6f409cd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -823,7 +823,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
 
     if self.isServiceDeployed(services, "SPARK"):
       timeline_plugin_classes_values.append('org.apache.spark.deploy.history.yarn.plugin.SparkATSPlugin')
-      timeline_plugin_classpath_values.append(stack_root + "/${hdp.version}/spark/hdpLib/*")
+      timeline_plugin_classpath_values.append(stack_root + "/{{spark_version}}/spark/hdpLib/*")
 
     putYarnSiteProperty('yarn.timeline-service.entity-group-fs-store.group-id-plugin-classes', ",".join(timeline_plugin_classes_values))
     putYarnSiteProperty('yarn.timeline-service.entity-group-fs-store.group-id-plugin-classpath', ":".join(timeline_plugin_classpath_values))

http://git-wip-us.apache.org/repos/asf/ambari/blob/3fa1289e/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
index bcb13bc..9311877 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
@@ -162,14 +162,13 @@
               if-key-state="absent" />
           </definition>
 
-
           <definition xsi:type="configure" id="hdp_2_6_yarn_app_classpath_parameterization" summary="Application Classpath Parameterization">
             <type>yarn-site</type>
             <replace key="yarn.application.classpath" find="/usr/hdp/current/hadoop-client" replace-with="{{hadoop_home}}"/>
           </definition>
-
         </changes>
       </component>
+
       <component name="NODEMANAGER">
         <changes>
           <definition xsi:type="configure" id="hdp_2_6_0_0_yarn_nodemanager_llap_mem">
@@ -179,6 +178,17 @@
           </definition>
         </changes>
       </component>
+
+      <component name="APP_TIMELINE_SERVER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_yarn_spark_versioned_classpaths">
+            <type>yarn-site</type>
+            <set key="yarn.nodemanager.aux-services.spark_shuffle.classpath" value="{{stack_root}}/{{spark_version}}/spark/aux/*"/>
+            <set key="yarn.nodemanager.aux-services.spark2_shuffle.classpath" value="{{stack_root}}/{{spark2_version}}/spark2/aux/*"/>
+            <replace key="yarn.timeline-service.entity-group-fs-store.group-id-plugin-classpath" find="${hdp.version}/spark/" replace-with="{{spark_version}}/spark/"/>
+          </definition>
+        </changes>
+      </component>
     </service>
 
     <service name="KAFKA">

http://git-wip-us.apache.org/repos/asf/ambari/blob/3fa1289e/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
index 0355362..2184060 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
@@ -147,6 +147,7 @@
         <component>DRUID_SUPERSET</component>
       </service>
 
+
     </group>
 
     <group xsi:type="cluster" name="Backups" title="Perform Backups">
@@ -351,7 +352,12 @@
         <task xsi:type="configure" id="hdp_2_6_yarn_app_classpath_parameterization" supports-patch="true"/>
       </execute-stage>
 
-      <!-- YARN -->
+      <execute-stage service="YARN" component="APP_TIMELINE_SERVER" title="Spark Versioned Classpaths">
+        <task xsi:type="configure" id="hdp_2_6_yarn_spark_versioned_classpaths" supports-patch="true">
+          <summary>Updating the Spark classpath entries to use the specific version of Spark which is installed.</summary>
+        </task>
+      </execute-stage>
+
       <execute-stage service="YARN" component="NODEMANAGER" title="Apply config changes for YARN NM">
         <task xsi:type="configure" id="hdp_2_6_0_0_yarn_nodemanager_llap_mem">
           <summary>Updating YARN NodeManager config for LLAP</summary>
@@ -414,7 +420,7 @@
       <execute-stage service="TEZ" component="TEZ_CLIENT" title="Updating LD Library Classpath for Tarball" >
         <task xsi:type="configure" id="hdp_2_6_tez_tarball_ld_library" supports-patch="true"/>
       </execute-stage>
-      
+
       <!-- MapR -->
       <execute-stage service="MAPREDUCE2" component="MAPREDUCE2_CLIENT" title="Updating LD Library Classpath for Tarball">
         <task xsi:type="configure" id="hdp_2_6_mapreduce_tarball_ld_library" supports-patch="true"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3fa1289e/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
index 5aa2d20..70f84ab 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
@@ -690,6 +690,10 @@
 
     <service name="YARN">
       <component name="APP_TIMELINE_SERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_6_yarn_spark_versioned_classpaths" supports-patch="true"/>
+        </pre-upgrade>
+        <pre-downgrade/>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3fa1289e/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration/yarn-site.xml
index 01551c4..d32e2de 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration/yarn-site.xml
@@ -20,7 +20,7 @@
 
   <property>
     <name>yarn.nodemanager.aux-services.spark_shuffle.classpath</name>
-    <value>{{stack_root}}/${hdp.version}/spark/aux/*</value>
+    <value>{{stack_root}}/{{spark_version}}/spark/aux/*</value>
     <description>The auxiliary service classpath to use for Spark</description>
     <on-ambari-upgrade add="false"/>
   </property>
@@ -41,7 +41,7 @@
   <!-- These configs were inherited from HDP 2.5 -->
   <property>
     <name>yarn.nodemanager.aux-services.spark2_shuffle.classpath</name>
-    <value>{{stack_root}}/${hdp.version}/spark2/aux/*</value>
+    <value>{{stack_root}}/{{spark2_version}}/spark2/aux/*</value>
     <description>The auxiliary service classpath to use for Spark 2</description>
     <on-ambari-upgrade add="false"/>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3fa1289e/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEYARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEYARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEYARN/configuration/yarn-site.xml
index 57d03a8..392d0de 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEYARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEYARN/configuration/yarn-site.xml
@@ -747,13 +747,13 @@
   </property>
   <property>
     <name>yarn.nodemanager.aux-services.spark_shuffle.classpath</name>
-    <value>{{stack_root}}/${hdp.version}/spark/aux/*</value>
+    <value>{{stack_root}}/{{spark_version}}/spark/aux/*</value>
     <description>The auxiliary service classpath to use for Spark</description>
     <on-ambari-upgrade add="false"/>
   </property>
   <property>
     <name>yarn.nodemanager.aux-services.spark2_shuffle.classpath</name>
-    <value>{{stack_root}}/${hdp.version}/spark2/aux/*</value>
+    <value>{{stack_root}}/{{spark2_version}}/spark2/aux/*</value>
     <description>The auxiliary service classpath to use for Spark 2</description>
     <on-ambari-upgrade add="false"/>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3fa1289e/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index 407e78d..cc6bd9d 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -1206,7 +1206,7 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEquals(configurations['hive-interactive-site']['properties']['hive.llap.daemon.queue.name'], 'default')
     self.assertEquals(configurations['yarn-site']['properties']['yarn.timeline-service.entity-group-fs-store.group-id-plugin-classes'],
                       'org.apache.tez.dag.history.logging.ats.TimelineCachePluginImpl,org.apache.spark.deploy.history.yarn.plugin.SparkATSPlugin')
-    self.assertEquals(configurations['yarn-site']['properties']['yarn.timeline-service.entity-group-fs-store.group-id-plugin-classpath'], '/usr/hdp/${hdp.version}/spark/hdpLib/*')
+    self.assertEquals(configurations['yarn-site']['properties']['yarn.timeline-service.entity-group-fs-store.group-id-plugin-classpath'], '/usr/hdp/{{spark_version}}/spark/hdpLib/*')
     self.assertTrue('hive-interactive-env' not in configurations)
     self.assertTrue('property_attributes' not in configurations)
 


[20/24] ambari git commit: AMBARI-22480. Validate blueprint does not allow lzo enable without setup with license agreement. Added io.compression.codecs property validation. (mpapirkovskyy)

Posted by rl...@apache.org.
AMBARI-22480. Validate blueprint does not allow lzo enable without setup with license agreement. Added io.compression.codecs property validation. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f6fd9b5c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f6fd9b5c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f6fd9b5c

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: f6fd9b5c5e04998ed0b0631c87c2d4b521e99a44
Parents: e12efe3
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Tue Nov 28 14:51:58 2017 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Wed Nov 29 18:30:08 2017 +0200

----------------------------------------------------------------------
 .../server/topology/BlueprintValidatorImpl.java |  4 +++-
 .../server/topology/BlueprintImplTest.java      | 20 +++++++++++++++++++-
 2 files changed, 22 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f6fd9b5c/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
index 87b5936..fbd0e4b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
@@ -49,6 +49,7 @@ public class BlueprintValidatorImpl implements BlueprintValidator {
   private final Stack stack;
 
   public static final String LZO_CODEC_CLASS_PROPERTY_NAME = "io.compression.codec.lzo.class";
+  public static final String CODEC_CLASSES_PROPERTY_NAME = "io.compression.codecs";
   public static final String LZO_CODEC_CLASS = "com.hadoop.compression.lzo.LzoCodec";
 
   @Inject
@@ -114,7 +115,8 @@ public class BlueprintValidatorImpl implements BlueprintValidator {
             String propertyName = propertyEntry.getKey();
             String propertyValue = propertyEntry.getValue();
             if (propertyValue != null) {
-              if (!gplEnabled && configType.equals("core-site") && propertyName.equals(LZO_CODEC_CLASS_PROPERTY_NAME)
+              if (!gplEnabled && configType.equals("core-site")
+                  && (propertyName.equals(LZO_CODEC_CLASS_PROPERTY_NAME) || propertyName.equals(CODEC_CLASSES_PROPERTY_NAME))
                   && propertyValue.contains(LZO_CODEC_CLASS)) {
                 throw new GPLLicenseNotAcceptedException("Your Ambari server has not been configured to download LZO GPL software. " +
                     "Please refer to documentation to configure Ambari before proceeding.");

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6fd9b5c/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
index 6d3179e..dd0adcc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
@@ -299,7 +299,24 @@ public class BlueprintImplTest {
   }
 
   @Test(expected = GPLLicenseNotAcceptedException.class)
-  public void testValidateConfigurations__gplIsNotAllowed() throws InvalidTopologyException,
+  public void testValidateConfigurations__gplIsNotAllowedCodecsProperty() throws InvalidTopologyException,
+      GPLLicenseNotAcceptedException, NoSuchFieldException, IllegalAccessException {
+    Map<String, Map<String, String>> lzoProperties = new HashMap<>();
+    lzoProperties.put("core-site", new HashMap<String, String>(){{
+      put(BlueprintValidatorImpl.CODEC_CLASSES_PROPERTY_NAME, "OtherCodec, " + BlueprintValidatorImpl.LZO_CODEC_CLASS);
+    }});
+    Configuration lzoUsageConfiguration = new Configuration(lzoProperties, EMPTY_ATTRIBUTES, EMPTY_CONFIGURATION);
+
+    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(false);
+    replay(stack, group1, group2, serverConfig);
+
+    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, lzoUsageConfiguration, null);
+    blueprint.validateRequiredProperties();
+    verify(stack, group1, group2, serverConfig);
+  }
+
+  @Test(expected = GPLLicenseNotAcceptedException.class)
+  public void testValidateConfigurations__gplIsNotAllowedLZOProperty() throws InvalidTopologyException,
       GPLLicenseNotAcceptedException, NoSuchFieldException, IllegalAccessException {
     Map<String, Map<String, String>> lzoProperties = new HashMap<>();
     lzoProperties.put("core-site", new HashMap<String, String>(){{
@@ -321,6 +338,7 @@ public class BlueprintImplTest {
     Map<String, Map<String, String>> lzoProperties = new HashMap<>();
     lzoProperties.put("core-site", new HashMap<String, String>(){{
       put(BlueprintValidatorImpl.LZO_CODEC_CLASS_PROPERTY_NAME, BlueprintValidatorImpl.LZO_CODEC_CLASS);
+      put(BlueprintValidatorImpl.CODEC_CLASSES_PROPERTY_NAME, "OtherCodec, " + BlueprintValidatorImpl.LZO_CODEC_CLASS);
     }});
     Configuration lzoUsageConfiguration = new Configuration(lzoProperties, EMPTY_ATTRIBUTES, EMPTY_CONFIGURATION);
 

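The validator now rejects a blueprint whose core-site references the LZO codec through either io.compression.codec.lzo.class or io.compression.codecs while the GPL license is not accepted. A Python restatement of that check for illustration (the authoritative logic is the BlueprintValidatorImpl change above):

# Illustration only; the authoritative check is BlueprintValidatorImpl above.
LZO_CODEC_CLASS = "com.hadoop.compression.lzo.LzoCodec"
LZO_PROPERTIES = ("io.compression.codec.lzo.class", "io.compression.codecs")

def violates_gpl_policy(core_site, gpl_enabled):
    if gpl_enabled:
        return False
    return any(LZO_CODEC_CLASS in (core_site.get(name) or "") for name in LZO_PROPERTIES)

core_site = {"io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec," + LZO_CODEC_CLASS}
assert violates_gpl_policy(core_site, gpl_enabled=False)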

[18/24] ambari git commit: AMBARI-22484. Stack advisor should disallow lzo enable without accepting license agreement. (mpapirkovskyy)

Posted by rl...@apache.org.
AMBARI-22484. Stack advisor should disallow lzo enable without accepting license agreement. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/41853a10
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/41853a10
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/41853a10

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 41853a1041a848b69384592c49f8f5b539a5c4f9
Parents: 7750398
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Wed Nov 29 16:48:11 2017 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Wed Nov 29 18:30:08 2017 +0200

----------------------------------------------------------------------
 .../stackadvisor/StackAdvisorRequest.java       | 11 +++++
 .../commands/StackAdvisorCommand.java           |  2 +
 .../ambari/server/controller/AmbariServer.java  |  3 +-
 .../internal/StackAdvisorResourceProvider.java  |  9 +++-
 .../GPLLicenseNotAcceptedException.java         | 28 +++++++++++
 .../stacks/HDP/2.0.6/services/stack_advisor.py  | 17 +++++++
 .../ValidationResourceProviderTest.java         |  4 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   | 51 ++++++++++++++++++--
 8 files changed, 116 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/41853a10/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
index cd26c56..62b8d15 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
@@ -51,6 +51,7 @@ public class StackAdvisorRequest {
   private Set<RecommendationResponse.ConfigGroup> configGroups;
   private Map<String, String> userContext = new HashMap<>();
   private Map<String, Object> ldapConfig = new HashMap<>();
+  private Boolean gplLicenseAccepted;
 
   public String getStackName() {
     return stackName;
@@ -122,6 +123,10 @@ public class StackAdvisorRequest {
     this.configGroups = configGroups;
   }
 
+  public Boolean getGplLicenseAccepted() {
+    return gplLicenseAccepted;
+  }
+
   private StackAdvisorRequest(String stackName, String stackVersion) {
     this.stackName = stackName;
     this.stackVersion = stackVersion;
@@ -194,6 +199,12 @@ public class StackAdvisorRequest {
       return this;
     }
 
+    public StackAdvisorRequestBuilder withGPLLicenseAccepted(
+        Boolean gplLicenseAccepted) {
+      this.instance.gplLicenseAccepted = gplLicenseAccepted;
+      return this;
+    }
+
     public StackAdvisorRequestBuilder withLdapConfig(Map<String, Object> ldapConfig) {
       Preconditions.checkNotNull(ldapConfig);
       this.instance.ldapConfig = ldapConfig;

http://git-wip-us.apache.org/repos/asf/ambari/blob/41853a10/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
index 1b89c4f..4ec15ef 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
@@ -107,6 +107,7 @@ public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extend
   private static final String CONFIGURATIONS_PROPERTY = "configurations";
   private static final String CHANGED_CONFIGURATIONS_PROPERTY = "changed-configurations";
   private static final String USER_CONTEXT_PROPERTY = "user-context";
+  private static final String GPL_LICENSE_ACCEPTED = "gpl-license-accepted";
   private static final String AMBARI_SERVER_CONFIGURATIONS_PROPERTY = "ambari-server-properties";
 
   private File recommendationsDir;
@@ -269,6 +270,7 @@ public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extend
 
     JsonNode userContext = mapper.valueToTree(request.getUserContext());
     root.put(USER_CONTEXT_PROPERTY, userContext);
+    root.put(GPL_LICENSE_ACCEPTED, request.getGplLicenseAccepted());
   }
 
   private void populateConfigGroups(ObjectNode root,

http://git-wip-us.apache.org/repos/asf/ambari/blob/41853a10/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index 6ceed4a..863313b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -908,7 +908,8 @@ public class AmbariServer {
     PersistKeyValueService.init(injector.getInstance(PersistKeyValueImpl.class));
     KeyService.init(injector.getInstance(PersistKeyValueImpl.class));
     BootStrapResource.init(injector.getInstance(BootStrapImpl.class));
-    StackAdvisorResourceProvider.init(injector.getInstance(StackAdvisorHelper.class));
+    StackAdvisorResourceProvider.init(injector.getInstance(StackAdvisorHelper.class),
+        injector.getInstance(Configuration.class));
     StageUtils.setGson(injector.getInstance(Gson.class));
     StageUtils.setTopologyManager(injector.getInstance(TopologyManager.class));
     StageUtils.setConfiguration(injector.getInstance(Configuration.class));

http://git-wip-us.apache.org/repos/asf/ambari/blob/41853a10/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
index 1ea664c..67c177e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
@@ -36,6 +36,7 @@ import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest.StackAdvisorRequestBuilder;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest.StackAdvisorRequestType;
 import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.Resource.Type;
@@ -82,12 +83,14 @@ public abstract class StackAdvisorResourceProvider extends ReadOnlyResourceProvi
   private static final String CONFIG_GROUPS_HOSTS_PROPERTY = "hosts";
 
   protected static StackAdvisorHelper saHelper;
+  protected static Configuration configuration;
   protected static final String USER_CONTEXT_OPERATION_PROPERTY = "user_context/operation";
   protected static final String USER_CONTEXT_OPERATION_DETAILS_PROPERTY = "user_context/operation_details";
 
   @Inject
-  public static void init(StackAdvisorHelper instance) {
+  public static void init(StackAdvisorHelper instance, Configuration serverConfig) {
     saHelper = instance;
+    configuration = serverConfig;
   }
 
   protected StackAdvisorResourceProvider(Set<String> propertyIds, Map<Type, String> keyPropertyIds,
@@ -133,6 +136,7 @@ public abstract class StackAdvisorResourceProvider extends ReadOnlyResourceProvi
           hgHostsMap);
       Map<String, Map<String, Map<String, String>>> configurations = calculateConfigurations(request);
       Map<String, String> userContext = readUserContext(request);
+      Boolean gplLicenseAccepted = configuration.getGplLicenseAccepted();
 
       List<ChangedConfigInfo> changedConfigurations =
         requestType == StackAdvisorRequestType.CONFIGURATION_DEPENDENCIES ?
@@ -147,7 +151,8 @@ public abstract class StackAdvisorResourceProvider extends ReadOnlyResourceProvi
         withConfigurations(configurations).
         withConfigGroups(configGroups).
         withChangedConfigurations(changedConfigurations).
-        withUserContext(userContext).build();
+        withUserContext(userContext).
+        withGPLLicenseAccepted(gplLicenseAccepted).build();
     } catch (Exception e) {
       LOG.warn("Error occurred during preparation of stack advisor request", e);
       Response response = Response.status(Status.BAD_REQUEST)

http://git-wip-us.apache.org/repos/asf/ambari/blob/41853a10/ambari-server/src/main/java/org/apache/ambari/server/topology/GPLLicenseNotAcceptedException.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/GPLLicenseNotAcceptedException.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/GPLLicenseNotAcceptedException.java
new file mode 100644
index 0000000..b444d01
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/GPLLicenseNotAcceptedException.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.topology;
+
+/**
+ * Indicates LZO usage that is not permitted because the GPL license has not been accepted.
+ */
+public class GPLLicenseNotAcceptedException extends Exception {
+  public GPLLicenseNotAcceptedException(String s) {
+    super(s);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/41853a10/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index a194332..bfa2f5a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -351,6 +351,22 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
     # recommendations for "hadoop.proxyuser.*.hosts", "hadoop.proxyuser.*.groups" properties in core-site
     self.recommendHadoopProxyUsers(configurations, services, hosts)
 
+  def getLZOSupportValidationItems(self, properties, services):
+    services_list = self.get_services_list(services)
+
+    if "HDFS" in services_list:
+      lzo_allowed = services["gpl-license-accepted"]
+      property_name = "io.compression.codec.lzo.class"
+      if property_name in properties:
+        property_value = properties.get(property_name)
+        if not lzo_allowed and "com.hadoop.compression.lzo.LzoCodec" in property_value:
+          return [{"config-name": property_name, "item": self.getErrorItem(
+            "Your Ambari Server has not been configured to download LZO and install it. "
+            "LZO is GPL software and requires you to accept a license prior to use. "
+            "Please refer to this documentation to configure Ambari before proceeding.")}]
+
+    return []
+
   def recommendHbaseConfigurations(self, configurations, clusterData, services, hosts):
     # recommendations for HBase env config
 
@@ -726,6 +742,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
     validationItems = []
     validationItems.extend(self.getHadoopProxyUsersValidationItems(properties, services, hosts, configurations))
     validationItems.extend(self.getAmbariProxyUsersForHDFSValidationItems(properties, services))
+    validationItems.extend(self.getLZOSupportValidationItems(properties, services))
     return self.toConfigurationValidationProblems(validationItems, "core-site")
 
   def validatorOneDataDirPerPartition(self, properties, propertyName, services, hosts, clusterEnv):

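getLZOSupportValidationItems keys off the new top-level gpl-license-accepted flag that StackAdvisorCommand now writes into the services payload. A sketch of the minimal inputs that trigger the error item (the dictionary shapes mirror the test_stack_advisor.py changes below):

# Sketch only: inputs under which the advisor flags the LZO codec property.
properties = {"io.compression.codec.lzo.class": "com.hadoop.compression.lzo.LzoCodec"}
services = {
    "services": [{"StackServices": {"service_name": "HDFS"}}],
    "gpl-license-accepted": False,
}
# With gpl-license-accepted False and LzoCodec present, getLZOSupportValidationItems
# returns an error item for io.compression.codec.lzo.class; with the flag True
# (as in the test below) the same property passes validation.
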
http://git-wip-us.apache.org/repos/asf/ambari/blob/41853a10/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ValidationResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ValidationResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ValidationResourceProviderTest.java
index c639d1f..42d4770 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ValidationResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ValidationResourceProviderTest.java
@@ -33,6 +33,7 @@ import java.util.Set;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorHelper;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
 import org.apache.ambari.server.api.services.stackadvisor.validations.ValidationResponse;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.RequestStatus;
@@ -53,13 +54,14 @@ public class ValidationResourceProviderTest {
     doReturn(stackAdvisorRequest).when(provider).prepareStackAdvisorRequest(request);
 
     StackAdvisorHelper saHelper = mock(StackAdvisorHelper.class);
+    Configuration configuration = mock(Configuration.class);
 
     ValidationResponse response = mock(ValidationResponse.class);
     Version version = mock(Version.class);
     doReturn(3).when(response).getId();
     doReturn(version).when(response).getVersion();
     doReturn(response).when(saHelper).validate(any(StackAdvisorRequest.class));
-    ValidationResourceProvider.init(saHelper);
+    ValidationResourceProvider.init(saHelper, configuration);
 
     RequestStatus status = provider.createResources(request);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/41853a10/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 38d6ecd..6c774af 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -2521,7 +2521,8 @@ class TestHDP206StackAdvisor(TestCase):
                   'hadoop.proxyuser.hdfs-user.hosts': '*',
                   'hadoop.proxyuser.hdfs-user.groups': '*',
                   'hadoop.proxyuser.yarn-user.hosts': 'host1,host2',
-                  'hadoop.proxyuser.yarn-user.groups': '*'}
+                  'hadoop.proxyuser.yarn-user.groups': '*',
+                  'io.compression.codec.lzo.class': 'com.hadoop.compression.lzo.LzoCodec'}
     services = {
       'services':  [
         { 'StackServices': {'service_name': 'HDFS'}},
@@ -2537,7 +2538,8 @@ class TestHDP206StackAdvisor(TestCase):
         }
       ],
       'ambari-server-properties': {'ambari-server.user': 'ambari-user'},
-      'configurations': configurations
+      'configurations': configurations,
+      "gpl-license-accepted": True
     }
     hosts = {
       'items' : [
@@ -2551,7 +2553,21 @@ class TestHDP206StackAdvisor(TestCase):
     res = self.stackAdvisor.validateHDFSConfigurationsCoreSite(properties, recommendedDefaults, configurations, services, hosts)
     self.assertEquals(res, res_expected)
 
-    # 2) fail: test filter function: two RESOURCE_MANAGERs, hadoop.proxyuser.yarn-user.hosts is expected to be set
+    # 2) fail: gpl is not allowed
+    services["gpl-license-accepted"] = False
+    res_expected = [{'config-type': 'core-site',
+                     'message': 'Your Ambari Server has not been configured to download LZO and install it. '
+                                'LZO is GPL software and requires you to accept a license prior to use. '
+                                'Please refer to this documentation to configure Ambari before proceeding.',
+                     'type': 'configuration',
+                     'config-name': 'io.compression.codec.lzo.class',
+                     'level': 'ERROR'}]
+
+    res = self.stackAdvisor.validateHDFSConfigurationsCoreSite(properties, {}, configurations, services, hosts)
+    self.assertEquals(res, res_expected)
+    services["gpl-license-accepted"] = True
+
+    # 3) fail: test filter function: two RESOURCE_MANAGERs, hadoop.proxyuser.yarn-user.hosts is expected to be set
     del properties['hadoop.proxyuser.yarn-user.hosts']
     res_expected = [{'config-name': 'hadoop.proxyuser.yarn-user.hosts',
                      'config-type': 'core-site',
@@ -2561,13 +2577,13 @@ class TestHDP206StackAdvisor(TestCase):
     res = self.stackAdvisor.validateHDFSConfigurationsCoreSite(properties, recommendedDefaults, configurations, services, hosts)
     self.assertEquals(res, res_expected)
 
-    # 3) ok: test filter function: only one RESOURCE_MANAGER
+    # 4) ok: test filter function: only one RESOURCE_MANAGER
     services['services'][1]['components'][0]['StackServiceComponents']['hostnames'] = ["host1"]
     res_expected = []
     res = self.stackAdvisor.validateHDFSConfigurationsCoreSite(properties, recommendedDefaults, configurations, services, hosts)
     self.assertEquals(res, res_expected)
 
-    # 4) fail: some proxyusers are empty or absent:
+    # 5) fail: some proxyusers are empty or absent:
     del properties['hadoop.proxyuser.ambari-user.hosts']
     properties['hadoop.proxyuser.hdfs-user.groups'] = ''
     res_expected = [{'config-name': 'hadoop.proxyuser.hdfs-user.groups',
@@ -2627,6 +2643,31 @@ class TestHDP206StackAdvisor(TestCase):
     res = self.stackAdvisor.getHadoopProxyUsers(services, hosts, configurations)
     self.assertEquals(res, res_expected)
 
+  # def test_validateHDFSConfigurationsCoreSite(self):
+  #
+  #   configurations = {}
+  #   services = {"gpl-license-accepted": True, "services": [{"StackServices": {"service_name": "HDFS"}}], 'ambari-server-properties': {'ambari-server.user': 'ambari-user'}}
+  #
+  #   # 1) ok: gpl is allowed
+  #   properties = {'io.compression.codec.lzo.class': 'com.hadoop.compression.lzo.LzoCodec'}
+  #   res_expected = []
+  #
+  #   res = self.stackAdvisor.validateHDFSConfigurationsCoreSite(properties, {}, configurations, services, '')
+  #   self.assertEquals(res, res_expected)
+  #
+  #   # 2) fail: gpl is not allowed
+  #   services["gpl-license-accepted"] = False
+  #   res_expected = [{'config-type': 'core-site',
+  #                    'message': 'Your Ambari Server has not been configured to download LZO and install it. '
+  #                               'LZO is GPL software and requires you to accept a license prior to use. '
+  #                               'Please refer to this documentation to configure Ambari before proceeding.',
+  #                    'type': 'configuration',
+  #                    'config-name': 'io.compression.codec.lzo.class',
+  #                    'level': 'ERROR'}]
+  #
+  #   res = self.stackAdvisor.validateHDFSConfigurationsCoreSite(properties, {}, configurations, services, '')
+  #   self.assertEquals(res, res_expected)
+
   def test_validateOneDataDirPerPartition(self):
     recommendedDefaults = {
       'dfs.datanode.du.reserved': '1024'
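
The new test cases above hinge on the top-level "gpl-license-accepted" flag in the services payload. A trimmed sketch of the inputs that drive the error path in case 2), with field names taken from the test and everything unrelated omitted:

# Minimal inputs for the LZO error path exercised by case 2) above.
services = {
    "services": [
        {"StackServices": {"service_name": "HDFS"}},
    ],
    "ambari-server-properties": {"ambari-server.user": "ambari-user"},
    "gpl-license-accepted": False,  # flip to True once the GPL license is accepted
}
properties = {
    "io.compression.codec.lzo.class": "com.hadoop.compression.lzo.LzoCodec",
}
# With the flag set to False, validateHDFSConfigurationsCoreSite is expected
# to return a single ERROR item for io.compression.codec.lzo.class.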


[02/24] ambari git commit: AMBARI-22529. PERF stack fixes (aonishuk)

Posted by rl...@apache.org.
AMBARI-22529. PERF stack fixes (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/530f1d5e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/530f1d5e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/530f1d5e

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 530f1d5e34eb5eb5e3e509bd9934a590bed64ac9
Parents: b832f70
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Nov 28 15:30:10 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Nov 28 15:30:10 2017 +0200

----------------------------------------------------------------------
 .../main/java/org/apache/ambari/server/state/ServiceInfo.java | 2 +-
 .../stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py      | 7 +++++++
 2 files changed, 8 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/530f1d5e/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
index f1c63bf..c0342eb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
@@ -87,7 +87,7 @@ public class ServiceInfo implements Validable{
     JAVA
   }
   @XmlElement(name="service_advisor_type")
-  private ServiceAdvisorType serviceAdvisorType = null;
+  private ServiceAdvisorType serviceAdvisorType = ServiceAdvisorType.PYTHON;
 
   @XmlTransient
   private List<PropertyInfo> properties;

http://git-wip-us.apache.org/repos/asf/ambari/blob/530f1d5e/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
index 833fdbc..513b994 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
@@ -23,6 +23,7 @@ from resource_management.core.resources.system import Directory, File, Execute
 from resource_management.libraries.script import Hook
 
 AMBARI_AGENT_CACHE_DIR = 'AMBARI_AGENT_CACHE_DIR'
+DEFAULT_AMBARI_AGENT_CACHE_DIR = '/var/lib/ambari-agent/cache/'
 
 BEFORE_INSTALL_SCRIPTS = "hooks/before-INSTALL/scripts"
 STACK = "PERF/1.0"
@@ -38,6 +39,12 @@ class BeforeInstallHook(Hook):
     self.run_custom_hook('before-ANY')
     print "Before Install Hook"
     cache_dir = self.extrakt_var_from_pythonpath(AMBARI_AGENT_CACHE_DIR)
+
+    # This happens if the PythonExecutor.py.sed hack was not applied.
+    if not cache_dir:
+      print "WARN: Cache dir for the agent could not be detected. Using default cache dir"
+      cache_dir = DEFAULT_AMBARI_AGENT_CACHE_DIR
+
     conf_select = os.path.join(cache_dir, CONF_SELECT_PY)
     dist_select = os.path.join(cache_dir, DISTRO_SELECT_PY)
     try:
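
A small sketch of the fallback introduced above, assuming only that the detected cache dir comes back empty when the PythonExecutor.py.sed hack was not applied; the surrounding hook plumbing is left out:

DEFAULT_AMBARI_AGENT_CACHE_DIR = '/var/lib/ambari-agent/cache/'

def resolve_cache_dir(detected_dir):
    # Fall back to the default agent cache location when detection fails.
    if not detected_dir:
        print "WARN: Cache dir for the agent could not be detected. Using default cache dir"
        return DEFAULT_AMBARI_AGENT_CACHE_DIR
    return detected_dir

print resolve_cache_dir("")                                   # -> /var/lib/ambari-agent/cache/
print resolve_cache_dir("/var/lib/ambari-agent/cache/stacks") # unchanged when detection worked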