You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by dm...@apache.org on 2016/04/20 16:17:11 UTC

[1/2] ambari git commit: Revert "AMBARI-15921 By default, YARN does not emit audit logs into a separate log file (dlysnichenko)" - intended only for trunk

Repository: ambari
Updated Branches:
  refs/heads/branch-2.2 881af59aa -> e244ee5f0
  refs/heads/branch-2.2.2 a564058a4 -> 42ed94718


Revert "AMBARI-15921 By default, YARN does not emit audit logs into a separate log file (dlysnichenko)" - intended only for trunk

This reverts commit 01b2af13d8f4efad60882e498f8bd92e302b162f.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e244ee5f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e244ee5f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e244ee5f

Branch: refs/heads/branch-2.2
Commit: e244ee5f0834f6925c58be6495244f639a94dd6a
Parents: 881af59
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Wed Apr 20 17:14:33 2016 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Wed Apr 20 17:14:33 2016 +0300

----------------------------------------------------------------------
 .../YARN/2.1.0.2.0/configuration/yarn-log4j.xml | 20 --------------------
 .../services/YARN/configuration/yarn-log4j.xml  | 20 --------------------
 2 files changed, 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e244ee5f/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
index 08fef43..bbd2a3f 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
@@ -65,26 +65,6 @@ log4j.appender.JSA.DatePattern=.yyyy-MM-dd
 log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
 log4j.logger.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=${yarn.server.resourcemanager.appsummary.logger}
 log4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=false
-
-# Audit logging for ResourceManager
-rm.audit.logger=INFO,RMAUDIT
-log4j.logger.org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger=${rm.audit.logger}
-log4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger=false
-log4j.appender.RMAUDIT=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.RMAUDIT.File=${yarn.log.dir}/rm-audit.log
-log4j.appender.RMAUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.RMAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
-log4j.appender.RMAUDIT.DatePattern=.yyyy-MM-dd
-
-# Audit logging for NodeManager
-nm.audit.logger=INFO,NMAUDIT
-log4j.logger.org.apache.hadoop.yarn.server.nodemanager.NMAuditLogger=${nm.audit.logger}
-log4j.additivity.org.apache.hadoop.yarn.server.nodemanager.NMAuditLogger=false
-log4j.appender.NMAUDIT=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.NMAUDIT.File=${yarn.log.dir}/nm-audit.log
-log4j.appender.NMAUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.NMAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
-log4j.appender.NMAUDIT.DatePattern=.yyyy-MM-dd
     </value>
     <value-attributes>
       <show-property-name>false</show-property-name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e244ee5f/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
index 728f0a6..b52af80 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
@@ -74,26 +74,6 @@ log4j.appender.EWMA=org.apache.hadoop.yarn.util.Log4jWarningErrorMetricsAppender
 log4j.appender.EWMA.cleanupInterval=${yarn.ewma.cleanupInterval}
 log4j.appender.EWMA.messageAgeLimitSeconds=${yarn.ewma.messageAgeLimitSeconds}
 log4j.appender.EWMA.maxUniqueMessages=${yarn.ewma.maxUniqueMessages}
-
-# Audit logging for ResourceManager
-rm.audit.logger=INFO,RMAUDIT
-log4j.logger.org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger=${rm.audit.logger}
-log4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger=false
-log4j.appender.RMAUDIT=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.RMAUDIT.File=${yarn.log.dir}/rm-audit.log
-log4j.appender.RMAUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.RMAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
-log4j.appender.RMAUDIT.DatePattern=.yyyy-MM-dd
-
-# Audit logging for NodeManager
-nm.audit.logger=INFO,NMAUDIT
-log4j.logger.org.apache.hadoop.yarn.server.nodemanager.NMAuditLogger=${nm.audit.logger}
-log4j.additivity.org.apache.hadoop.yarn.server.nodemanager.NMAuditLogger=false
-log4j.appender.NMAUDIT=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.NMAUDIT.File=${yarn.log.dir}/nm-audit.log
-log4j.appender.NMAUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.NMAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
-log4j.appender.NMAUDIT.DatePattern=.yyyy-MM-dd
     </value>
     <value-attributes>
       <show-property-name>false</show-property-name>


[2/2] ambari git commit: AMBARI-15911. Choose Authorization [Hive]' with value None was added after upgrade, but should have a value according to security type (after upgrade secured cluster from 1.7.0, 2.0.1, 2.0.2 etc) to 2.2.2.0] (dgrinenko via dlysnichenko)

Posted by dm...@apache.org.
AMBARI-15911. Choose Authorization [Hive]' with value None was added after upgrade, but should have a value according to security type (after upgrade secured cluster from 1.7.0, 2.0.1, 2.0.2 etc) to 2.2.2.0] (dgrinenko via dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/42ed9471
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/42ed9471
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/42ed9471

Branch: refs/heads/branch-2.2.2
Commit: 42ed94718cd433711fa7692155c4a5299af21f50
Parents: a564058
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Fri Apr 15 16:59:47 2016 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Wed Apr 20 17:17:29 2016 +0300

----------------------------------------------------------------------
 .../server/upgrade/AbstractUpgradeCatalog.java  |  8 +--
 .../server/upgrade/UpgradeCatalog210.java       | 37 +++++++---
 .../server/upgrade/UpgradeCatalog220.java       |  2 +-
 .../server/upgrade/UpgradeCatalog210Test.java   | 72 ++++++++++++++++++++
 4 files changed, 103 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/42ed9471/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
index 7be2dfe..b7665ce 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
@@ -192,18 +192,18 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
     return doc;
   }
 
-  protected static boolean isRangerPluginEnabled(Cluster cluster) {
-    boolean isRangerPluginEnabled = false;
+  protected static boolean isRangerKnoxPluginEnabled(Cluster cluster) {
+    boolean isRangerKnoxPluginEnabled = false;
     if (cluster != null) {
       Config rangerKnoxPluginProperties = cluster.getDesiredConfigByType(CONFIGURATION_TYPE_RANGER_KNOX_PLUGIN_PROPERTIES);
       if (rangerKnoxPluginProperties != null) {
         String rangerKnoxPluginEnabled = rangerKnoxPluginProperties.getProperties().get(PROPERTY_RANGER_KNOX_PLUGIN_ENABLED);
         if (StringUtils.isNotEmpty(rangerKnoxPluginEnabled)) {
-          isRangerPluginEnabled = rangerKnoxPluginEnabled.toLowerCase().equals("yes");
+          isRangerKnoxPluginEnabled =  "yes".equalsIgnoreCase(rangerKnoxPluginEnabled);
         }
       }
     }
-    return isRangerPluginEnabled;
+    return isRangerKnoxPluginEnabled;
   }
 
   protected static class VersionComparator implements Comparator<UpgradeCatalog> {

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ed9471/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
index faf4b96..4c1f434 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
@@ -1363,14 +1363,11 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
                     && RangerHiveConfig.getProperties().get("ranger-hive-plugin-enabled").equalsIgnoreCase("yes")) {
               newHiveEnvProperties.put("hive_security_authorization", "Ranger");
               newHiveServerProperties.put("hive.security.authorization.enabled", "true");
-            } else {
-              newHiveEnvProperties.put("hive_security_authorization", "None");
             }
             boolean updateProperty = cluster.getDesiredConfigByType("hive-env").getProperties().containsKey("hive_security_authorization");
             updateConfigurationPropertiesForCluster(cluster, "hive-env", newHiveEnvProperties, updateProperty, true);
             updateConfigurationPropertiesForCluster(cluster, "hiveserver2-site", newHiveServerProperties, updateProperty, true);
-            updateConfigurationPropertiesForCluster(cluster, "ranger-hive-plugin-properties", new HashMap<String, String>(),
-                    removeRangerHiveProperties, false, true);
+            removeConfigurationPropertiesFromCluster(cluster, "ranger-hive-plugin-properties", removeRangerHiveProperties);
           }
         }
       }
@@ -1479,6 +1476,13 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
       if (clusterMap != null && !clusterMap.isEmpty()) {
         for (final Cluster cluster : clusterMap.values()) {
           String content = null;
+          String hive_server2_auth = "";
+          if (cluster.getDesiredConfigByType("hive-site") != null &&
+              cluster.getDesiredConfigByType("hive-site").getProperties().containsKey("hive.server2.authentication")) {
+
+            hive_server2_auth = cluster.getDesiredConfigByType("hive-site").getProperties().get("hive.server2.authentication");
+          }
+
           if(cluster.getDesiredConfigByType("hive-env") != null) {
             Map<String, String> hiveEnvProps = new HashMap<String, String>();
             Set<String> hiveServerSiteRemoveProps = new HashSet<String>();
@@ -1495,22 +1499,32 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
             if (!cluster.getDesiredConfigByType("hive-env").getProperties().containsKey("hive.metastore.heapsize")) {
               hiveEnvProps.put("hive.metastore.heapsize", "1024");
             }
-            if (cluster.getDesiredConfigByType("hive-env").getProperties().containsKey("hive_security_authorization") &&
-                    "none".equalsIgnoreCase(cluster.getDesiredConfigByType("hive-env").getProperties().get("hive_security_authorization"))) {
+
+            boolean isHiveSecurityAuthPresent = cluster.getDesiredConfigByType("hive-env").getProperties().containsKey("hive_security_authorization");
+            String hiveSecurityAuth="";
+
+            if ("kerberos".equalsIgnoreCase(hive_server2_auth) && cluster.getServices().containsKey("KERBEROS")){
+              hiveSecurityAuth = "SQLStdAuth";
+              isHiveSecurityAuthPresent = true;
+              hiveEnvProps.put("hive_security_authorization", hiveSecurityAuth);
+            } else {
+              if (isHiveSecurityAuthPresent) {
+                hiveSecurityAuth = cluster.getDesiredConfigByType("hive-env").getProperties().get("hive_security_authorization");
+              }
+            }
+
+            if (isHiveSecurityAuthPresent && "none".equalsIgnoreCase(hiveSecurityAuth)) {
               hiveServerSiteRemoveProps.add("hive.security.authorization.manager");
               hiveServerSiteRemoveProps.add("hive.security.authenticator.manager");
             }
             updateConfigurationPropertiesForCluster(cluster, "hive-env", hiveEnvProps, true, true);
-            updateConfigurationPropertiesForCluster(cluster, "hiveserver2-site", new HashMap<String, String>(), hiveServerSiteRemoveProps, false, true);
+            removeConfigurationPropertiesFromCluster(cluster, "hiveserver2-site", hiveServerSiteRemoveProps);
           }
 
           if(cluster.getDesiredConfigByType("hive-site") != null) {
             Set<String> hiveSiteRemoveProps = new HashSet<String>();
             Map<String, String> hiveSiteAddProps = new HashMap<String, String>();
-            String hive_server2_auth = "";
-            if (cluster.getDesiredConfigByType("hive-site").getProperties().containsKey("hive.server2.authentication")) {
-              hive_server2_auth = cluster.getDesiredConfigByType("hive-site").getProperties().get("hive.server2.authentication");
-            }
+
             if (!"pam".equalsIgnoreCase(hive_server2_auth)) {
               hiveSiteRemoveProps.add("hive.server2.authentication.pam.services");
             } else {
@@ -1532,6 +1546,7 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
             } else {
               hiveSiteAddProps.put("hive.server2.authentication.kerberos.keytab", "");
               hiveSiteAddProps.put("hive.server2.authentication.kerberos.principal", "");
+
             }
             
             

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ed9471/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog220.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog220.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog220.java
index ac6b3c5..0738d70 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog220.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog220.java
@@ -383,7 +383,7 @@ public class UpgradeCatalog220 extends AbstractUpgradeCatalog {
               if (!authorizationProviderExists) {
                 NodeList nodeList = root.getElementsByTagName("gateway");
                 if (nodeList != null && nodeList.getLength() > 0) {
-                  boolean rangerPluginEnabled = isRangerPluginEnabled(cluster);
+                  boolean rangerPluginEnabled = isRangerKnoxPluginEnabled(cluster);
 
                   Node gatewayNode = nodeList.item(0);
                   Element newProvider = topologyXml.createElement("provider");

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ed9471/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
index 83018a2..1e47a91 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
@@ -28,6 +28,7 @@ import com.google.inject.persist.PersistService;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.ConfigurationRequest;
 import org.apache.ambari.server.controller.ServiceConfigVersionResponse;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
@@ -60,6 +61,7 @@ import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
+import org.easymock.CaptureType;
 import org.easymock.EasyMockSupport;
 import org.junit.After;
 import org.junit.Assert;
@@ -377,6 +379,76 @@ public class UpgradeCatalog210Test {
   }
 
   @Test
+  public void testUpdateHiveConfigsWithKerberos() throws Exception {
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
+    final AmbariManagementController  mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+
+    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
+    final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+    final ServiceConfigVersionResponse mockServiceConfigVersionResponse = easyMockSupport.createNiceMock(ServiceConfigVersionResponse.class);
+    final Config mockHiveEnv = easyMockSupport.createNiceMock(Config.class);
+    final Config mockHiveSite = easyMockSupport.createNiceMock(Config.class);
+    final Config mockHiveServerSite = easyMockSupport.createNiceMock(Config.class);
+
+    final Map<String, String> propertiesExpectedHiveEnv = new HashMap<String, String>();
+    final Map<String, String> propertiesExpectedHiveSite = new HashMap<String, String>() {{
+      put("hive.server2.authentication", "kerberos");
+    }};
+    final Map<String, String> propertiesExpectedHiveServerSite = new HashMap<String, String>() {{
+    }};
+    final Map<String, Service> servicesExpected = new HashMap<String, Service>(){{
+      put("KERBEROS", null);
+    }};
+
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+        bind(ConfigHelper.class).toInstance(mockConfigHelper);
+        bind(Clusters.class).toInstance(mockClusters);
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+      }
+    });
+
+    final UpgradeCatalog210 upgradeCatalog210 =  mockInjector.getInstance(UpgradeCatalog210.class);
+
+    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
+    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", mockClusterExpected);
+    }}).once();
+
+    Capture<ConfigurationRequest> configCreation = Capture.newInstance(CaptureType.ALL);
+
+    expect(mockClusterExpected.getDesiredConfigByType("hive-env")).andReturn(mockHiveEnv).atLeastOnce();
+    expect(mockClusterExpected.getDesiredConfigByType("hiveserver2-site")).andReturn(mockHiveServerSite).atLeastOnce();
+    expect(mockHiveEnv.getProperties()).andReturn(propertiesExpectedHiveEnv).anyTimes();
+    expect(mockHiveServerSite.getProperties()).andReturn(propertiesExpectedHiveServerSite).anyTimes();
+    expect(mockClusterExpected.getDesiredConfigByType("hive-site")).andReturn(mockHiveSite).atLeastOnce();
+    expect(mockHiveSite.getProperties()).andReturn(propertiesExpectedHiveSite).anyTimes();
+    expect(mockClusterExpected.getServices()).andReturn(servicesExpected).atLeastOnce();
+    expect(mockAmbariManagementController.createConfiguration(capture(configCreation))).andReturn(null).atLeastOnce();
+
+    easyMockSupport.replayAll();
+    upgradeCatalog210.updateHiveConfigs();
+    easyMockSupport.verifyAll();
+
+    Assert.assertEquals(2, configCreation.getValues().size());
+
+    boolean hiveEnvFound = false;
+    for (ConfigurationRequest cr: configCreation.getValues()){
+      if (cr.getType().equalsIgnoreCase("hive-env")){
+        hiveEnvFound = true;
+        Assert.assertTrue(cr.getProperties().containsKey("hive_security_authorization"));
+        Assert.assertTrue("sqlstdauth".equalsIgnoreCase(cr.getProperties().get("hive_security_authorization")));
+      }
+    }
+
+    Assert.assertTrue(hiveEnvFound);
+  }
+
+  @Test
   public void TestUpdateHiveEnvContent() {
     EasyMockSupport easyMockSupport = new EasyMockSupport();
     final AmbariManagementController  mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);