You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by nc...@apache.org on 2016/09/23 13:19:14 UTC
[01/16] ambari git commit: Revert "AMBARI-18406. Create
authentication filter to perform Kerberos authentication for Ambari (rlevas)"
Repository: ambari
Updated Branches:
refs/heads/branch-dev-patch-upgrade c0cee00e3 -> 0bbdb4f3a
Revert "AMBARI-18406. Create authentication filter to perform Kerberos authentication for Ambari (rlevas)"
This reverts commit b4320b5a8d29b812e9fe86da69a219a17d5e4ea7.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/dcf779d2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/dcf779d2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/dcf779d2
Branch: refs/heads/branch-dev-patch-upgrade
Commit: dcf779d28e511b07821e6f54702b918a87b22d02
Parents: 6038e01
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Sep 21 10:42:10 2016 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Wed Sep 21 10:42:54 2016 -0400
----------------------------------------------------------------------
ambari-project/pom.xml | 5 -
ambari-server/pom.xml | 6 +-
.../server/configuration/Configuration.java | 30 +--
.../server/controller/KerberosHelper.java | 2 -
.../server/controller/KerberosHelperImpl.java | 18 +-
.../server/security/AmbariEntryPoint.java | 15 +-
.../AmbariAuthToLocalUserDetailsService.java | 139 -----------
.../AmbariKerberosAuthenticationFilter.java | 172 --------------
.../kerberos/AmbariKerberosTicketValidator.java | 93 --------
.../AbstractPrepareKerberosServerAction.java | 10 +-
.../ConfigureAmbariIdentitiesServerAction.java | 235 -------------------
.../ConfigureAmbariIndetityServerAction.java | 208 ++++++++++++++++
.../kerberos/CreatePrincipalsServerAction.java | 5 +-
.../kerberos/KerberosServerAction.java | 12 +-
.../webapp/WEB-INF/spring-security.xml | 32 +--
.../server/controller/KerberosHelperTest.java | 6 +-
...AmbariAuthToLocalUserDetailsServiceTest.java | 92 --------
.../AmbariKerberosAuthenticationFilterTest.java | 133 -----------
.../AmbariKerberosTicketValidatorTest.java | 49 ----
19 files changed, 241 insertions(+), 1021 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-project/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-project/pom.xml b/ambari-project/pom.xml
index 4f045fe..2615b46 100644
--- a/ambari-project/pom.xml
+++ b/ambari-project/pom.xml
@@ -132,11 +132,6 @@
<version>3.1.2.RELEASE</version>
</dependency>
<dependency>
- <groupId>org.springframework.security.kerberos</groupId>
- <artifactId>spring-security-kerberos-web</artifactId>
- <version>1.0.1.RELEASE</version>
- </dependency>
- <dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-mock</artifactId>
<version>2.0.8</version>
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 5731c9d..197c992 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -44,7 +44,7 @@
<stacksSrcLocation>src/main/resources/stacks/${stack.distribution}</stacksSrcLocation>
<tarballResourcesFolder>src/main/resources</tarballResourcesFolder>
<skipPythonTests>false</skipPythonTests>
- <hadoop.version>2.7.2</hadoop.version>
+ <hadoop.version>2.7.1</hadoop.version>
<empty.dir>src/main/package</empty.dir> <!-- any directory in project with not very big amount of files (not to waste-load them) -->
<el.log>ALL</el.log> <!-- log level for EclipseLink eclipselink-staticweave-maven-plugin -->
</properties>
@@ -986,10 +986,6 @@
<artifactId>spring-security-web</artifactId>
</dependency>
<dependency>
- <groupId>org.springframework.security.kerberos</groupId>
- <artifactId>spring-security-kerberos-web</artifactId>
- </dependency>
- <dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-mock</artifactId>
<scope>test</scope>
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index f1058b6..b2fa4c0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -5284,11 +5284,9 @@ public class Configuration {
try {
orderedUserTypes.add(UserType.valueOf(type.toUpperCase()));
} catch (IllegalArgumentException e) {
- String message = String.format("While processing ordered user types from %s, " +
+ throw new IllegalArgumentException(String.format("While processing ordered user types from %s, " +
"%s was found to be an invalid user type.",
- KERBEROS_AUTH_USER_TYPES.getKey(), type);
- LOG.error(message);
- throw new IllegalArgumentException(message, e);
+ KERBEROS_AUTH_USER_TYPES.getKey(), type), e);
}
}
}
@@ -5322,11 +5320,9 @@ public class Configuration {
// Validate the SPNEGO principal name to ensure it was set.
// Log any found issues.
if (StringUtils.isEmpty(kerberosAuthProperties.getSpnegoPrincipalName())) {
- String message = String.format("The SPNEGO principal name specified in %s is empty. " +
+ throw new IllegalArgumentException(String.format("The SPNEGO principal name specified in %s is empty. " +
"This will cause issues authenticating users using Kerberos.",
- KERBEROS_AUTH_SPNEGO_PRINCIPAL.getKey());
- LOG.error(message);
- throw new IllegalArgumentException(message);
+ KERBEROS_AUTH_SPNEGO_PRINCIPAL.getKey()));
}
// Get the SPNEGO keytab file. There is nothing special to process for this value.
@@ -5335,25 +5331,19 @@ public class Configuration {
// Validate the SPNEGO keytab file to ensure it was set, it exists and it is readable by Ambari.
// Log any found issues.
if (StringUtils.isEmpty(kerberosAuthProperties.getSpnegoKeytabFilePath())) {
- String message = String.format("The SPNEGO keytab file path specified in %s is empty. " +
+ throw new IllegalArgumentException(String.format("The SPNEGO keytab file path specified in %s is empty. " +
"This will cause issues authenticating users using Kerberos.",
- KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey());
- LOG.error(message);
- throw new IllegalArgumentException(message);
+ KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey()));
} else {
File keytabFile = new File(kerberosAuthProperties.getSpnegoKeytabFilePath());
if (!keytabFile.exists()) {
- String message = String.format("The SPNEGO keytab file path (%s) specified in %s does not exist. " +
+ throw new IllegalArgumentException(String.format("The SPNEGO keytab file path (%s) specified in %s does not exist. " +
"This will cause issues authenticating users using Kerberos.",
- keytabFile.getAbsolutePath(), KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey());
- LOG.error(message);
- throw new IllegalArgumentException(message);
+ keytabFile.getAbsolutePath(), KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey()));
} else if (!keytabFile.canRead()) {
- String message = String.format("The SPNEGO keytab file path (%s) specified in %s cannot be read. " +
+ throw new IllegalArgumentException(String.format("The SPNEGO keytab file path (%s) specified in %s cannot be read. " +
"This will cause issues authenticating users using Kerberos.",
- keytabFile.getAbsolutePath(), KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey());
- LOG.error(message);
- throw new IllegalArgumentException(message);
+ keytabFile.getAbsolutePath(), KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey()));
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
index 1153d01..c4d21fc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
@@ -70,8 +70,6 @@ public interface KerberosHelper {
String AMBARI_IDENTITY_NAME = "ambari-server";
- String SPNEGO_IDENTITY_NAME = "spnego";
-
String CREATE_AMBARI_PRINCIPAL = "create_ambari_principal";
String MANAGE_IDENTITIES = "manage_identities";
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index a3c6fd4..5bc5cd8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -61,7 +61,7 @@ import org.apache.ambari.server.security.encryption.CredentialStoreService;
import org.apache.ambari.server.serveraction.ActionLog;
import org.apache.ambari.server.serveraction.ServerAction;
import org.apache.ambari.server.serveraction.kerberos.CleanupServerAction;
-import org.apache.ambari.server.serveraction.kerberos.ConfigureAmbariIdentitiesServerAction;
+import org.apache.ambari.server.serveraction.kerberos.ConfigureAmbariIndetityServerAction;
import org.apache.ambari.server.serveraction.kerberos.CreateKeytabFilesServerAction;
import org.apache.ambari.server.serveraction.kerberos.CreatePrincipalsServerAction;
import org.apache.ambari.server.serveraction.kerberos.DestroyPrincipalsServerAction;
@@ -761,7 +761,7 @@ public class KerberosHelperImpl implements KerberosHelper {
* @param ambariServerIdentity the ambari server's {@link KerberosIdentityDescriptor}
* @param configurations a map of compiled configrations used for variable replacment
* @throws AmbariException
- * @see ConfigureAmbariIdentitiesServerAction#installAmbariServerIdentity(String, String, String, ActionLog)
+ * @see ConfigureAmbariIndetityServerAction#installAmbariServerIdentity(String, String, String, ActionLog)
*/
private void installAmbariIdentity(KerberosIdentityDescriptor ambariServerIdentity,
Map<String, Map<String, String>> configurations) throws AmbariException {
@@ -775,7 +775,7 @@ public class KerberosHelperImpl implements KerberosHelper {
if(keytabDescriptor != null) {
String keytabFilePath = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
- injector.getInstance(ConfigureAmbariIdentitiesServerAction.class)
+ injector.getInstance(ConfigureAmbariIndetityServerAction.class)
.installAmbariServerIdentity(principal, ambariServerPrincipalEntity.getCachedKeytabPath(), keytabFilePath, null);
}
}
@@ -1259,7 +1259,7 @@ public class KerberosHelperImpl implements KerberosHelper {
serviceName, componentName, kerberosDescriptor, filterContext);
if (hostname.equals(ambariServerHostname)) {
- addAmbariServerIdentities(kerberosEnvConfig.getProperties(), kerberosDescriptor, identities);
+ addAmbariServerIdentity(kerberosEnvConfig.getProperties(), kerberosDescriptor, identities);
}
if (!identities.isEmpty()) {
@@ -1346,7 +1346,7 @@ public class KerberosHelperImpl implements KerberosHelper {
* @param kerberosDescriptor the kerberos descriptor
* @param identities the collection of identities to add to
*/
- void addAmbariServerIdentities(Map<String, String> kerberosEnvProperties, KerberosDescriptor kerberosDescriptor, List<KerberosIdentityDescriptor> identities) {
+ void addAmbariServerIdentity(Map<String, String> kerberosEnvProperties, KerberosDescriptor kerberosDescriptor, List<KerberosIdentityDescriptor> identities) {
// Determine if we should _calculate_ the Ambari service identity.
// If kerberos-env/create_ambari_principal is not set to false the identity should be calculated.
boolean createAmbariPrincipal = (kerberosEnvProperties == null) || !"false".equalsIgnoreCase(kerberosEnvProperties.get(CREATE_AMBARI_PRINCIPAL));
@@ -1357,12 +1357,6 @@ public class KerberosHelperImpl implements KerberosHelper {
if (ambariServerIdentity != null) {
identities.add(ambariServerIdentity);
}
-
- // Add the spnego principal for the Ambari server host....
- KerberosIdentityDescriptor spnegoIdentity = kerberosDescriptor.getIdentity(KerberosHelper.SPNEGO_IDENTITY_NAME);
- if (spnegoIdentity != null) {
- identities.add(spnegoIdentity);
- }
}
}
@@ -2805,7 +2799,7 @@ public class KerberosHelperImpl implements KerberosHelper {
clusterHostInfoJson,
"{}",
hostParamsJson,
- ConfigureAmbariIdentitiesServerAction.class,
+ ConfigureAmbariIndetityServerAction.class,
event,
commandParameters,
"Configure Ambari Identity",
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/security/AmbariEntryPoint.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/AmbariEntryPoint.java b/ambari-server/src/main/java/org/apache/ambari/server/security/AmbariEntryPoint.java
index e37976f..2028f46 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/AmbariEntryPoint.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/AmbariEntryPoint.java
@@ -28,19 +28,6 @@ import java.io.IOException;
public class AmbariEntryPoint implements AuthenticationEntryPoint {
@Override
public void commence(HttpServletRequest request, HttpServletResponse response, AuthenticationException authException) throws IOException, ServletException {
- /* *****************************************************************************************
- * To maintain backward compatibility and respond with the appropriate response when
- * authentication is needed, by default return an HTTP 403 status.
- *
- * However if requested by the user, respond such that the client is challenged to Negotiate
- * and reissue the request with a Kerberos token. This response is an HTTP 401 status with the
- * WWW-Authenticate: Negotiate" header.
- * ****************************************************************************************** */
- if ("true".equalsIgnoreCase(request.getHeader("X-Negotiate-Authentication"))) {
- response.setHeader("WWW-Authenticate", "Negotiate");
- response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Authentication requested");
- } else {
- response.sendError(HttpServletResponse.SC_FORBIDDEN, authException.getMessage());
- }
+ response.sendError(HttpServletResponse.SC_FORBIDDEN, authException.getMessage());
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java
deleted file mode 100644
index 3c62646..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.security.authentication.kerberos;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.security.authorization.AmbariGrantedAuthority;
-import org.apache.ambari.server.security.authorization.UserType;
-import org.apache.ambari.server.security.authorization.Users;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.security.core.userdetails.User;
-import org.springframework.security.core.userdetails.UserDetails;
-import org.springframework.security.core.userdetails.UserDetailsService;
-import org.springframework.security.core.userdetails.UsernameNotFoundException;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * AmbariAuthToLocalUserDetailsService is a {@link UserDetailsService} that translates
- * a Kerberos principal name into a local username that may be used when looking up
- * and Ambari user account.
- */
-public class AmbariAuthToLocalUserDetailsService implements UserDetailsService {
- private static final Logger LOG = LoggerFactory.getLogger(AmbariAuthToLocalUserDetailsService.class);
-
- private final Users users;
-
- private final List<UserType> userTypeOrder;
-
- /**
- * Constructor.
- * <p>
- * Given the Ambari {@link Configuration}, initializes the {@link KerberosName} class using
- * the <code>auth-to-local</code> rules from {@link AmbariKerberosAuthenticationProperties#getAuthToLocalRules()}.
- *
- * @param configuration the Ambari configuration data
- * @param users the Ambari users access object
- * @throws AmbariException if an error occurs parsing the user-provided auth-to-local rules
- */
- public AmbariAuthToLocalUserDetailsService(Configuration configuration, Users users) throws AmbariException {
- String authToLocalRules = null;
- List<UserType> orderedUserTypes = null;
-
- if (configuration != null) {
- AmbariKerberosAuthenticationProperties properties = configuration.getKerberosAuthenticationProperties();
-
- if (properties != null) {
- authToLocalRules = properties.getAuthToLocalRules();
- orderedUserTypes = properties.getOrderedUserTypes();
- }
- }
-
- if (StringUtils.isEmpty(authToLocalRules)) {
- authToLocalRules = "DEFAULT";
- }
-
- if ((orderedUserTypes == null) || orderedUserTypes.isEmpty()) {
- orderedUserTypes = Collections.singletonList(UserType.LDAP);
- }
-
- KerberosName.setRules(authToLocalRules);
-
- this.users = users;
- this.userTypeOrder = orderedUserTypes;
- }
-
- @Override
- public UserDetails loadUserByUsername(String principal) throws UsernameNotFoundException {
- KerberosName kerberosName = new KerberosName(principal);
-
- try {
- String username = kerberosName.getShortName();
-
- if (username == null) {
- String message = String.format("Failed to translate %s to a local username during Kerberos authentication.", principal);
- LOG.warn(message);
- throw new UsernameNotFoundException(message);
- }
-
- LOG.info("Translated {} to {} using auth-to-local rules during Kerberos authentication.", principal, username);
- return createUser(username);
- } catch (IOException e) {
- String message = String.format("Failed to translate %s to a local username during Kerberos authentication: %s", principal, e.getLocalizedMessage());
- LOG.warn(message);
- throw new UsernameNotFoundException(message, e);
- }
- }
-
- /**
- * Given a username, finds an appropriate account in the Ambari database.
- * <p>
- * User accounts are searched in order of preferred user type as specified in the Ambari configuration
- * ({@link Configuration#KERBEROS_AUTH_USER_TYPES}).
- *
- * @param username a username
- * @return the user details of the found user, or <code>null</code> if an appropriate user was not found
- */
- private UserDetails createUser(String username) {
- // Iterate over the ordered user types... when an account for the username/type combination is
- // found, build the related AmbariUserAuthentication instance and return it. Only the first
- // match matters... this may be an issue and cause some ambiguity in the event multiple user
- // types are specified in the configuration and multiple accounts for the same username, but
- // different types (LOCAL vs LDAP, etc...).
- for (UserType userType : userTypeOrder) {
- org.apache.ambari.server.security.authorization.User user = users.getUser(username, userType);
-
- if (user != null) {
- Collection<AmbariGrantedAuthority> userAuthorities = users.getUserAuthorities(user.getUserName(), user.getUserType());
- return new User(username, "", userAuthorities);
- }
- }
-
- String message = String.format("Failed find user account for user with username of %s during Kerberos authentication.", username);
- LOG.warn(message);
- throw new UsernameNotFoundException(message);
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilter.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilter.java
deleted file mode 100644
index a5a3922..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilter.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.security.authentication.kerberos;
-
-import org.apache.ambari.server.audit.AuditLogger;
-import org.apache.ambari.server.audit.event.AuditEvent;
-import org.apache.ambari.server.audit.event.LoginAuditEvent;
-import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.security.authentication.AmbariAuthenticationFilter;
-import org.apache.ambari.server.security.authorization.AuthorizationHelper;
-import org.apache.ambari.server.security.authorization.PermissionHelper;
-import org.apache.ambari.server.utils.RequestUtils;
-import org.springframework.security.authentication.AuthenticationManager;
-import org.springframework.security.core.Authentication;
-import org.springframework.security.core.AuthenticationException;
-import org.springframework.security.kerberos.web.authentication.SpnegoAuthenticationProcessingFilter;
-import org.springframework.security.web.AuthenticationEntryPoint;
-import org.springframework.security.web.authentication.AuthenticationFailureHandler;
-import org.springframework.security.web.authentication.AuthenticationSuccessHandler;
-
-import javax.servlet.FilterChain;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-
-/**
- * AmbariKerberosAuthenticationFilter extends the {@link SpnegoAuthenticationProcessingFilter} class
- * to perform Kerberos-based authentication for Ambari.
- * <p>
- * If configured, auditing is performed using {@link AuditLogger}.
- */
-public class AmbariKerberosAuthenticationFilter extends SpnegoAuthenticationProcessingFilter implements AmbariAuthenticationFilter {
-
- /**
- * Audit logger
- */
- private final AuditLogger auditLogger;
-
- /**
- * A Boolean value indicating whether Kerberos authentication is enabled or not.
- */
- private final boolean kerberosAuthenticationEnabled;
-
- /**
- * Constructor.
- * <p>
- * Given supplied data, sets up the the {@link SpnegoAuthenticationProcessingFilter} to perform
- * authentication and audit logging if configured do to so.
- *
- * @param authenticationManager the Spring authentication manager
- * @param entryPoint the Spring entry point
- * @param configuration the Ambari configuration data
- * @param auditLogger an audit logger
- * @param permissionHelper a permission helper to aid in audit logging
- */
- public AmbariKerberosAuthenticationFilter(AuthenticationManager authenticationManager, final AuthenticationEntryPoint entryPoint, Configuration configuration, final AuditLogger auditLogger, final PermissionHelper permissionHelper) {
- AmbariKerberosAuthenticationProperties kerberosAuthenticationProperties = (configuration == null)
- ? null
- : configuration.getKerberosAuthenticationProperties();
-
- kerberosAuthenticationEnabled = (kerberosAuthenticationProperties != null) && kerberosAuthenticationProperties.isKerberosAuthenticationEnabled();
-
- this.auditLogger = auditLogger;
-
- setAuthenticationManager(authenticationManager);
-
- setFailureHandler(new AuthenticationFailureHandler() {
- @Override
- public void onAuthenticationFailure(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, AuthenticationException e) throws IOException, ServletException {
- if (auditLogger.isEnabled()) {
- AuditEvent loginFailedAuditEvent = LoginAuditEvent.builder()
- .withRemoteIp(RequestUtils.getRemoteAddress(httpServletRequest))
- .withTimestamp(System.currentTimeMillis())
- .withReasonOfFailure(e.getLocalizedMessage())
- .build();
- auditLogger.log(loginFailedAuditEvent);
- }
-
- entryPoint.commence(httpServletRequest, httpServletResponse, e);
- }
- });
-
- setSuccessHandler(new AuthenticationSuccessHandler() {
- @Override
- public void onAuthenticationSuccess(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Authentication authentication) throws IOException, ServletException {
- if (auditLogger.isEnabled()) {
- AuditEvent loginSucceededAuditEvent = LoginAuditEvent.builder()
- .withRemoteIp(RequestUtils.getRemoteAddress(httpServletRequest))
- .withUserName(authentication.getName())
- .withTimestamp(System.currentTimeMillis())
- .withRoles(permissionHelper.getPermissionLabels(authentication))
- .build();
- auditLogger.log(loginSucceededAuditEvent);
- }
- }
- });
- }
-
- /**
- * Tests to determine if this authentication filter is applicable given the Ambari configuration
- * and the user's HTTP request.
- * <p>
- * If the Ambari configuration indicates the Kerberos authentication is enabled and the HTTP request
- * contains the appropriate <code>Authorization</code> header, than this filter may be applied;
- * otherwise it should be skipped.
- *
- * @param httpServletRequest the request
- * @return true if this filter should be applied; false otherwise
- */
- @Override
- public boolean shouldApply(HttpServletRequest httpServletRequest) {
- if (kerberosAuthenticationEnabled) {
- String header = httpServletRequest.getHeader("Authorization");
- return (header != null) && (header.startsWith("Negotiate ") || header.startsWith("Kerberos "));
- } else {
- return false;
- }
- }
-
- /**
- * Performs the logic for this filter.
- * <p>
- * Checks whether the authentication information is filled. If it is not, then a login failed audit event is logged.
- * <p>
- * Then, forwards the workflow to {@link SpnegoAuthenticationProcessingFilter#doFilter(ServletRequest, ServletResponse, FilterChain)}
- *
- * @param servletRequest the request
- * @param servletResponse the response
- * @param filterChain the Spring filter chain
- * @throws IOException
- * @throws ServletException
- */
- @Override
- public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
- HttpServletRequest httpServletRequest = (HttpServletRequest) servletRequest;
-
- if (shouldApply(httpServletRequest)) {
- if (auditLogger.isEnabled() && (AuthorizationHelper.getAuthenticatedName() == null)) {
- AuditEvent loginFailedAuditEvent = LoginAuditEvent.builder()
- .withRemoteIp(RequestUtils.getRemoteAddress(httpServletRequest))
- .withTimestamp(System.currentTimeMillis())
- .withReasonOfFailure("Authentication required")
- .withUserName(null)
- .build();
- auditLogger.log(loginFailedAuditEvent);
- }
-
- super.doFilter(servletRequest, servletResponse, filterChain);
- } else {
- filterChain.doFilter(servletRequest, servletResponse);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidator.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidator.java
deleted file mode 100644
index bb57108..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidator.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.security.authentication.kerberos;
-
-import org.apache.ambari.server.configuration.Configuration;
-import org.springframework.beans.factory.InitializingBean;
-import org.springframework.core.io.FileSystemResource;
-import org.springframework.core.io.Resource;
-import org.springframework.security.authentication.BadCredentialsException;
-import org.springframework.security.kerberos.authentication.KerberosTicketValidation;
-import org.springframework.security.kerberos.authentication.KerberosTicketValidator;
-import org.springframework.security.kerberos.authentication.sun.SunJaasKerberosTicketValidator;
-
-/**
- * AmbariKerberosTicketValidator is a {@link KerberosTicketValidator} implementation that delegates
- * to a {@link SunJaasKerberosTicketValidator}, if Kerberos authentication is enabled.
- * <p>
- * If Kerberos authentication is enabled, the following properties are set:
- * <ul>
- * <li>{@link SunJaasKerberosTicketValidator#setServicePrincipal(String)} using the Ambari server property from {@link Configuration#KERBEROS_AUTH_SPNEGO_PRINCIPAL}</li>
- * <li>{@link SunJaasKerberosTicketValidator#setKeyTabLocation(Resource)} using the Ambari server property from {@link Configuration#KERBEROS_AUTH_SPNEGO_KEYTAB_FILE}</li>
- * </ul>
- */
-public class AmbariKerberosTicketValidator implements KerberosTicketValidator, InitializingBean {
-
- private final SunJaasKerberosTicketValidator kerberosTicketValidator;
-
- /**
- * Creates a new AmbariKerberosTicketValidator
- *
- * @param configuration the Ambari server configuration
- */
- public AmbariKerberosTicketValidator(Configuration configuration) {
-
- AmbariKerberosAuthenticationProperties properties = (configuration == null)
- ? null
- : configuration.getKerberosAuthenticationProperties();
-
- if ((properties != null) && properties.isKerberosAuthenticationEnabled()) {
- kerberosTicketValidator = new SunJaasKerberosTicketValidator();
- kerberosTicketValidator.setServicePrincipal(properties.getSpnegoPrincipalName());
-
- if (properties.getSpnegoKeytabFilePath() != null) {
- kerberosTicketValidator.setKeyTabLocation(new FileSystemResource(properties.getSpnegoKeytabFilePath()));
- }
- } else {
- // Don't create the SunJaasKerberosTicketValidator if Kerberos authentication is not enabled.
- kerberosTicketValidator = null;
- }
- }
-
- @Override
- public void afterPropertiesSet() throws Exception {
- // If Kerberos authentication is enabled, forward this method invocation to the backing
- // SunJaasKerberosTicketValidator instance.
- if (kerberosTicketValidator != null) {
- kerberosTicketValidator.afterPropertiesSet();
- }
- }
-
- @Override
- public KerberosTicketValidation validateTicket(byte[] bytes) throws BadCredentialsException {
- // If Kerberos authentication is enabled, forward this method invocation to the backing
- // SunJaasKerberosTicketValidator instance.
- return (kerberosTicketValidator == null)
- ? null
- : kerberosTicketValidator.validateTicket(bytes);
- }
-
- public void setDebug(boolean debug) {
- // If Kerberos authentication is enabled, forward this method invocation to the backing
- // SunJaasKerberosTicketValidator instance.
- if (kerberosTicketValidator != null) {
- kerberosTicketValidator.setDebug(debug);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
index c283a65..b6b0713 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
@@ -156,15 +156,7 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
if (ambariServerIdentity != null) {
List<KerberosIdentityDescriptor> componentIdentities = Collections.singletonList(ambariServerIdentity);
kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
- identityFilter, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI_SERVER", "AMBARI_SERVER", kerberosConfigurations, configurations);
- propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
- }
-
- KerberosIdentityDescriptor spnegoIdentity = kerberosDescriptor.getIdentity(KerberosHelper.SPNEGO_IDENTITY_NAME);
- if (spnegoIdentity != null) {
- List<KerberosIdentityDescriptor> componentIdentities = Collections.singletonList(spnegoIdentity);
- kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
- identityFilter, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI_SERVER", "SPNEGO", kerberosConfigurations, configurations);
+ identityFilter, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI_SEVER", "AMBARI_SEVER", kerberosConfigurations, configurations);
propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
deleted file mode 100644
index 9c2c622..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
+++ /dev/null
@@ -1,235 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.serveraction.kerberos;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.ConcurrentMap;
-
-import com.google.inject.Inject;
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.actionmanager.HostRoleStatus;
-import org.apache.ambari.server.agent.CommandReport;
-import org.apache.ambari.server.controller.KerberosHelper;
-import org.apache.ambari.server.controller.utilities.KerberosChecker;
-import org.apache.ambari.server.orm.dao.HostDAO;
-import org.apache.ambari.server.orm.dao.KerberosPrincipalHostDAO;
-import org.apache.ambari.server.orm.entities.HostEntity;
-import org.apache.ambari.server.serveraction.ActionLog;
-import org.apache.ambari.server.utils.ShellCommandUtil;
-import org.apache.ambari.server.utils.StageUtils;
-import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.commons.io.FileUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * ConfigureAmbariIdentitiesServerAction is a ServerAction implementation that creates keytab files as
- * instructed.
- * <p/>
- * This class mainly relies on the KerberosServerAction to iterate through metadata identifying
- * the Kerberos keytab files that need to be created. For each identity in the metadata, this
- * implementation's
- * {@link KerberosServerAction#processIdentity(Map, String, KerberosOperationHandler, Map, Map)}
- * is invoked attempting the creation of the relevant keytab file.
- */
-public class ConfigureAmbariIdentitiesServerAction extends KerberosServerAction {
-
-
- private static final String KEYTAB_PATTERN = "keyTab=\"(.+)?\"";
- private static final String PRINCIPAL_PATTERN = "principal=\"(.+)?\"";
-
- private final static Logger LOG = LoggerFactory.getLogger(ConfigureAmbariIdentitiesServerAction.class);
-
- @Inject
- private KerberosPrincipalHostDAO kerberosPrincipalHostDAO;
-
- @Inject
- private HostDAO hostDAO;
-
- /**
- * Called to execute this action. Upon invocation, calls
- * {@link KerberosServerAction#processIdentities(Map)} )}
- * to iterate through the Kerberos identity metadata and call
- * {@link ConfigureAmbariIdentitiesServerAction#processIdentities(Map)}
- * for each identity to process.
- *
- * @param requestSharedDataContext a Map to be used a shared data among all ServerActions related
- * to a given request
- * @return a CommandReport indicating the result of this action
- * @throws AmbariException
- * @throws InterruptedException
- */
- @Override
- public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext) throws
- AmbariException, InterruptedException {
- return processIdentities(requestSharedDataContext);
- }
-
-
- /**
- * Creates keytab file for ambari-server identity.
- * <p/>
- * It is expected that the {@link CreatePrincipalsServerAction}
- * (or similar) and {@link CreateKeytabFilesServerAction} has executed before this action.
- *
- * @param identityRecord a Map containing the data for the current identity record
- * @param evaluatedPrincipal a String indicating the relevant principal
- * @param operationHandler a KerberosOperationHandler used to perform Kerberos-related
- * tasks for specific Kerberos implementations
- * (MIT, Active Directory, etc...)
- * @param kerberosConfiguration a Map of configuration properties from kerberos-env
- * @param requestSharedDataContext a Map to be used a shared data among all ServerActions related
- * to a given request @return a CommandReport, indicating an error
- * condition; or null, indicating a success condition
- * @throws AmbariException if an error occurs while processing the identity record
- */
- @Override
- protected CommandReport processIdentity(Map<String, String> identityRecord, String evaluatedPrincipal,
- KerberosOperationHandler operationHandler,
- Map<String, String> kerberosConfiguration,
- Map<String, Object> requestSharedDataContext)
- throws AmbariException {
- CommandReport commandReport = null;
-
- if (identityRecord != null) {
- String message;
- String dataDirectory = getDataDirectoryPath();
-
- if (dataDirectory == null) {
- message = "The data directory has not been set. Generated keytab files can not be stored.";
- LOG.error(message);
- commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
- } else {
-
- String hostName = identityRecord.get(KerberosIdentityDataFileReader.HOSTNAME);
- if (hostName != null && hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME)) {
- String destKeytabFilePath = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH);
- File hostDirectory = new File(dataDirectory, hostName);
- File srcKeytabFile = new File(hostDirectory, DigestUtils.sha1Hex(destKeytabFilePath));
-
- if (srcKeytabFile.exists()) {
- installAmbariServerIdentity(evaluatedPrincipal, srcKeytabFile.getAbsolutePath(), destKeytabFilePath, actionLog);
-
- if ("AMBARI_SERVER".equals(identityRecord.get(KerberosIdentityDataFileReader.COMPONENT))) {
- // Create/update the JAASFile...
- configureJAAS(evaluatedPrincipal, destKeytabFilePath, actionLog);
- }
- }
- }
- }
- }
-
- return commandReport;
- }
-
- /**
- * Installs the Ambari Server Kerberos identity by copying its keytab file to the specified location
- * and then creating the Ambari Server JAAS File.
- *
- * @param principal the ambari server principal name
- * @param srcKeytabFilePath the source location of the ambari server keytab file
- * @param destKeytabFilePath the destination location of the ambari server keytab file
- * @param actionLog the logger
- * @return true if success; false otherwise
- * @throws AmbariException
- */
- public boolean installAmbariServerIdentity(String principal,
- String srcKeytabFilePath,
- String destKeytabFilePath,
- ActionLog actionLog) throws AmbariException {
-
- // Use sudo to copy the file into place....
- try {
- ShellCommandUtil.Result result;
-
- // Ensure the parent directory exists...
- File destKeytabFile = new File(destKeytabFilePath);
- result = ShellCommandUtil.mkdir(destKeytabFile.getParent(), true);
- if (!result.isSuccessful()) {
- throw new AmbariException(result.getStderr());
- }
-
- // Copy the keytab file into place...
- result = ShellCommandUtil.copyFile(srcKeytabFilePath, destKeytabFilePath, true, true);
- if (!result.isSuccessful()) {
- throw new AmbariException(result.getStderr());
- } else {
- String ambariServerHostName = StageUtils.getHostName();
- HostEntity ambariServerHostEntity = hostDAO.findByName(ambariServerHostName);
- Long ambariServerHostID = (ambariServerHostEntity == null)
- ? null
- : ambariServerHostEntity.getHostId();
-
- if (ambariServerHostID == null) {
- String message = String.format("Failed to add the kerberos_principal_host record for %s on " +
- "the Ambari server host since the host id for Ambari server host, %s, was not found." +
- " This is not an error if an Ambari agent is not installed on the Ambari server host.",
- principal, ambariServerHostName);
- LOG.warn(message);
- actionLog.writeStdErr(message);
- } else if (!kerberosPrincipalHostDAO.exists(principal, ambariServerHostID)) {
- kerberosPrincipalHostDAO.create(principal, ambariServerHostID);
- }
-
- actionLog.writeStdOut(String.format("Created Ambari server keytab file for %s at %s", principal, destKeytabFile));
- }
- } catch (InterruptedException | IOException e) {
- throw new AmbariException(e.getLocalizedMessage(), e);
- }
-
- return true;
- }
-
- private void configureJAAS(String evaluatedPrincipal, String keytabFilePath, ActionLog actionLog) {
- String jaasConfPath = System.getProperty(KerberosChecker.JAVA_SECURITY_AUTH_LOGIN_CONFIG);
- if (jaasConfPath != null) {
- File jaasConfigFile = new File(jaasConfPath);
- try {
- String jaasConfig = FileUtils.readFileToString(jaasConfigFile);
- File oldJaasConfigFile = new File(jaasConfPath + ".bak");
- FileUtils.writeStringToFile(oldJaasConfigFile, jaasConfig);
- jaasConfig = jaasConfig.replaceFirst(KEYTAB_PATTERN, "keyTab=\"" + keytabFilePath + "\"");
- jaasConfig = jaasConfig.replaceFirst(PRINCIPAL_PATTERN, "principal=\"" + evaluatedPrincipal + "\"");
- FileUtils.writeStringToFile(jaasConfigFile, jaasConfig);
- String message = String.format("JAAS config file %s modified successfully for principal %s.", jaasConfigFile
- .getName(), evaluatedPrincipal);
- if (actionLog != null) {
- actionLog.writeStdOut(message);
- }
- } catch (IOException e) {
- String message = String.format("Failed to configure JAAS file %s for %s - %s", jaasConfigFile,
- evaluatedPrincipal, e.getMessage());
- if (actionLog != null) {
- actionLog.writeStdErr(message);
- }
- LOG.error(message, e);
- }
- } else {
- String message = String.format("Failed to configure JAAS, config file should be passed to Ambari server as: " +
- "%s.", KerberosChecker.JAVA_SECURITY_AUTH_LOGIN_CONFIG);
- if (actionLog != null) {
- actionLog.writeStdErr(message);
- }
- LOG.error(message);
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java
new file mode 100644
index 0000000..96540ef
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java
@@ -0,0 +1,208 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.controller.utilities.KerberosChecker;
+import org.apache.ambari.server.serveraction.ActionLog;
+import org.apache.ambari.server.utils.ShellCommandUtil;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.io.FileUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * ConfigureAmbariIndetityServerAction is a ServerAction implementation that creates keytab files as
+ * instructed.
+ * <p/>
+ * This class mainly relies on the KerberosServerAction to iterate through metadata identifying
+ * the Kerberos keytab files that need to be created. For each identity in the metadata, this
+ * implementation's
+ * {@link KerberosServerAction#processIdentity(Map, String, KerberosOperationHandler, Map, Map)}
+ * is invoked attempting the creation of the relevant keytab file.
+ */
+public class ConfigureAmbariIndetityServerAction extends KerberosServerAction {
+
+
+ private static final String KEYTAB_PATTERN = "keyTab=\"(.+)?\"";
+ private static final String PRINCIPAL_PATTERN = "principal=\"(.+)?\"";
+
+ private final static Logger LOG = LoggerFactory.getLogger(ConfigureAmbariIndetityServerAction.class);
+
+ /**
+ * Called to execute this action. Upon invocation, calls
+ * {@link KerberosServerAction#processIdentities(Map)} )}
+ * to iterate through the Kerberos identity metadata and call
+ * {@link ConfigureAmbariIndetityServerAction#processIdentities(Map)}
+ * for each identity to process.
+ *
+ * @param requestSharedDataContext a Map to be used a shared data among all ServerActions related
+ * to a given request
+ * @return a CommandReport indicating the result of this action
+ * @throws AmbariException
+ * @throws InterruptedException
+ */
+ @Override
+ public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext) throws
+ AmbariException, InterruptedException {
+ return processIdentities(requestSharedDataContext);
+ }
+
+
+ /**
+ * Creates keytab file for ambari-server identity.
+ * <p/>
+ * It is expected that the {@link CreatePrincipalsServerAction}
+ * (or similar) and {@link CreateKeytabFilesServerAction} has executed before this action.
+ *
+ * @param identityRecord a Map containing the data for the current identity record
+ * @param evaluatedPrincipal a String indicating the relevant principal
+ * @param operationHandler a KerberosOperationHandler used to perform Kerberos-related
+ * tasks for specific Kerberos implementations
+ * (MIT, Active Directory, etc...)
+ * @param kerberosConfiguration a Map of configuration properties from kerberos-env
+ * @param requestSharedDataContext a Map to be used a shared data among all ServerActions related
+ * to a given request @return a CommandReport, indicating an error
+ * condition; or null, indicating a success condition
+ * @throws AmbariException if an error occurs while processing the identity record
+ */
+ @Override
+ protected CommandReport processIdentity(Map<String, String> identityRecord, String evaluatedPrincipal,
+ KerberosOperationHandler operationHandler,
+ Map<String, String> kerberosConfiguration,
+ Map<String, Object> requestSharedDataContext)
+ throws AmbariException {
+ CommandReport commandReport = null;
+
+ if (identityRecord != null) {
+ String message;
+ String dataDirectory = getDataDirectoryPath();
+
+ if (operationHandler == null) {
+ message = String.format("Failed to create keytab file for %s, missing KerberosOperationHandler", evaluatedPrincipal);
+ actionLog.writeStdErr(message);
+ LOG.error(message);
+ commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+ } else if (dataDirectory == null) {
+ message = "The data directory has not been set. Generated keytab files can not be stored.";
+ LOG.error(message);
+ commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+ } else {
+
+ String hostName = identityRecord.get(KerberosIdentityDataFileReader.HOSTNAME);
+ if (hostName != null && hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME)) {
+ String destKeytabFilePath = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH);
+ File hostDirectory = new File(dataDirectory, hostName);
+ File srcKeytabFile = new File(hostDirectory, DigestUtils.sha1Hex(destKeytabFilePath));
+
+ if(srcKeytabFile.exists()) {
+ installAmbariServerIdentity(evaluatedPrincipal, srcKeytabFile.getAbsolutePath(), destKeytabFilePath, actionLog);
+ }
+ }
+ }
+ }
+
+ return commandReport;
+ }
+
+ /**
+ * Installs the Ambari Server Kerberos identity by copying its keytab file to the specified location
+ * and then creating the Ambari Server JAAS File.
+ *
+ * @param principal the ambari server principal name
+ * @param srcKeytabFilePath the source location of the ambari server keytab file
+ * @param destKeytabFilePath the destination location of the ambari server keytab file
+ * @param actionLog the logger
+ * @return true if success; false otherwise
+ * @throws AmbariException
+ */
+ public boolean installAmbariServerIdentity(String principal,
+ String srcKeytabFilePath,
+ String destKeytabFilePath,
+ ActionLog actionLog) throws AmbariException {
+
+ // Use sudo to copy the file into place....
+ try {
+ ShellCommandUtil.Result result;
+
+ // Ensure the parent directory exists...
+ File destKeytabFile = new File(destKeytabFilePath);
+ result = ShellCommandUtil.mkdir(destKeytabFile.getParent(), true);
+ if (!result.isSuccessful()) {
+ throw new AmbariException(result.getStderr());
+ }
+
+ // Copy the keytab file into place...
+ result = ShellCommandUtil.copyFile(srcKeytabFilePath, destKeytabFilePath, true, true);
+ if (!result.isSuccessful()) {
+ throw new AmbariException(result.getStderr());
+ }
+ } catch (InterruptedException | IOException e) {
+ throw new AmbariException(e.getLocalizedMessage(), e);
+ }
+
+ // Create/update the JAASFile...
+ configureJAAS(principal, destKeytabFilePath, actionLog);
+
+ return true;
+ }
+
+ private void configureJAAS(String evaluatedPrincipal, String keytabFilePath, ActionLog actionLog) {
+ String jaasConfPath = System.getProperty(KerberosChecker.JAVA_SECURITY_AUTH_LOGIN_CONFIG);
+ if (jaasConfPath != null) {
+ File jaasConfigFile = new File(jaasConfPath);
+ try {
+ String jaasConfig = FileUtils.readFileToString(jaasConfigFile);
+ File oldJaasConfigFile = new File(jaasConfPath + ".bak");
+ FileUtils.writeStringToFile(oldJaasConfigFile, jaasConfig);
+ jaasConfig = jaasConfig.replaceFirst(KEYTAB_PATTERN, "keyTab=\"" + keytabFilePath + "\"");
+ jaasConfig = jaasConfig.replaceFirst(PRINCIPAL_PATTERN, "principal=\"" + evaluatedPrincipal + "\"");
+ FileUtils.writeStringToFile(jaasConfigFile, jaasConfig);
+ String message = String.format("JAAS config file %s modified successfully for principal %s.", jaasConfigFile
+ .getName(), evaluatedPrincipal);
+ if (actionLog != null) {
+ actionLog.writeStdOut(message);
+ }
+ } catch (IOException e) {
+ String message = String.format("Failed to configure JAAS file %s for %s - %s", jaasConfigFile,
+ evaluatedPrincipal, e.getMessage());
+ if (actionLog != null) {
+ actionLog.writeStdErr(message);
+ }
+ LOG.error(message, e);
+ }
+ } else {
+ String message = String.format("Failed to configure JAAS, config file should be passed to Ambari server as: " +
+ "%s.", KerberosChecker.JAVA_SECURITY_AUTH_LOGIN_CONFIG);
+ if (actionLog != null) {
+ actionLog.writeStdErr(message);
+ }
+ LOG.error(message);
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
index b99c25a..e31e6ff 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
@@ -18,6 +18,7 @@
package org.apache.ambari.server.serveraction.kerberos;
+import com.google.common.base.Optional;
import com.google.inject.Inject;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
@@ -144,7 +145,7 @@ public class CreatePrincipalsServerAction extends KerberosServerAction {
// This principal has been processed and a keytab file has been distributed... do not process it.
processPrincipal = false;
} else {
- // This principal has been processed but a keytab file for it has not been distributed... process it.
+ // This principal has been processed but a keytab file for it has been distributed... process it.
processPrincipal = true;
}
}
@@ -231,7 +232,7 @@ public class CreatePrincipalsServerAction extends KerberosServerAction {
String password = securePasswordHelper.createSecurePassword(length, minLowercaseLetters, minUppercaseLetters, minDigits, minPunctuation, minWhitespace);
try {
- /*
+ /**
* true indicates a new principal was created, false indicates an existing principal was updated
*/
boolean created;
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
index 1d8c1ca..db210e0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
@@ -28,7 +28,6 @@ import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
import org.apache.ambari.server.serveraction.AbstractServerAction;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.utils.StageUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -515,19 +514,12 @@ public abstract class KerberosServerAction extends AbstractServerAction {
if (record != null) {
String principal = record.get(KerberosIdentityDataFileReader.PRINCIPAL);
+ String host = record.get(KerberosIdentityDataFileReader.HOSTNAME);
if (principal != null) {
- String hostname = record.get(KerberosIdentityDataFileReader.HOSTNAME);
-
- if(KerberosHelper.AMBARI_SERVER_HOST_NAME.equals(hostname)) {
- // Replace KerberosHelper.AMBARI_SERVER_HOST_NAME with the actual hostname where the Ambari
- // server is... this host
- hostname = StageUtils.getHostName();
- }
-
// Evaluate the principal "pattern" found in the record to generate the "evaluated principal"
// by replacing the _HOST and _REALM variables.
- String evaluatedPrincipal = principal.replace("_HOST", hostname).replace("_REALM", defaultRealm);
+ String evaluatedPrincipal = principal.replace("_HOST", host).replace("_REALM", defaultRealm);
commandReport = processIdentity(record, evaluatedPrincipal, operationHandler, kerberosConfiguration, requestSharedDataContext);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml b/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
index 500c0bf..a86973c 100644
--- a/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
+++ b/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
@@ -30,11 +30,16 @@
<custom-filter ref="ambariAuthorizationFilter" before="FILTER_SECURITY_INTERCEPTOR"/>
</http>
+ <!--<ldap-server id="ldapServer" root="dc=ambari,dc=apache,dc=org"/>-->
+
<authentication-manager alias="authenticationManager">
+
<authentication-provider ref="ambariLocalAuthenticationProvider"/>
+
<authentication-provider ref="ambariLdapAuthenticationProvider"/>
+
<authentication-provider ref="ambariInternalAuthenticationProvider"/>
- <authentication-provider ref="kerberosServiceAuthenticationProvider"/>
+
</authentication-manager>
<beans:bean id="ambariEntryPoint" class="org.apache.ambari.server.security.AmbariEntryPoint">
@@ -44,7 +49,6 @@
<beans:constructor-arg>
<beans:list>
<beans:ref bean="ambariBasicAuthenticationFilter"/>
- <beans:ref bean="ambariKerberosAuthenticationFilter"/>
<beans:ref bean="ambariJwtAuthenticationFilter"/>
</beans:list>
</beans:constructor-arg>
@@ -65,14 +69,6 @@
<beans:constructor-arg ref="permissionHelper"/>
</beans:bean>
- <beans:bean id="ambariKerberosAuthenticationFilter" class="org.apache.ambari.server.security.authentication.kerberos.AmbariKerberosAuthenticationFilter">
- <beans:constructor-arg ref="authenticationManager"/>
- <beans:constructor-arg ref="ambariEntryPoint"/>
- <beans:constructor-arg ref="ambariConfiguration"/>
- <beans:constructor-arg ref="auditLogger"/>
- <beans:constructor-arg ref="permissionHelper"/>
- </beans:bean>
-
<beans:bean id="ambariAuthorizationFilter" class="org.apache.ambari.server.security.authorization.AmbariAuthorizationFilter">
<beans:constructor-arg ref="ambariEntryPoint"/>
<beans:constructor-arg ref="ambariConfiguration"/>
@@ -81,20 +77,4 @@
<beans:constructor-arg ref="permissionHelper"/>
</beans:bean>
- <beans:bean id="kerberosServiceAuthenticationProvider" class="org.springframework.security.kerberos.authentication.KerberosServiceAuthenticationProvider">
- <beans:property name="ticketValidator">
- <beans:bean class="org.apache.ambari.server.security.authentication.kerberos.AmbariKerberosTicketValidator">
- <beans:constructor-arg ref="ambariConfiguration"/>
- <beans:property name="debug" value="false"/>
- </beans:bean>
- </beans:property>
-
- <beans:property name="userDetailsService" ref="authToLocalUserDetailsService"/>
- </beans:bean>
-
- <beans:bean id="authToLocalUserDetailsService" class="org.apache.ambari.server.security.authentication.kerberos.AmbariAuthToLocalUserDetailsService">
- <beans:constructor-arg ref="ambariConfiguration"/>
- <beans:constructor-arg ref="ambariUsers"/>
- </beans:bean>
-
</beans:beans>
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index 7e6a056..3c97ce9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -104,6 +104,7 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
+import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -3754,7 +3755,6 @@ public class KerberosHelperTest extends EasyMockSupport {
KerberosDescriptor kerberosDescriptor = createMock(KerberosDescriptor.class);
if (createAmbariPrincipal) {
expect(kerberosDescriptor.getIdentity(KerberosHelper.AMBARI_IDENTITY_NAME)).andReturn(ambariKerberosIdentity).once();
- expect(kerberosDescriptor.getIdentity(KerberosHelper.SPNEGO_IDENTITY_NAME)).andReturn(ambariKerberosIdentity).once();
}
List<KerberosIdentityDescriptor> identities = new ArrayList<KerberosIdentityDescriptor>();
@@ -3764,12 +3764,12 @@ public class KerberosHelperTest extends EasyMockSupport {
// Needed by infrastructure
injector.getInstance(AmbariMetaInfo.class).init();
- kerberosHelper.addAmbariServerIdentities(kerberosEnvProperties, kerberosDescriptor, identities);
+ kerberosHelper.addAmbariServerIdentity(kerberosEnvProperties, kerberosDescriptor, identities);
verifyAll();
if (createAmbariPrincipal) {
- Assert.assertEquals(2, identities.size());
+ Assert.assertEquals(1, identities.size());
Assert.assertSame(ambariKerberosIdentity, identities.get(0));
} else {
Assert.assertTrue(identities.isEmpty());
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java
deleted file mode 100644
index e980808..0000000
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.security.authentication.kerberos;
-
-import junit.framework.Assert;
-import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.security.authorization.AmbariGrantedAuthority;
-import org.apache.ambari.server.security.authorization.User;
-import org.apache.ambari.server.security.authorization.UserType;
-import org.apache.ambari.server.security.authorization.Users;
-import org.easymock.EasyMockSupport;
-import org.junit.Test;
-import org.springframework.security.core.userdetails.UserDetails;
-import org.springframework.security.core.userdetails.UserDetailsService;
-import org.springframework.security.core.userdetails.UsernameNotFoundException;
-
-import java.util.Collection;
-import java.util.Collections;
-
-import static org.easymock.EasyMock.expect;
-
-public class AmbariAuthToLocalUserDetailsServiceTest extends EasyMockSupport {
- @Test
- public void loadUserByUsernameSuccess() throws Exception {
- AmbariKerberosAuthenticationProperties properties = new AmbariKerberosAuthenticationProperties();
-
- Configuration configuration = createMock(Configuration.class);
- expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
-
- User user = createMock(User.class);
- expect(user.getUserName()).andReturn("user1").once();
- expect(user.getUserType()).andReturn(UserType.LDAP).once();
-
- Collection<AmbariGrantedAuthority> userAuthorities = Collections.singletonList(createNiceMock(AmbariGrantedAuthority.class));
-
- Users users = createMock(Users.class);
- expect(users.getUser("user1", UserType.LDAP)).andReturn(user).once();
- expect(users.getUserAuthorities("user1", UserType.LDAP)).andReturn(userAuthorities).once();
-
- replayAll();
-
- UserDetailsService userdetailsService = new AmbariAuthToLocalUserDetailsService(configuration, users);
-
- UserDetails userDetails = userdetailsService.loadUserByUsername("user1@EXAMPLE.COM");
-
- verifyAll();
-
- Assert.assertNotNull(userDetails);
- Assert.assertEquals("user1", userDetails.getUsername());
- Assert.assertEquals(userAuthorities.size(), userDetails.getAuthorities().size());
- Assert.assertEquals("", userDetails.getPassword());
- }
-
- @Test(expected = UsernameNotFoundException.class)
- public void loadUserByUsernameUserNotFound() throws Exception {
- AmbariKerberosAuthenticationProperties properties = new AmbariKerberosAuthenticationProperties();
-
- Configuration configuration = createMock(Configuration.class);
- expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
-
- Users users = createMock(Users.class);
- expect(users.getUser("user1", UserType.LDAP)).andReturn(null).once();
- expect(users.getUser("user1", UserType.LOCAL)).andReturn(null).once();
-
- replayAll();
-
- UserDetailsService userdetailsService = new AmbariAuthToLocalUserDetailsService(configuration, users);
-
- userdetailsService.loadUserByUsername("user1@EXAMPLE.COM");
-
- verifyAll();
-
- Assert.fail("UsernameNotFoundException was not thrown");
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilterTest.java
deleted file mode 100644
index d855cda..0000000
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilterTest.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.security.authentication.kerberos;
-
-import org.apache.ambari.server.audit.AuditLogger;
-import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.security.authorization.PermissionHelper;
-import org.easymock.EasyMockSupport;
-import org.junit.Assert;
-import org.junit.Test;
-import org.springframework.security.authentication.AuthenticationManager;
-import org.springframework.security.web.AuthenticationEntryPoint;
-
-import javax.servlet.http.HttpServletRequest;
-
-import static org.easymock.EasyMock.expect;
-
-public class AmbariKerberosAuthenticationFilterTest extends EasyMockSupport {
- @Test
- public void shouldApplyTrue() throws Exception {
- HttpServletRequest httpServletRequest = createMock(HttpServletRequest.class);
- expect(httpServletRequest.getHeader("Authorization")).andReturn("Negotiate .....").once();
-
- AmbariKerberosAuthenticationProperties properties = createMock(AmbariKerberosAuthenticationProperties.class);
- expect(properties.isKerberosAuthenticationEnabled()).andReturn(true).once();
-
- Configuration configuration = createMock(Configuration.class);
- expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
-
- AuthenticationManager authenticationManager = createMock(AuthenticationManager.class);
- AuthenticationEntryPoint entryPoint = createMock(AuthenticationEntryPoint.class);
- AuditLogger auditLogger = createMock(AuditLogger.class);
- PermissionHelper permissionHelper = createMock(PermissionHelper.class);
-
- replayAll();
-
- AmbariKerberosAuthenticationFilter filter = new AmbariKerberosAuthenticationFilter(
- authenticationManager,
- entryPoint,
- configuration,
- auditLogger,
- permissionHelper
- );
-
- Assert.assertTrue(filter.shouldApply(httpServletRequest));
-
- verifyAll();
- }
-
- @Test
- public void shouldApplyFalseMissingHeader() throws Exception {
- HttpServletRequest httpServletRequest = createMock(HttpServletRequest.class);
- expect(httpServletRequest.getHeader("Authorization")).andReturn(null).once();
-
- AmbariKerberosAuthenticationProperties properties = createMock(AmbariKerberosAuthenticationProperties.class);
- expect(properties.isKerberosAuthenticationEnabled()).andReturn(true).once();
-
- Configuration configuration = createMock(Configuration.class);
- expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
-
- AuthenticationManager authenticationManager = createMock(AuthenticationManager.class);
- AuthenticationEntryPoint entryPoint = createMock(AuthenticationEntryPoint.class);
- AuditLogger auditLogger = createMock(AuditLogger.class);
- PermissionHelper permissionHelper = createMock(PermissionHelper.class);
-
- replayAll();
-
- AmbariKerberosAuthenticationFilter filter = new AmbariKerberosAuthenticationFilter(
- authenticationManager,
- entryPoint,
- configuration,
- auditLogger,
- permissionHelper
- );
-
- Assert.assertFalse(filter.shouldApply(httpServletRequest));
-
- verifyAll();
- }
-
- @Test
- public void shouldApplyNotFalseEnabled() throws Exception {
- HttpServletRequest httpServletRequest = createMock(HttpServletRequest.class);
-
- AmbariKerberosAuthenticationProperties properties = createMock(AmbariKerberosAuthenticationProperties.class);
- expect(properties.isKerberosAuthenticationEnabled()).andReturn(false).once();
-
- Configuration configuration = createMock(Configuration.class);
- expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
-
- AuthenticationManager authenticationManager = createMock(AuthenticationManager.class);
- AuthenticationEntryPoint entryPoint = createMock(AuthenticationEntryPoint.class);
- AuditLogger auditLogger = createMock(AuditLogger.class);
- PermissionHelper permissionHelper = createMock(PermissionHelper.class);
-
- replayAll();
-
- AmbariKerberosAuthenticationFilter filter = new AmbariKerberosAuthenticationFilter(
- authenticationManager,
- entryPoint,
- configuration,
- auditLogger,
- permissionHelper
- );
-
- Assert.assertFalse(filter.shouldApply(httpServletRequest));
-
- verifyAll();
- }
-
- @Test
- public void doFilter() throws Exception {
- // Skip this test since the real work is being done by SpnegoAuthenticationProcessingFilter, which
- // is a class in the Spring libraries.
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcf779d2/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidatorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidatorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidatorTest.java
deleted file mode 100644
index 9bc87a4..0000000
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidatorTest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.security.authentication.kerberos;
-
-import org.apache.ambari.server.configuration.Configuration;
-import org.easymock.EasyMockSupport;
-import org.junit.Test;
-
-import static org.easymock.EasyMock.expect;
-
-public class AmbariKerberosTicketValidatorTest extends EasyMockSupport {
-
- /**
- * Tests an {@link AmbariKerberosTicketValidator} to ensure that the Spnego identity is properly
- * set in the base class during construction.
- */
- @Test
- public void testConstructor() throws NoSuchMethodException {
- AmbariKerberosAuthenticationProperties properties = createMock(AmbariKerberosAuthenticationProperties.class);
- expect(properties.isKerberosAuthenticationEnabled()).andReturn(true).once();
- expect(properties.getSpnegoPrincipalName()).andReturn("HTTP/somehost.example.com").times(1);
- expect(properties.getSpnegoKeytabFilePath()).andReturn("/etc/security/keytabs/spnego.service.keytab").times(2);
-
- Configuration configuration = createMock(Configuration.class);
- expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
-
- replayAll();
-
- new AmbariKerberosTicketValidator(configuration);
-
- verifyAll();
- }
-}
\ No newline at end of file
[05/16] ambari git commit: AMBARI-18440. Add the option of providing
'aux jars' while creating LLAP package.
Posted by nc...@apache.org.
AMBARI-18440. Add the option of providing 'aux jars' while creating LLAP package.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/479ec12d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/479ec12d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/479ec12d
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 479ec12db0a84942489349b8fb99c68988954ee6
Parents: 89eebb0
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Wed Sep 21 16:38:46 2016 -0700
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Wed Sep 21 17:19:20 2016 -0700
----------------------------------------------------------------------
.../0.12.0.2.0/package/scripts/hive_server_interactive.py | 4 ++++
.../HIVE/0.12.0.2.0/package/scripts/params_linux.py | 1 +
.../services/HIVE/configuration/hive-interactive-env.xml | 10 ++++++++++
3 files changed, 15 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/479ec12d/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index da05913..0251d27 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -268,6 +268,10 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
cmd += format(" --slider-keytab-dir .slider/keytabs/{params.hive_user}/ --slider-keytab "
"{llap_keytab_splits[4]} --slider-principal {params.hive_llap_principal}")
+ # Add the aux jars if they are specified. If empty, dont need to add this param.
+ if params.hive_aux_jars:
+ cmd+= format(" --auxjars {params.hive_aux_jars}")
+
# Append args.
llap_java_args = InlineTemplate(params.llap_app_java_opts).get_content()
cmd += format(" --args \" {llap_java_args}\"")
http://git-wip-us.apache.org/repos/asf/ambari/blob/479ec12d/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 5f78a1f..558aa95 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -609,6 +609,7 @@ if has_hive_interactive:
num_llap_nodes = config['configurations']['hive-interactive-env']['num_llap_nodes']
llap_daemon_container_size = config['configurations']['hive-interactive-site']['hive.llap.daemon.yarn.container.mb']
llap_log_level = config['configurations']['hive-interactive-env']['llap_log_level']
+ hive_aux_jars = default('/configurations/hive-interactive-env/hive_aux_jars', '')
hive_llap_io_mem_size = config['configurations']['hive-interactive-site']['hive.llap.io.memory.size']
llap_heap_size = config['configurations']['hive-interactive-env']['llap_heap_size']
llap_app_name = config['configurations']['hive-interactive-env']['llap_app_name']
http://git-wip-us.apache.org/repos/asf/ambari/blob/479ec12d/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
index f5ef902..b14cc8c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
@@ -202,6 +202,16 @@
<on-ambari-upgrade add="true"/>
</property>
<property>
+ <name>hive_aux_jars</name>
+ <value/>
+ <description>A list of comma separated JARs</description>
+ <display-name>Auxillary JAR list</display-name>
+ <on-ambari-upgrade add="true"/>
+ <value-attributes>
+ <empty-value-valid>true</empty-value-valid>
+ </value-attributes>
+ </property>
+ <property>
<name>llap_app_name</name>
<value>llap0</value>
<description>LLAP app name</description>
[09/16] ambari git commit: AMBARI-18444. On restarting service using
APIs in Ambari,
Ambari still says that service needs to be restarted on the UI (aonishuk)
Posted by nc...@apache.org.
AMBARI-18444. On restarting service using APIs in Ambari, Ambari still says that service needs to be restarted on the UI (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9b6bc215
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9b6bc215
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9b6bc215
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 9b6bc2158d9dafcc836d5c301edc73602cc0edc0
Parents: f6769fe
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Sep 22 15:38:46 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Sep 22 15:38:46 2016 +0300
----------------------------------------------------------------------
.../src/main/python/ambari_agent/ActionQueue.py | 3 +-
.../test/python/ambari_agent/TestActionQueue.py | 58 ++++++++++++++++++++
2 files changed, 60 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/9b6bc215/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
index 064e4f0..f104939 100644
--- a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
+++ b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
@@ -65,6 +65,7 @@ class ActionQueue(threading.Thread):
ROLE_COMMAND_STOP = 'STOP'
ROLE_COMMAND_CUSTOM_COMMAND = 'CUSTOM_COMMAND'
CUSTOM_COMMAND_RESTART = 'RESTART'
+ CUSTOM_COMMAND_START = ROLE_COMMAND_START
IN_PROGRESS_STATUS = 'IN_PROGRESS'
COMPLETED_STATUS = 'COMPLETED'
@@ -417,7 +418,7 @@ class ActionQueue(threading.Thread):
(command['roleCommand'] == self.ROLE_COMMAND_INSTALL and component in LiveStatus.CLIENT_COMPONENTS) or
(command['roleCommand'] == self.ROLE_COMMAND_CUSTOM_COMMAND and
'custom_command' in command['hostLevelParams'] and
- command['hostLevelParams']['custom_command'] == self.CUSTOM_COMMAND_RESTART)):
+ command['hostLevelParams']['custom_command'] in (self.CUSTOM_COMMAND_RESTART, self.CUSTOM_COMMAND_START))):
configHandler.write_actual_component(command['role'],
command['configurationTags'])
if 'clientsToUpdateConfigs' in command['hostLevelParams'] and command['hostLevelParams']['clientsToUpdateConfigs']:
http://git-wip-us.apache.org/repos/asf/ambari/blob/9b6bc215/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
index 1805c9a..7d04d42 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
@@ -200,6 +200,19 @@ class TestActionQueue(TestCase):
'hostLevelParams':{'custom_command': 'RESTART'}
}
+ datanode_start_custom_command = {
+ 'commandType': 'EXECUTION_COMMAND',
+ 'role': u'DATANODE',
+ 'roleCommand': u'CUSTOM_COMMAND',
+ 'commandId': '1-1',
+ 'taskId': 9,
+ 'clusterName': u'cc',
+ 'serviceName': u'HDFS',
+ 'configurations':{'global' : {}},
+ 'configurationTags':{'global' : { 'tag': 'v123' }},
+ 'hostLevelParams':{'custom_command': 'START'}
+ }
+
status_command_for_alerts = {
"serviceName" : 'FLUME',
"commandType" : "STATUS_COMMAND",
@@ -776,6 +789,51 @@ class TestActionQueue(TestCase):
self.assertFalse(write_client_components_mock.called)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
+ @patch.object(ActualConfigHandler, "write_client_components")
+ @patch.object(ActualConfigHandler, "write_actual_component")
+ @patch.object(CustomServiceOrchestrator, "runCommand")
+ @patch("CommandStatusDict.CommandStatusDict")
+ @patch.object(ActionQueue, "status_update_callback")
+ def test_store_configuration_tags_on_custom_start_command(self, status_update_callback_mock,
+ command_status_dict_mock,
+ cso_runCommand_mock, write_actual_component_mock, write_client_components_mock):
+ custom_service_orchestrator_execution_result_dict = {
+ 'stdout': 'out',
+ 'stderr': 'stderr',
+ 'structuredOut' : '',
+ 'exitcode' : 0
+ }
+ cso_runCommand_mock.return_value = custom_service_orchestrator_execution_result_dict
+
+ config = AmbariConfig()
+ tempdir = tempfile.gettempdir()
+ config.set('agent', 'prefix', tempdir)
+ config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+ config.set('agent', 'tolerate_download_failures', "true")
+ dummy_controller = MagicMock()
+ actionQueue = ActionQueue(config, dummy_controller)
+ actionQueue.execute_command(self.datanode_start_custom_command)
+ report = actionQueue.result()
+ expected = {'status': 'COMPLETED',
+ 'configurationTags': {'global': {'tag': 'v123'}},
+ 'stderr': 'stderr',
+ 'stdout': 'out\n\nCommand completed successfully!\n',
+ 'clusterName': u'cc',
+ 'structuredOut': '""',
+ 'roleCommand': u'CUSTOM_COMMAND',
+ 'serviceName': u'HDFS',
+ 'role': u'DATANODE',
+ 'actionId': '1-1',
+ 'taskId': 9,
+ 'customCommand': 'START',
+ 'exitCode': 0}
+ self.assertEqual(len(report['reports']), 1)
+ self.assertEqual(expected, report['reports'][0])
+
+ # Configuration tags should be updated on custom start command
+ self.assertTrue(write_actual_component_mock.called)
+
+ @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(ActionQueue, "status_update_callback")
@patch.object(CustomServiceOrchestrator, "requestComponentStatus")
@patch.object(CustomServiceOrchestrator, "requestComponentSecurityState")
[16/16] ambari git commit: Merge branch 'trunk' into
branch-dev-patch-upgrade
Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0bbdb4f3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0bbdb4f3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0bbdb4f3
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 0bbdb4f3a17526202be41b546db22c8bbbf85968
Parents: c0cee00 ed28ff4
Author: Nate Cole <nc...@hortonworks.com>
Authored: Fri Sep 23 09:17:50 2016 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Sep 23 09:17:50 2016 -0400
----------------------------------------------------------------------
.../admin-web/app/scripts/services/Cluster.js | 2 +
.../src/main/python/ambari_agent/ActionQueue.py | 3 +-
.../test/python/ambari_agent/TestActionQueue.py | 58 ++++++++
.../libraries/functions/copy_tarball.py | 20 ++-
.../libraries/functions/setup_atlas_hook.py | 1 +
.../server/configuration/Configuration.java | 19 +++
.../ambari/server/controller/AmbariServer.java | 8 ++
.../internal/RequestResourceProvider.java | 84 ++++++------
.../authorization/RoleAuthorization.java | 2 +
.../ConfigureAmbariIdentitiesServerAction.java | 8 +-
.../server/upgrade/UpgradeCatalog250.java | 16 +++
.../main/resources/Ambari-DDL-Derby-CREATE.sql | 11 +-
.../main/resources/Ambari-DDL-MySQL-CREATE.sql | 11 +-
.../main/resources/Ambari-DDL-Oracle-CREATE.sql | 11 +-
.../resources/Ambari-DDL-Postgres-CREATE.sql | 11 +-
.../resources/Ambari-DDL-SQLAnywhere-CREATE.sql | 11 +-
.../resources/Ambari-DDL-SQLServer-CREATE.sql | 11 +-
.../1.6.1.2.2.0/package/scripts/params.py | 2 -
.../0.1.0/package/scripts/params.py | 1 -
.../ATLAS/0.1.0.2.3/package/scripts/params.py | 4 +-
.../0.1.0.2.3/package/scripts/service_check.py | 8 +-
.../FLUME/1.4.0.2.0/package/scripts/params.py | 1 -
.../HBASE/0.96.0.2.0/package/scripts/params.py | 1 -
.../HDFS/2.1.0.2.0/package/scripts/params.py | 1 -
.../HIVE/0.12.0.2.0/metainfo.xml | 1 +
.../HIVE/0.12.0.2.0/package/scripts/hive.py | 10 +-
.../package/scripts/hive_interactive.py | 2 +-
.../0.12.0.2.0/package/scripts/hive_server.py | 4 +-
.../package/scripts/hive_server_interactive.py | 8 +-
.../HIVE/0.12.0.2.0/package/scripts/params.py | 3 +-
.../0.12.0.2.0/package/scripts/params_linux.py | 1 +
.../KAFKA/0.8.1/package/scripts/params.py | 1 -
.../1.10.3-10/package/scripts/params.py | 1 -
.../KNOX/0.5.0.2.2/package/scripts/params.py | 1 -
.../MAHOUT/1.0.0.2.3/package/scripts/params.py | 1 -
.../OOZIE/4.0.0.2.0/metainfo.xml | 3 -
.../4.0.0.2.0/package/files/oozieSmoke2.sh | 5 -
.../4.0.0.2.0/package/scripts/oozie_service.py | 9 +-
.../OOZIE/4.0.0.2.0/package/scripts/params.py | 9 +-
.../4.0.0.2.0/package/scripts/service_check.py | 13 --
.../OOZIE/4.2.0.2.3/metainfo.xml | 3 -
.../PIG/0.12.0.2.0/package/scripts/params.py | 3 +-
.../0.12.0.2.0/package/scripts/service_check.py | 2 +-
.../RANGER/0.4.0/package/scripts/params.py | 1 -
.../SLIDER/0.60.0.2.2/package/scripts/params.py | 4 +-
.../0.60.0.2.2/package/scripts/params_linux.py | 2 -
.../0.60.0.2.2/package/scripts/service_check.py | 4 +-
.../1.2.1/package/scripts/job_history_server.py | 2 +-
.../SPARK/1.2.1/package/scripts/params.py | 4 +-
.../1.2.1/package/scripts/spark_service.py | 4 +-
.../2.0.0/package/scripts/job_history_server.py | 2 +-
.../SPARK2/2.0.0/package/scripts/params.py | 4 +-
.../2.0.0/package/scripts/spark_service.py | 4 +-
.../SQOOP/1.4.4.2.0/package/scripts/params.py | 1 -
.../STORM/0.9.1/package/scripts/params.py | 1 -
.../TEZ/0.4.0.2.1/package/scripts/params.py | 4 +-
.../0.4.0.2.1/package/scripts/pre_upgrade.py | 2 +-
.../0.4.0.2.1/package/scripts/service_check.py | 2 +-
.../2.1.0.2.0/package/scripts/historyserver.py | 12 +-
.../YARN/2.1.0.2.0/package/scripts/params.py | 3 +-
.../system_action_definitions.xml | 5 +
.../HDP/2.0.6/configuration/cluster-env.xml | 45 +++++++
.../2.0.6/hooks/before-ANY/scripts/params.py | 1 +
.../before-ANY/scripts/shared_initialization.py | 20 +--
.../2.0.6/hooks/before-START/scripts/params.py | 4 +-
.../scripts/shared_initialization.py | 4 +-
.../stacks/HDP/2.0.6/services/stack_advisor.py | 7 +-
.../stacks/HDP/2.1/services/OOZIE/metainfo.xml | 3 -
.../stacks/HDP/2.2/services/OOZIE/metainfo.xml | 3 -
.../HIVE/configuration/hive-interactive-env.xml | 10 ++
.../internal/RequestResourceProviderTest.java | 134 ++++++++++++++++---
.../security/TestAuthenticationFactory.java | 1 +
...AmbariAuthToLocalUserDetailsServiceTest.java | 9 ++
.../server/upgrade/UpgradeCatalog250Test.java | 117 ++++++++++++++--
.../stacks/2.0.6/HIVE/test_hive_server.py | 8 +-
.../2.0.6/OOZIE/test_oozie_service_check.py | 8 --
.../stacks/2.0.6/OOZIE/test_service_check.py | 16 ---
.../stacks/2.0.6/YARN/test_historyserver.py | 4 +-
.../stacks/2.0.6/common/test_stack_advisor.py | 12 +-
.../stacks/2.2/PIG/test_pig_service_check.py | 2 +-
.../stacks/2.3/ATLAS/test_service_check.py | 4 +-
.../stacks/2.5/common/test_stack_advisor.py | 5 +-
.../app/controllers/wizard/step7_controller.js | 2 +-
83 files changed, 622 insertions(+), 243 deletions(-)
----------------------------------------------------------------------
[03/16] ambari git commit: AMBARI-18406. Create authentication filter
to perform Kerberos authentication for Ambari [amended] (rlevas)
Posted by nc...@apache.org.
AMBARI-18406. Create authentication filter to perform Kerberos authentication for Ambari [amended] (rlevas)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3939afaf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3939afaf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3939afaf
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 3939afaf6e30d95186e5cce92712b4485c2013bf
Parents: 7e08470
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Sep 21 13:07:44 2016 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Wed Sep 21 13:07:44 2016 -0400
----------------------------------------------------------------------
.../AmbariAuthToLocalUserDetailsServiceTest.java | 14 +++++++++-----
1 file changed, 9 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/3939afaf/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java
index 4b93f2f..41664fd 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java
@@ -25,6 +25,7 @@ import org.apache.ambari.server.security.authorization.User;
import org.apache.ambari.server.security.authorization.UserType;
import org.apache.ambari.server.security.authorization.Users;
import org.easymock.EasyMockSupport;
+import org.junit.Before;
import org.junit.Test;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
@@ -36,6 +37,14 @@ import java.util.Collections;
import static org.easymock.EasyMock.expect;
public class AmbariAuthToLocalUserDetailsServiceTest extends EasyMockSupport {
+ @Before
+ public void setup() {
+ // These system properties need to be set to properly configure the KerberosName object when
+ // a krb5.conf file is not available
+ System.setProperty("java.security.krb5.realm", "EXAMPLE.COM");
+ System.setProperty("java.security.krb5.kdc", "localhost");
+ }
+
@Test
public void loadUserByUsernameSuccess() throws Exception {
AmbariKerberosAuthenticationProperties properties = new AmbariKerberosAuthenticationProperties();
@@ -55,11 +64,6 @@ public class AmbariAuthToLocalUserDetailsServiceTest extends EasyMockSupport {
replayAll();
- // These system properties need to be set to properly configure the KerberosName object when
- // a krb5.conf file is not available
- System.setProperty("java.security.krb5.realm", "EXAMPLE.COM");
- System.setProperty("java.security.krb5.kdc", "localhost");
-
UserDetailsService userdetailsService = new AmbariAuthToLocalUserDetailsService(configuration, users);
UserDetails userDetails = userdetailsService.loadUserByUsername("user1@EXAMPLE.COM");
[06/16] ambari git commit: AMBARI-18426: Active ambari server check
required in ambari server JAR
Posted by nc...@apache.org.
AMBARI-18426: Active ambari server check required in ambari server JAR
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/07a3a3cf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/07a3a3cf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/07a3a3cf
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 07a3a3cff17d5df91e77fde0f6f5e0a9cac8aec5
Parents: 479ec12
Author: Nahappan Somasundaram <ns...@hortonworks.com>
Authored: Wed Sep 21 11:53:47 2016 -0700
Committer: Nahappan Somasundaram <ns...@hortonworks.com>
Committed: Wed Sep 21 17:38:26 2016 -0700
----------------------------------------------------------------------
.../server/configuration/Configuration.java | 19 +++++++++++++++++++
.../ambari/server/controller/AmbariServer.java | 8 ++++++++
2 files changed, 27 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/07a3a3cf/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index f1058b6..2e850ef 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -2346,6 +2346,16 @@ public class Configuration {
public static final ConfigurationProperty<Integer> TASK_ID_LIST_LIMIT = new ConfigurationProperty<>(
"task.query.parameterlist.size", 999);
+ /**
+ * Indicates whether the current ambari server instance is the active instance.
+ * If this property is missing, the value will be considered to be true.
+ * If present, it should be explicitly set to "true" to set this as the active instance.
+ * Any other value will be taken as a false.
+ */
+ @Markdown(description = "Indicates whether the current ambari server instance is active or not.")
+ public static final ConfigurationProperty<Boolean> ACTIVE_INSTANCE = new ConfigurationProperty<>(
+ "active.instance", Boolean.TRUE);
+
private static final Logger LOG = LoggerFactory.getLogger(
Configuration.class);
@@ -4888,6 +4898,15 @@ public class Configuration {
}
/**
+ * Get whether the current ambari server instance the active instance
+ *
+ * @return true / false
+ */
+ public boolean isActiveInstance() {
+ return Boolean.parseBoolean(getProperty(ACTIVE_INSTANCE));
+ }
+
+ /**
* Generates a markdown table which includes:
* <ul>
* <li>Property key name</li>
http://git-wip-us.apache.org/repos/asf/ambari/blob/07a3a3cf/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index 0e6e6b1..5e498f0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -941,6 +941,14 @@ public class AmbariServer {
try {
LOG.info("Getting the controller");
+ // check if this instance is the active instance
+ Configuration config = injector.getInstance(Configuration.class);
+ if (!config.isActiveInstance()) {
+ String errMsg = "This instance of ambari server is not designated as active. Cannot start ambari server." +
+ "The property active.instance is set to false in ambari.properties";
+ throw new AmbariException(errMsg);
+ }
+
setupProxyAuth();
injector.getInstance(GuiceJpaInitializer.class);
[07/16] ambari git commit: AMBARI-18428. Atlas service check never
fails (aonishuk)
Posted by nc...@apache.org.
AMBARI-18428. Atlas service check never fails (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/169cb789
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/169cb789
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/169cb789
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 169cb78912faaccd8807f44e87c05c79cc6844f8
Parents: 07a3a3c
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Sep 22 11:23:11 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Sep 22 11:23:11 2016 +0300
----------------------------------------------------------------------
.../ATLAS/0.1.0.2.3/package/scripts/params.py | 4 ++--
.../ATLAS/0.1.0.2.3/package/scripts/service_check.py | 8 ++------
2 files changed, 4 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/169cb789/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
index 3081295..1a73634 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
@@ -181,9 +181,9 @@ smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
security_check_status_file = format('{log_dir}/security_check.status')
if security_enabled:
- smoke_cmd = format('curl --negotiate -u : -b ~/cookiejar.txt -c ~/cookiejar.txt -s -o /dev/null -w "%{{http_code}}" {metadata_protocol}://{metadata_host}:{metadata_port}/')
+ smoke_cmd = format('curl -k --negotiate -u : -b ~/cookiejar.txt -c ~/cookiejar.txt -s -o /dev/null -w "%{{http_code}}" {metadata_protocol}://{metadata_host}:{metadata_port}/')
else:
- smoke_cmd = format('curl -s -o /dev/null -w "%{{http_code}}" {metadata_protocol}://{metadata_host}:{metadata_port}/')
+ smoke_cmd = format('curl -k -s -o /dev/null -w "%{{http_code}}" {metadata_protocol}://{metadata_host}:{metadata_port}/')
# hbase
hbase_conf_dir = "/etc/hbase/conf"
http://git-wip-us.apache.org/repos/asf/ambari/blob/169cb789/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/service_check.py
index 05ffc14..8e27b38 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/service_check.py
@@ -35,12 +35,8 @@ class AtlasServiceCheck(Script):
Execute(format("{kinit_path_local} -kt {smokeuser_keytab} {smokeuser_principal}"),
user=params.smoke_test_user)
- try:
- Execute(params.smoke_cmd, user=params.smoke_test_user, tries = 5,
- try_sleep = 10)
- Logger.info('Atlas server up and running')
- except:
- Logger.debug('Atlas server not running')
+ Execute(params.smoke_cmd, user=params.smoke_test_user, tries = 5,
+ try_sleep = 10)
if __name__ == "__main__":
[15/16] ambari git commit: AMBARI-18438. Add granular flags for
sysprepped clusters to copy tarballs, Oozie share lib, fast jar,
and create users (alejandro)
Posted by nc...@apache.org.
AMBARI-18438. Add granular flags for sysprepped clusters to copy tarballs, Oozie share lib, fast jar, and create users (alejandro)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ed28ff48
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ed28ff48
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ed28ff48
Branch: refs/heads/branch-dev-patch-upgrade
Commit: ed28ff483f8afb994932a62ea177f06b31fae36f
Parents: 33a573c
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Wed Sep 21 13:57:35 2016 -0700
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Thu Sep 22 15:54:38 2016 -0700
----------------------------------------------------------------------
.../libraries/functions/copy_tarball.py | 20 ++++++---
.../libraries/functions/setup_atlas_hook.py | 1 +
.../1.6.1.2.2.0/package/scripts/params.py | 2 -
.../0.1.0/package/scripts/params.py | 1 -
.../FLUME/1.4.0.2.0/package/scripts/params.py | 1 -
.../HBASE/0.96.0.2.0/package/scripts/params.py | 1 -
.../HDFS/2.1.0.2.0/package/scripts/params.py | 1 -
.../HIVE/0.12.0.2.0/package/scripts/hive.py | 10 ++---
.../package/scripts/hive_interactive.py | 2 +-
.../0.12.0.2.0/package/scripts/hive_server.py | 4 +-
.../package/scripts/hive_server_interactive.py | 4 +-
.../HIVE/0.12.0.2.0/package/scripts/params.py | 3 +-
.../KAFKA/0.8.1/package/scripts/params.py | 1 -
.../1.10.3-10/package/scripts/params.py | 1 -
.../KNOX/0.5.0.2.2/package/scripts/params.py | 1 -
.../MAHOUT/1.0.0.2.3/package/scripts/params.py | 1 -
.../4.0.0.2.0/package/scripts/oozie_service.py | 9 ++--
.../OOZIE/4.0.0.2.0/package/scripts/params.py | 9 +++-
.../PIG/0.12.0.2.0/package/scripts/params.py | 3 +-
.../0.12.0.2.0/package/scripts/service_check.py | 2 +-
.../RANGER/0.4.0/package/scripts/params.py | 1 -
.../SLIDER/0.60.0.2.2/package/scripts/params.py | 4 +-
.../0.60.0.2.2/package/scripts/params_linux.py | 2 -
.../0.60.0.2.2/package/scripts/service_check.py | 4 +-
.../1.2.1/package/scripts/job_history_server.py | 2 +-
.../SPARK/1.2.1/package/scripts/params.py | 4 +-
.../1.2.1/package/scripts/spark_service.py | 4 +-
.../2.0.0/package/scripts/job_history_server.py | 2 +-
.../SPARK2/2.0.0/package/scripts/params.py | 4 +-
.../2.0.0/package/scripts/spark_service.py | 4 +-
.../SQOOP/1.4.4.2.0/package/scripts/params.py | 1 -
.../STORM/0.9.1/package/scripts/params.py | 1 -
.../TEZ/0.4.0.2.1/package/scripts/params.py | 4 +-
.../0.4.0.2.1/package/scripts/pre_upgrade.py | 2 +-
.../0.4.0.2.1/package/scripts/service_check.py | 2 +-
.../2.1.0.2.0/package/scripts/historyserver.py | 12 +++---
.../YARN/2.1.0.2.0/package/scripts/params.py | 3 +-
.../HDP/2.0.6/configuration/cluster-env.xml | 45 ++++++++++++++++++++
.../2.0.6/hooks/before-ANY/scripts/params.py | 1 +
.../before-ANY/scripts/shared_initialization.py | 20 +++++----
.../2.0.6/hooks/before-START/scripts/params.py | 4 +-
.../scripts/shared_initialization.py | 4 +-
.../stacks/2.0.6/HIVE/test_hive_server.py | 8 ++--
.../stacks/2.0.6/YARN/test_historyserver.py | 4 +-
.../stacks/2.2/PIG/test_pig_service_check.py | 2 +-
45 files changed, 141 insertions(+), 80 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index 2626990..0355685 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -18,7 +18,7 @@ limitations under the License.
"""
-__all__ = ["copy_to_hdfs", ]
+__all__ = ["copy_to_hdfs", "get_sysprep_skip_copy_tarballs_hdfs"]
import os
import uuid
@@ -65,6 +65,16 @@ TARBALL_MAP = {
}
+def get_sysprep_skip_copy_tarballs_hdfs():
+ import params
+ host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+ # By default, copy the tarballs to HDFS. If the cluster is sysprepped, then set based on the config.
+ sysprep_skip_copy_tarballs_hdfs = False
+ if host_sys_prepped:
+ sysprep_skip_copy_tarballs_hdfs = default("/cluster-env/sysprep_skip_copy_tarballs_hdfs", False)
+ return sysprep_skip_copy_tarballs_hdfs
+
def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_source_file=None, custom_dest_file=None):
"""
For a given tarball name, get the source and destination paths to use.
@@ -189,7 +199,7 @@ def _get_single_version_from_stack_select():
def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=None, custom_dest_file=None, force_execute=False,
- use_upgrading_version_during_upgrade=True, replace_existing_files=False, host_sys_prepped=False):
+ use_upgrading_version_during_upgrade=True, replace_existing_files=False, skip=False):
"""
:param name: Tarball name, e.g., tez, hive, pig, sqoop.
:param user_group: Group to own the directory.
@@ -199,7 +209,7 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non
:param custom_dest_file: Override the destination file path
:param force_execute: If true, will execute the HDFS commands immediately, otherwise, will defer to the calling function.
:param use_upgrading_version_during_upgrade: If true, will use the version going to during upgrade. Otherwise, use the CURRENT (source) version.
- :param host_sys_prepped: If true, tarballs will not be copied as the cluster deployment uses prepped VMs.
+ :param skip: If true, tarballs will not be copied as the cluster deployment uses prepped VMs.
:return: Will return True if successful, otherwise, False.
"""
import params
@@ -212,8 +222,8 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non
Logger.error("Could not copy tarball {0} due to a missing or incorrect parameter.".format(str(name)))
return False
- if host_sys_prepped:
- Logger.warning("Skipping copying {0} to {1} for {2} as its a sys_prepped host.".format(str(source_file), str(dest_file), str(name)))
+ if skip:
+ Logger.warning("Skipping copying {0} to {1} for {2} as it is a sys prepped host.".format(str(source_file), str(dest_file), str(name)))
return True
Logger.info("Source file: {0} , Dest file in HDFS: {1}".format(source_file, dest_file))
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py b/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
index 591c064..a1d2f95 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
@@ -177,5 +177,6 @@ def setup_atlas_jar_symlinks(hook_name, jar_source_dir):
def install_atlas_hook_packages(atlas_plugin_package, atlas_ubuntu_plugin_package, host_sys_prepped,
agent_stack_retry_on_unavailability, agent_stack_retry_count):
if not host_sys_prepped:
+ # This will install packages like atlas-metadata-${service}-plugin needed for Falcon and Hive.
Package(atlas_ubuntu_plugin_package if OSCheck.is_ubuntu_family() else atlas_plugin_package,
retry_on_repo_unavailability=agent_stack_retry_on_unavailability, retry_count=agent_stack_retry_count)
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 4c111f1..a8a7327 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -172,8 +172,6 @@ if security_enabled:
else:
kinit_cmd = ""
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
-
#for create_hdfs_directory
hostname = status_params.hostname
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 6934924..f04f5c2 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -190,7 +190,6 @@ java64_home = config['hostLevelParams']['java_home']
java_version = expect("/hostLevelParams/java_version", int)
metrics_collector_heapsize = default('/configurations/ams-env/metrics_collector_heapsize', "512")
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)
skip_disk_metrics_patterns = default("/configurations/ams-env/timeline.metrics.skip.disk.metrics.patterns", None)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
index d3a9294..9c79909 100644
--- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
@@ -33,7 +33,6 @@ config = Script.get_config()
stack_root = Script.get_stack_root()
stack_name = default("/hostLevelParams/stack_name", None)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
# New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
version = default("/commandParams/version", None)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
index f3208ce..e0607f3 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
@@ -25,5 +25,4 @@ if OSCheck.is_windows_family():
else:
from params_linux import *
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
retryAble = default("/commandParams/command_retry_enabled", False)
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
index 7514918..25231f9 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
@@ -24,6 +24,5 @@ if OSCheck.is_windows_family():
else:
from params_linux import *
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
nfsgateway_heapsize = config['configurations']['hadoop-env']['nfsgateway_heapsize']
retryAble = default("/commandParams/command_retry_enabled", False)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 3249dd4..ec64200 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -137,8 +137,8 @@ def hive(name=None):
# *********************************
# if copy tarball to HDFS feature supported copy mapreduce.tar.gz and tez.tar.gz to HDFS
if params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major):
- copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
- copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+ copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
+ copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
# Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
# This can use a different source and dest location to account
@@ -148,14 +148,14 @@ def hive(name=None):
file_mode=params.tarballs_mode,
custom_source_file=params.pig_tar_source,
custom_dest_file=params.pig_tar_dest_file,
- host_sys_prepped=params.host_sys_prepped)
+ skip=params.sysprep_skip_copy_tarballs_hdfs)
copy_to_hdfs("hive",
params.user_group,
params.hdfs_user,
file_mode=params.tarballs_mode,
custom_source_file=params.hive_tar_source,
custom_dest_file=params.hive_tar_dest_file,
- host_sys_prepped=params.host_sys_prepped)
+ skip=params.sysprep_skip_copy_tarballs_hdfs)
wildcard_tarballs = ["sqoop", "hadoop_streaming"]
for tarball_name in wildcard_tarballs:
@@ -176,7 +176,7 @@ def hive(name=None):
file_mode=params.tarballs_mode,
custom_source_file=source_file,
custom_dest_file=dest_file,
- host_sys_prepped=params.host_sys_prepped)
+ skip=params.sysprep_skip_copy_tarballs_hdfs)
# ******* End Copy Tarballs *******
# *********************************
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
index 6511d0e..888b920 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
@@ -82,7 +82,7 @@ def hive_interactive(name=None):
params.user_group,
params.hdfs_user,
file_mode=params.tarballs_mode,
- host_sys_prepped=params.host_sys_prepped)
+ skip=params.sysprep_skip_copy_tarballs_hdfs)
if resource_created:
params.HdfsResource(None, action="execute")
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index 7c886b2..614b2a9 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -126,13 +126,13 @@ class HiveServerDefault(HiveServer):
"mapreduce",
params.user_group,
params.hdfs_user,
- host_sys_prepped=params.host_sys_prepped)
+ skip=params.sysprep_skip_copy_tarballs_hdfs)
resource_created = copy_to_hdfs(
"tez",
params.user_group,
params.hdfs_user,
- host_sys_prepped=params.host_sys_prepped) or resource_created
+ skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created
if resource_created:
params.HdfsResource(None, action="execute")
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index 0251d27..0e00c3a 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -94,13 +94,13 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
"hive2",
params.user_group,
params.hdfs_user,
- host_sys_prepped=params.host_sys_prepped)
+ skip=params.sysprep_skip_copy_tarballs_hdfs)
resource_created = copy_to_hdfs(
"tez_hive2",
params.user_group,
params.hdfs_user,
- host_sys_prepped=params.host_sys_prepped) or resource_created
+ skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created
if resource_created:
params.HdfsResource(None, action="execute")
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
index f10a3f3..895ec81 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
@@ -19,11 +19,12 @@ limitations under the License.
"""
from ambari_commons import OSCheck
from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.copy_tarball import get_sysprep_skip_copy_tarballs_hdfs
if OSCheck.is_windows_family():
from params_windows import *
else:
from params_linux import *
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
retryAble = default("/commandParams/command_retry_enabled", False)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
index f631ac9..a18c295 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
@@ -48,7 +48,6 @@ version = default("/commandParams/version", None)
# Version that is CURRENT.
current_version = default("/hostLevelParams/current_version", None)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
stack_version_unformatted = config['hostLevelParams']['stack_version']
stack_version_formatted = format_stack_version(stack_version_unformatted)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
index fe367b6..57f062a 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
@@ -44,7 +44,6 @@ kadm5_acl_path = kadm5_acl_dir + '/' + kadm5_acl_file
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
configurations = None
keytab_details = None
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
index 14e021d..ad1a1dc 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
@@ -26,5 +26,4 @@ if OSCheck.is_windows_family():
else:
from params_linux import *
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
retryAble = default("/commandParams/command_retry_enabled", False)
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index 8ef3ba8..91911f3 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -33,7 +33,6 @@ tmp_dir = Script.get_tmp_dir()
stack_root = Script.get_stack_root()
stack_name = default("/hostLevelParams/stack_name", None)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
stack_version_unformatted = config['hostLevelParams']['stack_version']
stack_version_formatted = format_stack_version(stack_version_unformatted)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
index 4740131..5c97727 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
@@ -35,6 +35,8 @@ from resource_management.libraries.providers.hdfs_resource import WebHDFSUtil
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
+from resource_management.core import Logger
+
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def oozie_service(action='start', upgrade_type=None):
import params
@@ -118,10 +120,9 @@ def oozie_service(action = 'start', upgrade_type=None):
Execute(kinit_if_needed,
user = params.oozie_user,
)
-
-
- if params.host_sys_prepped:
- print "Skipping creation of oozie sharelib as host is sys prepped"
+
+ if params.sysprep_skip_copy_oozie_share_lib_to_hdfs:
+ Logger.info("Skipping creation of oozie sharelib as host is sys prepped")
# Copy current hive-site to hdfs:/user/oozie/share/lib/spark/
params.HdfsResource(format("{hdfs_share_dir}/lib/spark/hive-site.xml"),
action="create_on_execute",
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
index 28d654e..e5d674a 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
@@ -20,6 +20,7 @@ limitations under the License.
from ambari_commons import OSCheck
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.expect import expect
+from resource_management.libraries.functions.copy_tarball import get_sysprep_skip_copy_tarballs_hdfs
if OSCheck.is_windows_family():
from params_windows import *
@@ -29,4 +30,10 @@ else:
java_home = config['hostLevelParams']['java_home']
java_version = expect("/hostLevelParams/java_version", int)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
\ No newline at end of file
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+# By default, copy the tarballs to HDFS. If the cluster is sysprepped, then set based on the config.
+sysprep_skip_copy_oozie_share_lib_to_hdfs = False
+if host_sys_prepped:
+ sysprep_skip_copy_oozie_share_lib_to_hdfs = default("/cluster-env/sysprep_skip_copy_oozie_share_lib_to_hdfs", False)
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
index 36541e7..3aebda0 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
@@ -20,11 +20,12 @@ Ambari Agent
"""
from ambari_commons import OSCheck
from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.copy_tarball import get_sysprep_skip_copy_tarballs_hdfs
if OSCheck.is_windows_family():
from params_windows import *
else:
from params_linux import *
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
index e029092..ba4082e 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
@@ -110,7 +110,7 @@ class PigServiceCheckLinux(PigServiceCheck):
resource_created = copy_to_hdfs(
"tez", params.user_group,
params.hdfs_user,
- host_sys_prepped=params.host_sys_prepped)
+ skip=params.sysprep_skip_copy_tarballs_hdfs)
if resource_created:
params.HdfsResource(None, action="execute")
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index e63627d..5b0dcea 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -46,7 +46,6 @@ stack_root = Script.get_stack_root()
stack_name = default("/hostLevelParams/stack_name", None)
version = default("/commandParams/version", None)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
stack_version_unformatted = config['hostLevelParams']['stack_version']
stack_version_formatted = format_stack_version(stack_version_unformatted)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
index 842e5ed..c03329c 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
@@ -23,14 +23,13 @@ from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.copy_tarball import get_sysprep_skip_copy_tarballs_hdfs
if OSCheck.is_windows_family():
from params_windows import *
else:
from params_linux import *
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
-
# server configurations
config = Script.get_config()
@@ -38,6 +37,7 @@ stack_name = default("/hostLevelParams/stack_name", None)
# New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
version = default("/commandParams/version", None)
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
#hadoop params
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
index 6bf41b4..5bbf3b4 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
@@ -50,8 +50,6 @@ slider_tar_gz = format('{slider_lib_dir}/slider.tar.gz')
user_group = config['configurations']['cluster-env']['user_group']
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
-
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
security_enabled = config['configurations']['cluster-env']['security_enabled']
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
index 468dfb1..8582b5e 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
@@ -41,9 +41,9 @@ class SliderServiceCheck(Script):
def service_check(self, env):
import params
env.set_params(params)
-
+
if params.stack_version_formatted and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted):
- copy_to_hdfs("slider", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+ copy_to_hdfs("slider", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
smokeuser_kinit_cmd = format(
"{kinit_path_local} -kt {smokeuser_keytab} {smokeuser_principal};") if params.security_enabled else ""
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
index 2825f35..154f430 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
@@ -86,7 +86,7 @@ class JobHistoryServer(Script):
"tez",
params.user_group,
params.hdfs_user,
- host_sys_prepped=params.host_sys_prepped)
+ skip=params.sysprep_skip_copy_tarballs_hdfs)
if resource_created:
params.HdfsResource(None, action="execute")
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
index 4e525db..3a1c482 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
@@ -26,6 +26,7 @@ from resource_management.libraries.functions.stack_features import check_stack_f
from resource_management.libraries.functions.constants import StackFeature
from resource_management.libraries.functions import conf_select, stack_select
from resource_management.libraries.functions.get_stack_version import get_stack_version
+from resource_management.libraries.functions.copy_tarball import get_sysprep_skip_copy_tarballs_hdfs
from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.default import default
@@ -59,7 +60,8 @@ stack_version_unformatted = config['hostLevelParams']['stack_version']
if upgrade_direction == Direction.DOWNGRADE:
stack_version_unformatted = config['commandParams']['original_stack'].split("-")[1]
stack_version_formatted = format_stack_version(stack_version_unformatted)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
# New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
version = default("/commandParams/version", None)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
index 82a010a..31a296a 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
@@ -40,7 +40,7 @@ def spark_service(name, upgrade_type=None, action=None):
if name == 'jobhistoryserver' and effective_version and check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
# copy spark-hdp-assembly.jar to hdfs
- copy_to_hdfs("spark", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+ copy_to_hdfs("spark", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
# create spark history directory
params.HdfsResource(params.spark_history_dir,
type="directory",
@@ -59,7 +59,7 @@ def spark_service(name, upgrade_type=None, action=None):
# Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
# need to copy the tarball, otherwise, copy it.
if params.stack_version_formatted and check_stack_feature(StackFeature.TEZ_FOR_SPARK, params.stack_version_formatted):
- resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+ resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
if resource_created:
params.HdfsResource(None, action="execute")
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
index 3a0cff5..154c83d 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
@@ -86,7 +86,7 @@ class JobHistoryServer(Script):
"tez",
params.user_group,
params.hdfs_user,
- host_sys_prepped=params.host_sys_prepped)
+ skip=params.sysprep_skip_copy_tarballs_hdfs)
if resource_created:
params.HdfsResource(None, action="execute")
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
index e461186..e49756d 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
@@ -25,6 +25,7 @@ from resource_management.libraries.functions.stack_features import check_stack_f
from resource_management.libraries.functions.constants import StackFeature
from resource_management.libraries.functions import conf_select, stack_select
from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.copy_tarball import get_sysprep_skip_copy_tarballs_hdfs
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
@@ -49,7 +50,8 @@ stack_name = status_params.stack_name
stack_root = Script.get_stack_root()
stack_version_unformatted = config['hostLevelParams']['stack_version']
stack_version_formatted = format_stack_version(stack_version_unformatted)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
# New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
version = default("/commandParams/version", None)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
index 590b299..1cbca8b 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
@@ -60,7 +60,7 @@ def spark_service(name, upgrade_type=None, action=None):
source_dir=params.spark_home+"/jars"
tmp_archive_file=get_tarball_paths("spark2")[1]
make_tarfile(tmp_archive_file, source_dir)
- copy_to_hdfs("spark2", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+ copy_to_hdfs("spark2", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
# create spark history directory
params.HdfsResource(params.spark_history_dir,
type="directory",
@@ -79,7 +79,7 @@ def spark_service(name, upgrade_type=None, action=None):
# Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
# need to copy the tarball, otherwise, copy it.
if params.stack_version_formatted and check_stack_feature(StackFeature.TEZ_FOR_SPARK, params.stack_version_formatted):
- resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+ resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
if resource_created:
params.HdfsResource(None, action="execute")
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
index ec71506..61573ee 100644
--- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
@@ -24,5 +24,4 @@ if OSCheck.is_windows_family():
else:
from params_linux import *
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py
index f10a3f3..5d53de8 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params.py
@@ -25,5 +25,4 @@ if OSCheck.is_windows_family():
else:
from params_linux import *
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
retryAble = default("/commandParams/command_retry_enabled", False)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
index 1e591f4..c01dc62 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
@@ -19,11 +19,11 @@ limitations under the License.
"""
from ambari_commons import OSCheck
from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.copy_tarball import get_sysprep_skip_copy_tarballs_hdfs
if OSCheck.is_windows_family():
from params_windows import *
else:
from params_linux import *
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
-
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
index 04d8be1..d182efe 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
@@ -48,7 +48,7 @@ class TezPreUpgrade(Script):
params.user_group,
params.hdfs_user,
use_upgrading_version_during_upgrade=False,
- host_sys_prepped=params.host_sys_prepped)
+ skip=params.sysprep_skip_copy_tarballs_hdfs)
if resource_created:
params.HdfsResource(None, action="execute")
else:
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
index af429f6..c903d35 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
@@ -69,7 +69,7 @@ class TezServiceCheckLinux(TezServiceCheck):
)
if params.stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version_formatted):
- copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+ copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
params.HdfsResource(None, action = "execute")
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
index d5bdc6b..8f5d380 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
@@ -80,9 +80,9 @@ class HistoryServerDefault(HistoryServer):
conf_select.select(params.stack_name, "hadoop", params.version)
stack_select.select("hadoop-mapreduce-historyserver", params.version)
# MC Hammer said, "Can't touch this"
- copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
- copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
- copy_to_hdfs("slider", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+ copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
+ copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
+ copy_to_hdfs("slider", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
params.HdfsResource(None, action="execute")
def start(self, env, upgrade_type=None):
@@ -96,17 +96,17 @@ class HistoryServerDefault(HistoryServer):
"mapreduce",
params.user_group,
params.hdfs_user,
- host_sys_prepped=params.host_sys_prepped)
+ skip=params.sysprep_skip_copy_tarballs_hdfs)
resource_created = copy_to_hdfs(
"tez",
params.user_group,
params.hdfs_user,
- host_sys_prepped=params.host_sys_prepped) or resource_created
+ skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created
resource_created = copy_to_hdfs(
"slider",
params.user_group,
params.hdfs_user,
- host_sys_prepped=params.host_sys_prepped) or resource_created
+ skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created
if resource_created:
params.HdfsResource(None, action="execute")
else:
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
index 073e84f..d0ad6f6 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
@@ -21,11 +21,12 @@ Ambari Agent
"""
from ambari_commons import OSCheck
from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.copy_tarball import get_sysprep_skip_copy_tarballs_hdfs
if OSCheck.is_windows_family():
from params_windows import *
else:
from params_linux import *
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
retryAble = default("/commandParams/command_retry_enabled", False)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
index 81cb175..0d313cc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
@@ -81,6 +81,51 @@
<on-ambari-upgrade add="true"/>
</property>
<property>
+ <name>sysprep_skip_create_users_and_groups</name>
+ <display-name>Whether to skip creating users and groups in a sysprepped cluster</display-name>
+ <value>false</value>
+ <property-type>ADDITIONAL_USER_PROPERTY</property-type>
+ <description>Whether to skip creating users and groups in a sysprepped cluster</description>
+ <value-attributes>
+ <overridable>true</overridable>
+ <type>boolean</type>
+ </value-attributes>
+ <on-ambari-upgrade add="true"/>
+ </property>
+ <property>
+ <name>sysprep_skip_copy_fast_jar_hdfs</name>
+ <display-name>Whether to skip copying the tarballs to HDFS on a sysprepped cluster</display-name>
+ <value>false</value>
+ <description>Whether to skip copying the tarballs to HDFS on a sysprepped cluster, during both fresh install and stack upgrade</description>
+ <value-attributes>
+ <overridable>true</overridable>
+ <type>boolean</type>
+ </value-attributes>
+ <on-ambari-upgrade add="true"/>
+ </property>
+ <property>
+ <name>sysprep_skip_copy_tarballs_hdfs</name>
+ <display-name>Whether to skip copying the tarballs to HDFS on a sysprepped cluster</display-name>
+ <value>false</value>
+ <description>Whether to skip copying the tarballs to HDFS on a sysprepped cluster, during both fresh install and stack upgrade</description>
+ <value-attributes>
+ <overridable>true</overridable>
+ <type>boolean</type>
+ </value-attributes>
+ <on-ambari-upgrade add="true"/>
+ </property>
+ <property>
+ <name>sysprep_skip_copy_oozie_share_lib_to_hdfs</name>
+ <display-name>Whether to skip copying the Oozie share lib to HDFS on sysprepped cluster</display-name>
+ <value>false</value>
+ <description>Whether to skip copying the Oozie share lib to HDFS on sysprepped cluster, during both fresh install and stack upgrade</description>
+ <value-attributes>
+ <overridable>true</overridable>
+ <type>boolean</type>
+ </value-attributes>
+ <on-ambari-upgrade add="true"/>
+ </property>
+ <property>
<name>smokeuser</name>
<display-name>Smoke User</display-name>
<value>ambari-qa</value>
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 8b52ca1..f19c321 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -200,6 +200,7 @@ proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
ranger_group = config['configurations']['ranger-env']['ranger_group']
dfs_cluster_administrators_group = config['configurations']['hdfs-site']["dfs.cluster.administrators"]
+sysprep_skip_create_users_and_groups = default("/configurations/cluster-env/sysprep_skip_create_users_and_groups", False)
ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
fetch_nonlocal_groups = config['configurations']['cluster-env']["fetch_nonlocal_groups"]
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
index 1a7d21a..320872e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
@@ -30,7 +30,12 @@ def setup_users():
Creates users before cluster installation
"""
import params
- should_create_users_and_groups = not params.host_sys_prepped and not params.ignore_groupsusers_create
+
+ should_create_users_and_groups = False
+ if params.host_sys_prepped:
+ should_create_users_and_groups = not params.sysprep_skip_create_users_and_groups
+ else:
+ should_create_users_and_groups = not params.ignore_groupsusers_create
if should_create_users_and_groups:
for group in params.group_list:
@@ -60,19 +65,16 @@ def setup_users():
create_parents = True,
cd_access="a",
)
- if not params.host_sys_prepped and params.override_uid == "true":
+ if params.override_uid == "true":
set_uid(params.hbase_user, params.hbase_user_dirs)
else:
- Logger.info('Skipping setting uid for hbase user as host is sys prepped')
- pass
+ Logger.info('Skipping setting uid for hbase user as host is sys prepped')
- if not params.host_sys_prepped:
+ if should_create_users_and_groups:
if params.has_namenode:
- if should_create_users_and_groups:
- create_dfs_cluster_admins()
+ create_dfs_cluster_admins()
if params.has_tez and params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.3') >= 0:
- if should_create_users_and_groups:
- create_tez_am_view_acls()
+ create_tez_am_view_acls()
else:
Logger.info('Skipping setting dfs cluster admin and tez view acls as host is sys prepped')
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 45eab2f..c678a72 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -33,7 +33,9 @@ from resource_management.libraries.resources.hdfs_resource import HdfsResource
config = Script.get_config()
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+# Whether to skip copying fast-hdfs-resource.jar to /var/lib/ambari-agent/lib/
+# This is required if tarballs are going to be copied to HDFS, so set to False
+sysprep_skip_copy_fast_jar_hdfs = default("/configurations/cluster-env/sysprep_skip_copy_fast_jar_hdfs", False)
stack_version_unformatted = config['hostLevelParams']['stack_version']
stack_version_formatted = format_stack_version(stack_version_unformatted)
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
index 8f845d2..2182fd1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
@@ -61,8 +61,8 @@ def setup_hadoop():
else:
tc_owner = params.hdfs_user
- # if WebHDFS is not enabled we need this jar to create hadoop folders.
- if params.host_sys_prepped:
+ # if WebHDFS is not enabled we need this jar to create hadoop folders and copy tarballs to HDFS.
+ if params.sysprep_skip_copy_fast_jar_hdfs:
print "Skipping copying of fast-hdfs-resource.jar as host is sys prepped"
elif params.dfs_type == 'HCFS' or not WebHDFSUtil.is_webhdfs_available(params.is_webhdfs_enabled, params.default_fs):
# for source-code of jar goto contrib/fast-hdfs-resource
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 3e5dc7e..b3e2494 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -947,8 +947,8 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
self.assertResourceCalled('Execute',
('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', version), sudo=True,)
- copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs", host_sys_prepped=False)
- copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs", host_sys_prepped=False)
+ copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs", skip=False)
+ copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs", skip=False)
self.assertEquals(2, copy_to_hdfs_mock.call_count)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
@@ -990,8 +990,8 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
self.assertResourceCalled('Execute',
('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', version), sudo=True,)
- copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs", host_sys_prepped=False)
- copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs", host_sys_prepped=False)
+ copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs", skip=False)
+ copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs", skip=False)
self.assertEquals(2, copy_to_hdfs_mock.call_count)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 9ce5530..643f946 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -873,8 +873,8 @@ class TestHistoryServer(RMFTestCase):
mocks_dict = mocks_dict)
self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-mapreduce-historyserver', version), sudo=True)
- self.assertTrue(call("tez", "hadoop", "hdfs", host_sys_prepped=False) in copy_to_hdfs_mock.call_args_list)
- self.assertTrue(call("slider", "hadoop", "hdfs", host_sys_prepped=False) in copy_to_hdfs_mock.call_args_list)
+ self.assertTrue(call("tez", "hadoop", "hdfs", skip=False) in copy_to_hdfs_mock.call_args_list)
+ self.assertTrue(call("slider", "hadoop", "hdfs", skip=False) in copy_to_hdfs_mock.call_args_list)
# From call to conf_select.get_hadoop_conf_dir()
self.assert_call_to_get_hadoop_conf_dir()
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed28ff48/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
index 3abc601..d3508e1 100644
--- a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
@@ -155,7 +155,7 @@ class TestPigServiceCheck(RMFTestCase):
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
- copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs", host_sys_prepped=False)
+ copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs", skip=False)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = True,
[08/16] ambari git commit: AMBARI-18442. Python UT fail on trunk
(aonishuk)
Posted by nc...@apache.org.
AMBARI-18442. Python UT fail on trunk (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f6769feb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f6769feb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f6769feb
Branch: refs/heads/branch-dev-patch-upgrade
Commit: f6769feb248608c0f9b3d8dee62c1943710ac4c7
Parents: 169cb78
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Sep 22 13:56:14 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Sep 22 13:56:14 2016 +0300
----------------------------------------------------------------------
.../src/test/python/stacks/2.3/ATLAS/test_service_check.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6769feb/ambari-server/src/test/python/stacks/2.3/ATLAS/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_service_check.py b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_service_check.py
index b9dd736..43ce520 100644
--- a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_service_check.py
@@ -34,7 +34,7 @@ class TestAtlasCheck(RMFTestCase):
target = RMFTestCase.TARGET_COMMON_SERVICES
)
- self.assertResourceCalled('Execute', 'curl -s -o /dev/null -w "%{http_code}" http://c6401.ambari.apache.org:21000/',
+ self.assertResourceCalled('Execute', 'curl -k -s -o /dev/null -w "%{http_code}" http://c6401.ambari.apache.org:21000/',
user = 'ambari-qa',
tries = 5,
try_sleep = 10)
@@ -54,7 +54,7 @@ class TestAtlasCheck(RMFTestCase):
'/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM',
user = 'ambari-qa')
- self.assertResourceCalled('Execute', 'curl --negotiate -u : -b ~/cookiejar.txt -c ~/cookiejar.txt -s -o /dev/null -w "%{http_code}" https://c6401.ambari.apache.org:21443/',
+ self.assertResourceCalled('Execute', 'curl -k --negotiate -u : -b ~/cookiejar.txt -c ~/cookiejar.txt -s -o /dev/null -w "%{http_code}" https://c6401.ambari.apache.org:21443/',
user = 'ambari-qa',
tries = 5,
try_sleep = 10)
[13/16] ambari git commit: AMBARI-18433. Enforce granular role-based
access control for custom actions (rlevas)
Posted by nc...@apache.org.
AMBARI-18433. Enforce granular role-based access control for custom actions (rlevas)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ac0c66e9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ac0c66e9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ac0c66e9
Branch: refs/heads/branch-dev-patch-upgrade
Commit: ac0c66e986e14db6a746dfe1b84f36a662dacfbb
Parents: 39ed8e4
Author: Robert Levas <rl...@hortonworks.com>
Authored: Thu Sep 22 12:36:52 2016 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Thu Sep 22 12:36:52 2016 -0400
----------------------------------------------------------------------
.../admin-web/app/scripts/services/Cluster.js | 2 +
.../internal/RequestResourceProvider.java | 84 ++++++------
.../authorization/RoleAuthorization.java | 2 +
.../server/upgrade/UpgradeCatalog250.java | 16 +++
.../main/resources/Ambari-DDL-Derby-CREATE.sql | 11 +-
.../main/resources/Ambari-DDL-MySQL-CREATE.sql | 11 +-
.../main/resources/Ambari-DDL-Oracle-CREATE.sql | 11 +-
.../resources/Ambari-DDL-Postgres-CREATE.sql | 11 +-
.../resources/Ambari-DDL-SQLAnywhere-CREATE.sql | 11 +-
.../resources/Ambari-DDL-SQLServer-CREATE.sql | 11 +-
.../system_action_definitions.xml | 5 +
.../internal/RequestResourceProviderTest.java | 134 ++++++++++++++++---
.../security/TestAuthenticationFactory.java | 1 +
.../server/upgrade/UpgradeCatalog250Test.java | 117 ++++++++++++++--
14 files changed, 340 insertions(+), 87 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
index c17c36d..02c231a 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
@@ -67,6 +67,7 @@ angular.module('ambariAdminConsole')
"CLUSTER.TOGGLE_ALERTS",
"CLUSTER.TOGGLE_KERBEROS",
"CLUSTER.UPGRADE_DOWNGRADE_STACK",
+ "CLUSTER.RUN_CUSTOM_COMMAND",
"AMBARI.ADD_DELETE_CLUSTERS",
"AMBARI.ASSIGN_ROLES",
"AMBARI.EDIT_STACK_REPOS",
@@ -76,6 +77,7 @@ angular.module('ambariAdminConsole')
"AMBARI.MANAGE_USERS",
"AMBARI.MANAGE_VIEWS",
"AMBARI.RENAME_CLUSTER",
+ "AMBARI.RUN_CUSTOM_COMMAND",
"SERVICE.SET_SERVICE_USERS_GROUPS"
],
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
index d38234f..8c1bc57 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -186,53 +186,61 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
String clusterName = actionRequest.getClusterName();
- if(clusterName == null) {
- String actionName = actionRequest.getActionName();
+ ResourceType resourceType;
+ Long resourceId;
- // Ensure that the actionName is not null or empty. A null actionName will result in
- // a NPE at when getting the action definition. The string "_unknown_action_" should not
- // result in a valid action definition and should be easy to understand in any error message
- // that gets displayed or logged due to an authorization issue.
- if(StringUtils.isEmpty(actionName)) {
- actionName = "_unknown_action_";
- }
+ if (StringUtils.isEmpty(clusterName)) {
+ resourceType = ResourceType.AMBARI;
+ resourceId = null;
+ } else {
+ resourceType = ResourceType.CLUSTER;
+ resourceId = getClusterResourceId(clusterName);
+ }
- ActionDefinition actionDefinition = getManagementController().getAmbariMetaInfo().getActionDefinition(actionName);
- Set<RoleAuthorization> permissions = (actionDefinition == null) ? null : actionDefinition.getPermissions();
+ if (actionRequest.isCommand()) {
+ String commandName = actionRequest.getCommandName();
- if(permissions == null) {
- if (!AuthorizationHelper.isAuthorized(ResourceType.AMBARI, null, RoleAuthorization.SERVICE_RUN_CUSTOM_COMMAND)) {
- throw new AuthorizationException(String.format("The authenticated user is not authorized to execute the '%s'command.", actionName));
- }
+ if (StringUtils.isEmpty(commandName)) {
+ commandName = "_unknown_command_";
}
- else {
- // Since we cannot tell whether the action is to be exectued for the system or a
- // non-disclosed cluster, specify that the resource is a CLUSTER with no resource id.
- // This should ensure that a user with a role for any cluster with the appropriate
- // permissions or an Ambari administrator can execute the command.
- if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, null, permissions)) {
- throw new AuthorizationException(String.format("The authenticated user is not authorized to execute the '%s'command.", actionName));
+
+ if (commandName.endsWith("_SERVICE_CHECK")) {
+ if (!AuthorizationHelper.isAuthorized(resourceType, resourceId, RoleAuthorization.SERVICE_RUN_SERVICE_CHECK)) {
+ throw new AuthorizationException("The authenticated user is not authorized to execute service checks.");
+ }
+ } else if (commandName.equals("DECOMMISSION")) {
+ if (!AuthorizationHelper.isAuthorized(resourceType, resourceId, RoleAuthorization.SERVICE_DECOMMISSION_RECOMMISSION)) {
+ throw new AuthorizationException("The authenticated user is not authorized to decommission services.");
+ }
+ } else {
+ if (!AuthorizationHelper.isAuthorized(resourceType, resourceId, RoleAuthorization.SERVICE_RUN_CUSTOM_COMMAND)) {
+ throw new AuthorizationException(String.format("The authenticated user is not authorized to execute the command, %s.",
+ commandName));
}
}
- }
- else if(actionRequest.isCommand()) {
- if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER,
- getClusterResourceId(clusterName), RoleAuthorization.SERVICE_RUN_CUSTOM_COMMAND)) {
- throw new AuthorizationException("The authenticated user is not authorized to execute custom service commands.");
- }
- }
- else {
+ } else {
String actionName = actionRequest.getActionName();
- // actionName is expected to not be null since the action request is not a command
- if(actionName.contains("SERVICE_CHECK")) {
- if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, getClusterResourceId(clusterName), RoleAuthorization.SERVICE_RUN_SERVICE_CHECK)) {
+ if (StringUtils.isEmpty(actionName)) {
+ actionName = "_unknown_action_";
+ }
+
+ if (actionName.contains("SERVICE_CHECK")) {
+ if (!AuthorizationHelper.isAuthorized(resourceType, resourceId, RoleAuthorization.SERVICE_RUN_SERVICE_CHECK)) {
throw new AuthorizationException("The authenticated user is not authorized to execute service checks.");
}
- }
- else if(actionName.equals("DECOMMISSION")) {
- if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, getClusterResourceId(clusterName), RoleAuthorization.SERVICE_DECOMMISSION_RECOMMISSION)) {
- throw new AuthorizationException("The authenticated user is not authorized to decommission services.");
+ } else {
+ // A custom action has been requested
+ ActionDefinition actionDefinition = (actionName == null)
+ ? null
+ : getManagementController().getAmbariMetaInfo().getActionDefinition(actionName);
+
+ Set<RoleAuthorization> permissions = (actionDefinition == null)
+ ? null
+ : actionDefinition.getPermissions();
+
+ if (!AuthorizationHelper.isAuthorized(resourceType, resourceId, permissions)) {
+ throw new AuthorizationException(String.format("The authenticated user is not authorized to execute the action %s.", actionName));
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
index 0157d49..4a0ea71 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
@@ -38,6 +38,7 @@ public enum RoleAuthorization {
AMBARI_MANAGE_USERS("AMBARI.MANAGE_USERS"),
AMBARI_MANAGE_VIEWS("AMBARI.MANAGE_VIEWS"),
AMBARI_RENAME_CLUSTER("AMBARI.RENAME_CLUSTER"),
+ AMBARI_RUN_CUSTOM_COMMAND("AMBARI.RUN_CUSTOM_COMMAND"),
CLUSTER_MANAGE_CREDENTIALS("CLUSTER.MANAGE_CREDENTIALS"),
CLUSTER_MODIFY_CONFIGS("CLUSTER.MODIFY_CONFIGS"),
CLUSTER_MANAGE_CONFIG_GROUPS("CLUSTER.MANAGE_CONFIG_GROUPS"),
@@ -51,6 +52,7 @@ public enum RoleAuthorization {
CLUSTER_VIEW_METRICS("CLUSTER.VIEW_METRICS"),
CLUSTER_VIEW_STACK_DETAILS("CLUSTER.VIEW_STACK_DETAILS"),
CLUSTER_VIEW_STATUS_INFO("CLUSTER.VIEW_STATUS_INFO"),
+ CLUSTER_RUN_CUSTOM_COMMAND("CLUSTER.RUN_CUSTOM_COMMAND"),
HOST_ADD_DELETE_COMPONENTS("HOST.ADD_DELETE_COMPONENTS"),
HOST_ADD_DELETE_HOSTS("HOST.ADD_DELETE_HOSTS"),
HOST_TOGGLE_MAINTENANCE("HOST.TOGGLE_MAINTENANCE"),
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
index 35c773a..185bd58 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
@@ -19,6 +19,8 @@ package org.apache.ambari.server.upgrade;
import java.sql.SQLException;
import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -107,6 +109,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
@Override
protected void executeDMLUpdates() throws AmbariException, SQLException {
updateAMSConfigs();
+ createRoleAuthorizations();
}
protected void updateHostVersionTable() throws SQLException {
@@ -166,6 +169,19 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
return content;
}
+ /**
+ * Create new role authorizations: CLUSTER.RUN_CUSTOM_COMMAND and AMBARI.RUN_CUSTOM_COMMAND
+ *
+ * @throws SQLException
+ */
+ protected void createRoleAuthorizations() throws SQLException {
+ LOG.info("Adding authorizations");
+ addRoleAuthorization("CLUSTER.RUN_CUSTOM_COMMAND", "Perform custom cluster-level actions",
+ Arrays.asList("AMBARI.ADMINISTRATOR:AMBARI", "CLUSTER.ADMINISTRATOR:CLUSTER"));
+
+ addRoleAuthorization("AMBARI.RUN_CUSTOM_COMMAND", "Perform custom administrative actions",
+ Collections.singletonList("AMBARI.ADMINISTRATOR:AMBARI"));
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index 38f78c5..c2c965d 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -1255,6 +1255,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'CLUSTER.TOGGLE_KERBEROS', 'Enable/disable Kerberos' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'CLUSTER.UPGRADE_DOWNGRADE_STACK', 'Upgrade/downgrade stack' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'CLUSTER.MANAGE_USER_PERSISTED_DATA', 'Manage cluster-level user persisted data' FROM SYSIBM.SYSDUMMY1 UNION ALL
+ SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' FROM SYSIBM.SYSDUMMY1 UNION ALL
@@ -1263,7 +1264,8 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'AMBARI.ASSIGN_ROLES', 'Assign roles' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'AMBARI.MANAGE_STACK_VERSIONS', 'Manage stack versions' FROM SYSIBM.SYSDUMMY1 UNION ALL
- SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs' FROM SYSIBM.SYSDUMMY1;
+ SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs' FROM SYSIBM.SYSDUMMY1 UNION ALL
+ SELECT 'AMBARI.RUN_CUSTOM_COMMAND', 'Perform custom administrative actions' FROM SYSIBM.SYSDUMMY1;
-- Set authorizations for View User role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1403,7 +1405,8 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'CLUSTER.TOGGLE_ALERTS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
+ SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
-- Set authorizations for Administrator role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1443,6 +1446,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'CLUSTER.TOGGLE_ALERTS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
@@ -1452,7 +1456,8 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ASSIGN_ROLES' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_STACK_VERSIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
+ SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
INSERT INTO adminprivilege (privilege_id, permission_id, resource_id, principal_id)
SELECT 1, 1, 1, 1 FROM SYSIBM.SYSDUMMY1 ;
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 25948aa..1d55515 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -1184,6 +1184,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'CLUSTER.TOGGLE_ALERTS', 'Enable/disable cluster-level alerts' UNION ALL
SELECT 'CLUSTER.TOGGLE_KERBEROS', 'Enable/disable Kerberos' UNION ALL
SELECT 'CLUSTER.UPGRADE_DOWNGRADE_STACK', 'Upgrade/downgrade stack' UNION ALL
+ SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' UNION ALL
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage administrative settings' UNION ALL
@@ -1192,7 +1193,8 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' UNION ALL
SELECT 'AMBARI.ASSIGN_ROLES', 'Assign roles' UNION ALL
SELECT 'AMBARI.MANAGE_STACK_VERSIONS', 'Manage stack versions' UNION ALL
- SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs';
+ SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs' UNION ALL
+ SELECT 'AMBARI.RUN_CUSTOM_COMMAND', 'Perform custom administrative actions';
-- Set authorizations for View User role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1334,7 +1336,8 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'CLUSTER.TOGGLE_ALERTS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
+ SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
-- Set authorizations for Administrator role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1377,6 +1380,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_SETTINGS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
@@ -1385,7 +1389,8 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ASSIGN_ROLES' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_STACK_VERSIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
+ SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
INSERT INTO adminprivilege (privilege_id, permission_id, resource_id, principal_id) VALUES
(1, 1, 1, 1);
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 07cd6a8..49f3e2f 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -1203,6 +1203,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'CLUSTER.TOGGLE_ALERTS', 'Enable/disable cluster-level alerts' FROM dual UNION ALL
SELECT 'CLUSTER.TOGGLE_KERBEROS', 'Enable/disable Kerberos' FROM dual UNION ALL
SELECT 'CLUSTER.UPGRADE_DOWNGRADE_STACK', 'Upgrade/downgrade stack' FROM dual UNION ALL
+ SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' FROM dual UNION ALL
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' FROM dual UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' FROM dual UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' FROM dual UNION ALL
@@ -1211,7 +1212,8 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' FROM dual UNION ALL
SELECT 'AMBARI.ASSIGN_ROLES', 'Assign roles' FROM dual UNION ALL
SELECT 'AMBARI.MANAGE_STACK_VERSIONS', 'Manage stack versions' FROM dual UNION ALL
- SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs' FROM dual;
+ SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs' FROM dual UNION ALL
+ SELECT 'AMBARI.RUN_CUSTOM_COMMAND', 'Perform custom administrative actions' FROM dual;
-- Set authorizations for View User role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1353,7 +1355,8 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'CLUSTER.TOGGLE_ALERTS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
+ SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
-- Set authorizations for Administrator role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1396,6 +1399,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_SETTINGS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
@@ -1404,7 +1408,8 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ASSIGN_ROLES' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_STACK_VERSIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
+ SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
insert into adminprivilege (privilege_id, permission_id, resource_id, principal_id)
select 1, 1, 1, 1 from dual;
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index f03767b..7aa52ef 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -1175,6 +1175,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'CLUSTER.TOGGLE_ALERTS', 'Enable/disable cluster-level alerts' UNION ALL
SELECT 'CLUSTER.TOGGLE_KERBEROS', 'Enable/disable Kerberos' UNION ALL
SELECT 'CLUSTER.UPGRADE_DOWNGRADE_STACK', 'Upgrade/downgrade stack' UNION ALL
+ SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' UNION ALL
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage administrative settings' UNION ALL
@@ -1183,7 +1184,8 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' UNION ALL
SELECT 'AMBARI.ASSIGN_ROLES', 'Assign roles' UNION ALL
SELECT 'AMBARI.MANAGE_STACK_VERSIONS', 'Manage stack versions' UNION ALL
- SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs';
+ SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs' UNION ALL
+ SELECT 'AMBARI.RUN_CUSTOM_COMMAND', 'Perform custom administrative actions';
-- Set authorizations for View User role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1325,7 +1327,8 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'CLUSTER.TOGGLE_ALERTS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
+ SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
-- Set authorizations for Administrator role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1368,6 +1371,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_SETTINGS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
@@ -1376,7 +1380,8 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ASSIGN_ROLES' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_STACK_VERSIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
+ SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
INSERT INTO adminprivilege (privilege_id, permission_id, resource_id, principal_id) VALUES
(1, 1, 1, 1);
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index 535d847..0c95471 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -1200,6 +1200,7 @@ insert into adminpermission(permission_id, permission_name, resource_type_id, pe
SELECT 'CLUSTER.MANAGE_CONFIG_GROUPS', 'Manage cluster config groups' UNION ALL
SELECT 'CLUSTER.TOGGLE_KERBEROS', 'Enable/disable Kerberos' UNION ALL
SELECT 'CLUSTER.UPGRADE_DOWNGRADE_STACK', 'Upgrade/downgrade stack' UNION ALL
+ SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' UNION ALL
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' UNION ALL
@@ -1208,7 +1209,8 @@ insert into adminpermission(permission_id, permission_name, resource_type_id, pe
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' UNION ALL
SELECT 'AMBARI.ASSIGN_ROLES', 'Assign roles' UNION ALL
SELECT 'AMBARI.MANAGE_STACK_VERSIONS', 'Manage stack versions' UNION ALL
- SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs';
+ SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs' UNION ALL
+ SELECT 'AMBARI.RUN_CUSTOM_COMMAND', 'Perform custom administrative actions';
-- Set authorizations for View User role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1350,7 +1352,8 @@ insert into adminpermission(permission_id, permission_name, resource_type_id, pe
SELECT permission_id, 'CLUSTER.TOGGLE_ALERTS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
+ SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
-- Set authorizations for Administrator role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1393,6 +1396,7 @@ insert into adminpermission(permission_id, permission_name, resource_type_id, pe
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_SETTINGS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
@@ -1401,7 +1405,8 @@ insert into adminpermission(permission_id, permission_name, resource_type_id, pe
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ASSIGN_ROLES' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_STACK_VERSIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
+ SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
insert into adminprivilege (privilege_id, permission_id, resource_id, principal_id)
select 1, 1, 1, 1;
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index 1bfde7a..631b5c4 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -1203,6 +1203,7 @@ BEGIN TRANSACTION
SELECT 'CLUSTER.MANAGE_CONFIG_GROUPS', 'Manage cluster config groups' UNION ALL
SELECT 'CLUSTER.TOGGLE_KERBEROS', 'Enable/disable Kerberos' UNION ALL
SELECT 'CLUSTER.UPGRADE_DOWNGRADE_STACK', 'Upgrade/downgrade stack' UNION ALL
+ SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' UNION ALL
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' UNION ALL
@@ -1211,7 +1212,8 @@ BEGIN TRANSACTION
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' UNION ALL
SELECT 'AMBARI.ASSIGN_ROLES', 'Assign roles' UNION ALL
SELECT 'AMBARI.MANAGE_STACK_VERSIONS', 'Manage stack versions' UNION ALL
- SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs';
+ SELECT 'AMBARI.EDIT_STACK_REPOS', 'Edit stack repository URLs' UNION ALL
+ SELECT 'AMBARI.RUN_CUSTOM_COMMAND', 'Perform custom administrative actions';
-- Set authorizations for View User role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1353,7 +1355,8 @@ BEGIN TRANSACTION
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.MANAGE_CONFIG_GROUPS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
+ SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
-- Set authorizations for Administrator role
INSERT INTO permission_roleauthorization(permission_id, authorization_id)
@@ -1396,6 +1399,7 @@ BEGIN TRANSACTION
SELECT permission_id, 'CLUSTER.TOGGLE_KERBEROS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_SETTINGS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
@@ -1404,7 +1408,8 @@ BEGIN TRANSACTION
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.ASSIGN_ROLES' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_STACK_VERSIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
- SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
+ SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
insert into adminprivilege (privilege_id, permission_id, resource_id, principal_id)
select 1, 1, 1, 1;
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/main/resources/custom_action_definitions/system_action_definitions.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_action_definitions/system_action_definitions.xml b/ambari-server/src/main/resources/custom_action_definitions/system_action_definitions.xml
index bc1c271..fc17584 100644
--- a/ambari-server/src/main/resources/custom_action_definitions/system_action_definitions.xml
+++ b/ambari-server/src/main/resources/custom_action_definitions/system_action_definitions.xml
@@ -39,6 +39,7 @@
<defaultTimeout>60</defaultTimeout>
<description>Update repo files on hosts</description>
<targetType>ALL</targetType>
+ <permissions>HOST.ADD_DELETE_COMPONENTS, HOST.ADD_DELETE_HOSTS, SERVICE.ADD_DELETE_SERVICES</permissions>
</actionDefinition>
<actionDefinition>
<actionName>clear_repocache</actionName>
@@ -49,6 +50,7 @@
<defaultTimeout>60</defaultTimeout>
<description>Clear repository cache on hosts</description>
<targetType>ALL</targetType>
+ <permissions>HOST.ADD_DELETE_COMPONENTS, HOST.ADD_DELETE_HOSTS, SERVICE.ADD_DELETE_SERVICES</permissions>
</actionDefinition>
<actionDefinition>
<actionName>validate_configs</actionName>
@@ -59,6 +61,7 @@
<defaultTimeout>60</defaultTimeout>
<description>Validate if provided service config can be applied to specified hosts</description>
<targetType>ALL</targetType>
+ <permissions>CLUSTER.MODIFY_CONFIGS, SERVICE.MODIFY_CONFIGS</permissions>
</actionDefinition>
<actionDefinition>
<actionName>install_packages</actionName>
@@ -69,6 +72,7 @@
<defaultTimeout>60</defaultTimeout>
<description>Distribute repositories and install packages</description>
<targetType>ALL</targetType>
+ <permissions>HOST.ADD_DELETE_COMPONENTS, HOST.ADD_DELETE_HOSTS, SERVICE.ADD_DELETE_SERVICES</permissions>
</actionDefinition>
<actionDefinition>
<actionName>ru_execute_tasks</actionName>
@@ -78,5 +82,6 @@
<targetComponent/>
<description>Perform upgrade action</description>
<targetType>ANY</targetType>
+ <permissions>CLUSTER.UPGRADE_DOWNGRADE_STACK</permissions>
</actionDefinition>
</actionDefinitions>
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
index d06aa1e..5dfc74d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
@@ -1313,49 +1313,122 @@ public class RequestResourceProviderTest {
@Test
public void testCreateResourcesCheckHostForNonClusterAsAdministrator() throws Exception {
- testCreateResourcesForNonCluster(TestAuthenticationFactory.createAdministrator(), "check_host",
+ testCreateResources(TestAuthenticationFactory.createAdministrator(), null, null, "check_host",
EnumSet.of(RoleAuthorization.HOST_ADD_DELETE_HOSTS));
}
- @Test
+ @Test(expected = AuthorizationException.class)
public void testCreateResourcesCheckHostForNonClusterAsClusterAdministrator() throws Exception {
- testCreateResourcesForNonCluster(TestAuthenticationFactory.createClusterAdministrator(), "check_host",
+ testCreateResources(TestAuthenticationFactory.createClusterAdministrator(), null, null, "check_host",
EnumSet.of(RoleAuthorization.HOST_ADD_DELETE_HOSTS));
}
- @Test
+ @Test(expected = AuthorizationException.class)
public void testCreateResourcesCheckHostForNonClusterAsClusterOperator() throws Exception {
- testCreateResourcesForNonCluster(TestAuthenticationFactory.createClusterOperator(), "check_host",
+ testCreateResources(TestAuthenticationFactory.createClusterOperator(), null, null, "check_host",
EnumSet.of(RoleAuthorization.HOST_ADD_DELETE_HOSTS));
}
@Test(expected = AuthorizationException.class)
public void testCreateResourcesCheckHostForNonClusterAsServiceAdministrator() throws Exception {
- testCreateResourcesForNonCluster(TestAuthenticationFactory.createServiceAdministrator(), "check_host",
+ testCreateResources(TestAuthenticationFactory.createServiceAdministrator(), null, null, "check_host",
+ EnumSet.of(RoleAuthorization.HOST_ADD_DELETE_HOSTS));
+ }
+
+ @Test
+ public void testCreateResourcesCheckHostForClusterAsAdministrator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createAdministrator(), "c1", null, "check_host",
EnumSet.of(RoleAuthorization.HOST_ADD_DELETE_HOSTS));
}
@Test
- public void testCreateResourcesCheckJavaForNonClusterAsAdministrator() throws Exception {
- testCreateResourcesForNonCluster(TestAuthenticationFactory.createAdministrator(), "check_java", null);
+ public void testCreateResourcesCheckHostForClusterAsClusterAdministrator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createClusterAdministrator(), "c1", null, "check_host",
+ EnumSet.of(RoleAuthorization.HOST_ADD_DELETE_HOSTS));
+ }
+
+ @Test
+ public void testCreateResourcesCheckHostForClusterAsClusterOperator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createClusterOperator(), "c1", null, "check_host",
+ EnumSet.of(RoleAuthorization.HOST_ADD_DELETE_HOSTS));
}
@Test(expected = AuthorizationException.class)
- public void testCreateResourcesCheckJavaForNonClusterAsClusterAdministrator() throws Exception {
- testCreateResourcesForNonCluster(TestAuthenticationFactory.createClusterAdministrator(), "check_java", null);
+ public void testCreateResourcesCheckHostForClusterAsServiceAdministrator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createServiceAdministrator(), "c1", null, "check_host",
+ EnumSet.of(RoleAuthorization.HOST_ADD_DELETE_HOSTS));
+ }
+
+ @Test
+ public void testCreateResourcesServiceCheckForClusterAsAdministrator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createAdministrator(), "c1", "SOME_SERVICE_CHECK", null, null);
+ }
+
+ @Test
+ public void testCreateResourcesServiceCheckForClusterAsClusterAdministrator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createClusterAdministrator(), "c1", "SOME_SERVICE_CHECK", null, null);
+ }
+
+ @Test
+ public void testCreateResourcesServiceCheckForClusterAsClusterOperator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createClusterOperator(), "c1", "SOME_SERVICE_CHECK", null, null);
+ }
+
+ @Test
+ public void testCreateResourcesServiceCheckForClusterAsServiceAdministrator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createServiceAdministrator(), "c1", "SOME_SERVICE_CHECK", null, null);
}
@Test(expected = AuthorizationException.class)
- public void testCreateResourcesCheckJavaForNonClusterAsClusterOperator() throws Exception {
- testCreateResourcesForNonCluster(TestAuthenticationFactory.createClusterOperator(), "check_java", null);
+ public void testCreateResourcesServiceCheckForClusterAsClusterUser() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createClusterUser(), "c1", "SOME_SERVICE_CHECK", null, null);
+ }
+ @Test
+ public void testCreateResourcesDecommissionForClusterAsAdministrator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createAdministrator(), "c1", "SOME_SERVICE_CHECK", null, null);
+ }
+
+ @Test
+ public void testCreateResourcesDecommissionForClusterAsClusterAdministrator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createClusterAdministrator(), "c1", "SOME_SERVICE_CHECK", null, null);
+ }
+
+ @Test
+ public void testCreateResourcesDecommissionForClusterAsClusterOperator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createClusterOperator(), "c1", "SOME_SERVICE_CHECK", null, null);
+ }
+
+ @Test
+ public void testCreateResourcesDecommissionForClusterAsServiceAdministrator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createServiceAdministrator(), "c1", "SOME_SERVICE_CHECK", null, null);
}
@Test(expected = AuthorizationException.class)
+ public void testCreateResourcesDecommissionForClusterAsClusterUser() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createClusterUser(), "c1", "SOME_SERVICE_CHECK", null, null);
+ }
+
+ @Test
+ public void testCreateResourcesCustomActionNoPrivsForNonClusterAsAdministrator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createAdministrator(), null, null, "custom_action", null);
+ }
+
+ @Test
+ public void testCreateResourcesCustomActionNoPrivsForNonClusterAsClusterAdministrator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createClusterAdministrator(), null, null, "custom_action", null);
+ }
+
+ @Test
+ public void testCreateResourcesCustomActionNoPrivsForNonClusterAsClusterOperator() throws Exception {
+ testCreateResources(TestAuthenticationFactory.createClusterOperator(), null, null, "custom_action", null);
+ }
+
+ @Test
public void testCreateResourcesForNonClusterAsServiceAdministrator() throws Exception {
- testCreateResourcesForNonCluster(TestAuthenticationFactory.createServiceAdministrator(), "check_java", null);
+ testCreateResources(TestAuthenticationFactory.createServiceAdministrator(), null, null, "custom_action", null);
}
- private void testCreateResourcesForNonCluster(Authentication authentication, String actionName, Set<RoleAuthorization> permissions) throws Exception {
+ private void testCreateResources(Authentication authentication, String clusterName, String commandName, String actionName, Set<RoleAuthorization> permissions) throws Exception {
Resource.Type type = Resource.Type.Request;
Capture<ExecuteActionRequest> actionRequest = newCapture();
@@ -1373,8 +1446,18 @@ public class RequestResourceProviderTest {
expect(actionDefinition.getPermissions()).andReturn(permissions).anyTimes();
expect(response.getMessage()).andReturn("Message").anyTimes();
+ Cluster cluster = createMock(Cluster.class);
+ Clusters clusters = createMock(Clusters.class);
+ if(clusterName != null) {
+ expect(cluster.getResourceId()).andReturn(4L).anyTimes();
+
+ expect(clusters.getCluster(clusterName)).andReturn(cluster).anyTimes();
+
+ expect(managementController.getClusters()).andReturn(clusters).anyTimes();
+ }
+
// replay
- replay(managementController, metaInfo, actionDefinition, response);
+ replay(managementController, metaInfo, actionDefinition, response, cluster, clusters);
// add the property map to a set for the request. add more maps for multiple creates
Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
@@ -1387,11 +1470,16 @@ public class RequestResourceProviderTest {
filterSet.add(filterMap);
properties.put(RequestResourceProvider.REQUEST_RESOURCE_FILTER_ID, filterSet);
-
+ properties.put(RequestResourceProvider.REQUEST_CLUSTER_NAME_PROPERTY_ID, clusterName);
propertySet.add(properties);
Map<String, String> requestInfoProperties = new HashMap<String, String>();
- requestInfoProperties.put(RequestResourceProvider.ACTION_ID, actionName);
+ if(commandName != null) {
+ requestInfoProperties.put(RequestResourceProvider.COMMAND_ID, commandName);
+ }
+ if(actionName != null) {
+ requestInfoProperties.put(RequestResourceProvider.ACTION_ID, actionName);
+ }
SecurityContextHolder.getContext().setAuthentication(authentication);
@@ -1406,9 +1494,15 @@ public class RequestResourceProviderTest {
ExecuteActionRequest capturedRequest = actionRequest.getValue();
Assert.assertTrue(actionRequest.hasCaptured());
- Assert.assertFalse("expected an action", capturedRequest.isCommand());
- Assert.assertEquals(actionName, capturedRequest.getActionName());
- Assert.assertEquals(null, capturedRequest.getCommandName());
+
+ if(actionName != null) {
+ Assert.assertFalse("expected an action", capturedRequest.isCommand());
+ Assert.assertEquals(actionName, capturedRequest.getActionName());
+ }
+ if(commandName != null) {
+ Assert.assertTrue("expected a command", capturedRequest.isCommand());
+ Assert.assertEquals(commandName, capturedRequest.getCommandName());
+ }
Assert.assertNotNull(capturedRequest.getResourceFilters());
Assert.assertEquals(1, capturedRequest.getResourceFilters().size());
RequestResourceFilter capturedResourceFilter = capturedRequest.getResourceFilters().get(0);
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/test/java/org/apache/ambari/server/security/TestAuthenticationFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/TestAuthenticationFactory.java b/ambari-server/src/test/java/org/apache/ambari/server/security/TestAuthenticationFactory.java
index d97cd9a..12d1ac5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/TestAuthenticationFactory.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/TestAuthenticationFactory.java
@@ -217,6 +217,7 @@ public class TestAuthenticationFactory {
RoleAuthorization.SERVICE_VIEW_METRICS,
RoleAuthorization.SERVICE_VIEW_STATUS_INFO,
RoleAuthorization.SERVICE_VIEW_OPERATIONAL_LOGS,
+ RoleAuthorization.CLUSTER_RUN_CUSTOM_COMMAND,
RoleAuthorization.CLUSTER_MANAGE_USER_PERSISTED_DATA)));
return permissionEntity;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ac0c66e9/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
index c4e0a7c..7b6c3ad 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
@@ -18,24 +18,24 @@
package org.apache.ambari.server.upgrade;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
-
import javax.persistence.EntityManager;
import com.google.common.collect.Maps;
import com.google.gson.Gson;
+import junit.framework.Assert;
+import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.ActionManager;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
import org.apache.ambari.server.controller.KerberosHelper;
import org.apache.ambari.server.controller.MaintenanceStateHelper;
import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.PermissionDAO;
+import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
+import org.apache.ambari.server.orm.dao.RoleAuthorizationDAO;
+import org.apache.ambari.server.orm.entities.PermissionEntity;
+import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
+import org.apache.ambari.server.orm.entities.RoleAuthorizationEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
@@ -54,14 +54,26 @@ import com.google.inject.Module;
import com.google.inject.Provider;
import java.lang.reflect.Method;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
-import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.newCapture;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertTrue;
+
/**
* {@link UpgradeCatalog250} unit tests.
*/
@@ -111,14 +123,19 @@ public class UpgradeCatalog250Test {
@Test
public void testExecuteDMLUpdates() throws Exception {
Method updateAmsConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateAMSConfigs");
+ Method createRoleAuthorizations = UpgradeCatalog250.class.getDeclaredMethod("createRoleAuthorizations");
UpgradeCatalog250 upgradeCatalog250 = createMockBuilder(UpgradeCatalog250.class)
- .addMockedMethod(updateAmsConfigs)
- .createMock();
+ .addMockedMethod(updateAmsConfigs)
+ .addMockedMethod(createRoleAuthorizations)
+ .createMock();
upgradeCatalog250.updateAMSConfigs();
expectLastCall().once();
+ upgradeCatalog250.createRoleAuthorizations();
+ expectLastCall().once();
+
replay(upgradeCatalog250);
upgradeCatalog250.executeDMLUpdates();
@@ -196,4 +213,82 @@ public class UpgradeCatalog250Test {
Map<String, String> updatedProperties = propertiesCapture.getValue();
assertTrue(Maps.difference(newPropertiesAmsEnv, updatedProperties).areEqual());
}
+
+ @Test
+ public void testCreateRoleAuthorizations() throws AmbariException, SQLException {
+
+ EasyMockSupport easyMockSupport = new EasyMockSupport();
+
+ ResourceTypeEntity ambariResourceTypeEntity = easyMockSupport.createMock(ResourceTypeEntity.class);
+
+ ResourceTypeEntity clusterResourceTypeEntity = easyMockSupport.createMock(ResourceTypeEntity.class);
+
+ Collection<RoleAuthorizationEntity> ambariAdministratorAuthorizations = new ArrayList<RoleAuthorizationEntity>();
+ Collection<RoleAuthorizationEntity> clusterAdministratorAuthorizations = new ArrayList<RoleAuthorizationEntity>();
+
+ PermissionEntity clusterAdministratorPermissionEntity = easyMockSupport.createMock(PermissionEntity.class);
+ expect(clusterAdministratorPermissionEntity.getAuthorizations())
+ .andReturn(clusterAdministratorAuthorizations)
+ .times(1);
+
+ PermissionEntity ambariAdministratorPermissionEntity = easyMockSupport.createMock(PermissionEntity.class);
+ expect(ambariAdministratorPermissionEntity.getAuthorizations())
+ .andReturn(ambariAdministratorAuthorizations)
+ .times(2);
+
+ PermissionDAO permissionDAO = easyMockSupport.createMock(PermissionDAO.class);
+ expect(permissionDAO.findPermissionByNameAndType("AMBARI.ADMINISTRATOR", ambariResourceTypeEntity))
+ .andReturn(ambariAdministratorPermissionEntity)
+ .times(2);
+ expect(permissionDAO.findPermissionByNameAndType("CLUSTER.ADMINISTRATOR", clusterResourceTypeEntity))
+ .andReturn(clusterAdministratorPermissionEntity)
+ .times(1);
+ expect(permissionDAO.merge(ambariAdministratorPermissionEntity))
+ .andReturn(ambariAdministratorPermissionEntity)
+ .times(2);
+ expect(permissionDAO.merge(clusterAdministratorPermissionEntity))
+ .andReturn(clusterAdministratorPermissionEntity)
+ .times(1);
+
+ ResourceTypeDAO resourceTypeDAO = easyMockSupport.createMock(ResourceTypeDAO.class);
+ expect(resourceTypeDAO.findByName("AMBARI")).andReturn(ambariResourceTypeEntity).times(2);
+ expect(resourceTypeDAO.findByName("CLUSTER")).andReturn(clusterResourceTypeEntity).times(1);
+
+ RoleAuthorizationDAO roleAuthorizationDAO = easyMockSupport.createMock(RoleAuthorizationDAO.class);
+ expect(roleAuthorizationDAO.findById("CLUSTER.RUN_CUSTOM_COMMAND")).andReturn(null).times(1);
+ expect(roleAuthorizationDAO.findById("AMBARI.RUN_CUSTOM_COMMAND")).andReturn(null).times(1);
+
+ Capture<RoleAuthorizationEntity> captureClusterRunCustomCommandEntity = newCapture();
+ roleAuthorizationDAO.create(capture(captureClusterRunCustomCommandEntity));
+ expectLastCall().times(1);
+
+ Capture<RoleAuthorizationEntity> captureAmbariRunCustomCommandEntity = newCapture();
+ roleAuthorizationDAO.create(capture(captureAmbariRunCustomCommandEntity));
+ expectLastCall().times(1);
+
+ Injector injector = easyMockSupport.createNiceMock(Injector.class);
+ expect(injector.getInstance(RoleAuthorizationDAO.class)).andReturn(roleAuthorizationDAO).atLeastOnce();
+ expect(injector.getInstance(PermissionDAO.class)).andReturn(permissionDAO).atLeastOnce();
+ expect(injector.getInstance(ResourceTypeDAO.class)).andReturn(resourceTypeDAO).atLeastOnce();
+
+ easyMockSupport.replayAll();
+ new UpgradeCatalog250(injector).createRoleAuthorizations();
+ easyMockSupport.verifyAll();
+
+ RoleAuthorizationEntity ambariRunCustomCommandEntity = captureAmbariRunCustomCommandEntity.getValue();
+ RoleAuthorizationEntity clusterRunCustomCommandEntity = captureClusterRunCustomCommandEntity.getValue();
+
+ Assert.assertEquals("AMBARI.RUN_CUSTOM_COMMAND", ambariRunCustomCommandEntity.getAuthorizationId());
+ Assert.assertEquals("Perform custom administrative actions", ambariRunCustomCommandEntity.getAuthorizationName());
+
+ Assert.assertEquals("CLUSTER.RUN_CUSTOM_COMMAND", clusterRunCustomCommandEntity.getAuthorizationId());
+ Assert.assertEquals("Perform custom cluster-level actions", clusterRunCustomCommandEntity.getAuthorizationName());
+
+ Assert.assertEquals(2, ambariAdministratorAuthorizations.size());
+ Assert.assertTrue(ambariAdministratorAuthorizations.contains(clusterRunCustomCommandEntity));
+ Assert.assertTrue(ambariAdministratorAuthorizations.contains(ambariRunCustomCommandEntity));
+
+ Assert.assertEquals(1, clusterAdministratorAuthorizations.size());
+ Assert.assertTrue(clusterAdministratorAuthorizations.contains(clusterRunCustomCommandEntity));
+ }
}
[10/16] ambari git commit: AMBARI-18445. SmartSense error during host
assignment (aonishuk)
Posted by nc...@apache.org.
AMBARI-18445. SmartSense error during host assignment (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2c7bd2f4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2c7bd2f4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2c7bd2f4
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2c7bd2f4984e4c08f8154c5a0bf4d2e6fac57c10
Parents: 9b6bc21
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Sep 22 16:57:35 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Sep 22 16:57:35 2016 +0300
----------------------------------------------------------------------
.../stacks/HDP/2.0.6/services/stack_advisor.py | 7 ++++---
.../python/stacks/2.0.6/common/test_stack_advisor.py | 12 ++++++++----
.../test/python/stacks/2.5/common/test_stack_advisor.py | 5 ++++-
3 files changed, 16 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/2c7bd2f4/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 800bfa2..6cec585 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -65,21 +65,22 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
if "+" in cardinality:
hostsMin = int(cardinality[:-1])
if componentHostsCount < hostsMin:
- message = "At least {0} {1} components should be installed in cluster.".format(hostsMin, componentDisplayName)
+ message = "at least {0} {1} components should be installed in cluster.".format(hostsMin, componentDisplayName)
elif "-" in cardinality:
nums = cardinality.split("-")
hostsMin = int(nums[0])
hostsMax = int(nums[1])
if componentHostsCount > hostsMax or componentHostsCount < hostsMin:
- message = "Between {0} and {1} {2} components should be installed in cluster.".format(hostsMin, hostsMax, componentDisplayName)
+ message = "between {0} and {1} {2} components should be installed in cluster.".format(hostsMin, hostsMax, componentDisplayName)
elif "ALL" == cardinality:
if componentHostsCount != hostsCount:
message = "{0} component should be installed on all hosts in cluster.".format(componentDisplayName)
else:
if componentHostsCount != int(cardinality):
- message = "Exactly {0} {1} components should be installed in cluster.".format(int(cardinality), componentDisplayName)
+ message = "exactly {0} {1} components should be installed in cluster.".format(int(cardinality), componentDisplayName)
if message is not None:
+ message = "You have selected {0} {1} components. Please consider that {2}".format(componentHostsCount, componentDisplayName, message)
items.append({"type": 'host-component', "level": 'ERROR', "message": message, "component-name": componentName})
# Validating host-usage
http://git-wip-us.apache.org/repos/asf/ambari/blob/2c7bd2f4/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index a70922f..cd69dc3 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -146,7 +146,8 @@ class TestHDP206StackAdvisor(TestCase):
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
- {"message": "Ganglia Monitor component should be installed on all hosts in cluster.", "level": "ERROR"}
+ {"message": "You have selected 1 Ganglia Monitor components. Please consider that Ganglia Monitor component should be installed on all hosts in cluster.",
+ "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
@@ -165,7 +166,8 @@ class TestHDP206StackAdvisor(TestCase):
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
- {"message": "Exactly 2 Ganglia Monitor components should be installed in cluster.", "level": "ERROR"}
+ {"message": "You have selected 1 Ganglia Monitor components. Please consider that exactly 2 Ganglia Monitor components should be installed in cluster.",
+ "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
@@ -184,7 +186,8 @@ class TestHDP206StackAdvisor(TestCase):
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
- {"message": "At least 3 Ganglia Server components should be installed in cluster.", "level": "ERROR"}
+ {"message": "You have selected 2 Ganglia Server components. Please consider that at least 3 Ganglia Server components should be installed in cluster.",
+ "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
@@ -361,7 +364,8 @@ class TestHDP206StackAdvisor(TestCase):
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
- {"message": "Between 0 and 1 Ganglia Server components should be installed in cluster.", "level": "ERROR"}
+ {"message": "You have selected 2 Ganglia Server components. Please consider that between 0 and 1 Ganglia Server components should be installed in cluster.",
+ "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
http://git-wip-us.apache.org/repos/asf/ambari/blob/2c7bd2f4/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index 0ed1761..72e59aa 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -436,7 +436,10 @@ class TestHDP25StackAdvisor(TestCase):
services = self.load_json("services-normal-his-2-hosts.json")
validations = self.stackAdvisor.getComponentLayoutValidations(services, hosts)
- expected = {'component-name': 'HIVE_SERVER_INTERACTIVE', 'message': 'Between 0 and 1 HiveServer2 Interactive components should be installed in cluster.', 'type': 'host-component', 'level': 'ERROR'}
+ expected = {'component-name': 'HIVE_SERVER_INTERACTIVE',
+ 'message': 'You have selected 2 HiveServer2 Interactive components. Please consider that between 0 and 1 HiveServer2 Interactive components should be installed in cluster.',
+ 'type': 'host-component',
+ 'level': 'ERROR'}
self.assertEquals(validations[0], expected)
[14/16] ambari git commit: AMBARI-18448. NPE when installing secure
cluster via Blueprints due to null logger (rlevas)
Posted by nc...@apache.org.
AMBARI-18448. NPE when installing secure cluster via Blueprints due to null logger (rlevas)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/33a573cd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/33a573cd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/33a573cd
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 33a573cdb54229cee37e1f49837be783de3cba7b
Parents: ac0c66e
Author: Robert Levas <rl...@hortonworks.com>
Authored: Thu Sep 22 14:48:29 2016 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Thu Sep 22 14:48:29 2016 -0400
----------------------------------------------------------------------
.../kerberos/ConfigureAmbariIdentitiesServerAction.java | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/33a573cd/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
index 9c2c622..10647e8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
@@ -184,12 +184,16 @@ public class ConfigureAmbariIdentitiesServerAction extends KerberosServerAction
" This is not an error if an Ambari agent is not installed on the Ambari server host.",
principal, ambariServerHostName);
LOG.warn(message);
- actionLog.writeStdErr(message);
+ if(actionLog != null) {
+ actionLog.writeStdErr(message);
+ }
} else if (!kerberosPrincipalHostDAO.exists(principal, ambariServerHostID)) {
kerberosPrincipalHostDAO.create(principal, ambariServerHostID);
}
- actionLog.writeStdOut(String.format("Created Ambari server keytab file for %s at %s", principal, destKeytabFile));
+ if(actionLog != null) {
+ actionLog.writeStdOut(String.format("Created Ambari server keytab file for %s at %s", principal, destKeytabFile));
+ }
}
} catch (InterruptedException | IOException e) {
throw new AmbariException(e.getLocalizedMessage(), e);
[02/16] ambari git commit: AMBARI-18406. Create authentication filter
to perform Kerberos authentication for Ambari (rlevas)
Posted by nc...@apache.org.
AMBARI-18406. Create authentication filter to perform Kerberos authentication for Ambari (rlevas)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7e08470c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7e08470c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7e08470c
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 7e08470cfef5b9dd29724c318dd996d789e0414e
Parents: dcf779d
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Sep 21 10:48:59 2016 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Wed Sep 21 10:48:59 2016 -0400
----------------------------------------------------------------------
ambari-project/pom.xml | 5 +
ambari-server/pom.xml | 6 +-
.../server/configuration/Configuration.java | 30 ++-
.../server/controller/KerberosHelper.java | 2 +
.../server/controller/KerberosHelperImpl.java | 18 +-
.../server/security/AmbariEntryPoint.java | 15 +-
.../AmbariAuthToLocalUserDetailsService.java | 139 +++++++++++
.../AmbariKerberosAuthenticationFilter.java | 172 ++++++++++++++
.../kerberos/AmbariKerberosTicketValidator.java | 93 ++++++++
.../AbstractPrepareKerberosServerAction.java | 10 +-
.../ConfigureAmbariIdentitiesServerAction.java | 235 +++++++++++++++++++
.../ConfigureAmbariIndetityServerAction.java | 208 ----------------
.../kerberos/CreatePrincipalsServerAction.java | 5 +-
.../kerberos/KerberosServerAction.java | 12 +-
.../webapp/WEB-INF/spring-security.xml | 32 ++-
.../server/controller/KerberosHelperTest.java | 6 +-
...AmbariAuthToLocalUserDetailsServiceTest.java | 97 ++++++++
.../AmbariKerberosAuthenticationFilterTest.java | 133 +++++++++++
.../AmbariKerberosTicketValidatorTest.java | 49 ++++
19 files changed, 1026 insertions(+), 241 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-project/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-project/pom.xml b/ambari-project/pom.xml
index 2615b46..4f045fe 100644
--- a/ambari-project/pom.xml
+++ b/ambari-project/pom.xml
@@ -132,6 +132,11 @@
<version>3.1.2.RELEASE</version>
</dependency>
<dependency>
+ <groupId>org.springframework.security.kerberos</groupId>
+ <artifactId>spring-security-kerberos-web</artifactId>
+ <version>1.0.1.RELEASE</version>
+ </dependency>
+ <dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-mock</artifactId>
<version>2.0.8</version>
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 197c992..5731c9d 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -44,7 +44,7 @@
<stacksSrcLocation>src/main/resources/stacks/${stack.distribution}</stacksSrcLocation>
<tarballResourcesFolder>src/main/resources</tarballResourcesFolder>
<skipPythonTests>false</skipPythonTests>
- <hadoop.version>2.7.1</hadoop.version>
+ <hadoop.version>2.7.2</hadoop.version>
<empty.dir>src/main/package</empty.dir> <!-- any directory in project with not very big amount of files (not to waste-load them) -->
<el.log>ALL</el.log> <!-- log level for EclipseLink eclipselink-staticweave-maven-plugin -->
</properties>
@@ -986,6 +986,10 @@
<artifactId>spring-security-web</artifactId>
</dependency>
<dependency>
+ <groupId>org.springframework.security.kerberos</groupId>
+ <artifactId>spring-security-kerberos-web</artifactId>
+ </dependency>
+ <dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-mock</artifactId>
<scope>test</scope>
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index b2fa4c0..f1058b6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -5284,9 +5284,11 @@ public class Configuration {
try {
orderedUserTypes.add(UserType.valueOf(type.toUpperCase()));
} catch (IllegalArgumentException e) {
- throw new IllegalArgumentException(String.format("While processing ordered user types from %s, " +
+ String message = String.format("While processing ordered user types from %s, " +
"%s was found to be an invalid user type.",
- KERBEROS_AUTH_USER_TYPES.getKey(), type), e);
+ KERBEROS_AUTH_USER_TYPES.getKey(), type);
+ LOG.error(message);
+ throw new IllegalArgumentException(message, e);
}
}
}
@@ -5320,9 +5322,11 @@ public class Configuration {
// Validate the SPNEGO principal name to ensure it was set.
// Log any found issues.
if (StringUtils.isEmpty(kerberosAuthProperties.getSpnegoPrincipalName())) {
- throw new IllegalArgumentException(String.format("The SPNEGO principal name specified in %s is empty. " +
+ String message = String.format("The SPNEGO principal name specified in %s is empty. " +
"This will cause issues authenticating users using Kerberos.",
- KERBEROS_AUTH_SPNEGO_PRINCIPAL.getKey()));
+ KERBEROS_AUTH_SPNEGO_PRINCIPAL.getKey());
+ LOG.error(message);
+ throw new IllegalArgumentException(message);
}
// Get the SPNEGO keytab file. There is nothing special to process for this value.
@@ -5331,19 +5335,25 @@ public class Configuration {
// Validate the SPNEGO keytab file to ensure it was set, it exists and it is readable by Ambari.
// Log any found issues.
if (StringUtils.isEmpty(kerberosAuthProperties.getSpnegoKeytabFilePath())) {
- throw new IllegalArgumentException(String.format("The SPNEGO keytab file path specified in %s is empty. " +
+ String message = String.format("The SPNEGO keytab file path specified in %s is empty. " +
"This will cause issues authenticating users using Kerberos.",
- KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey()));
+ KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey());
+ LOG.error(message);
+ throw new IllegalArgumentException(message);
} else {
File keytabFile = new File(kerberosAuthProperties.getSpnegoKeytabFilePath());
if (!keytabFile.exists()) {
- throw new IllegalArgumentException(String.format("The SPNEGO keytab file path (%s) specified in %s does not exist. " +
+ String message = String.format("The SPNEGO keytab file path (%s) specified in %s does not exist. " +
"This will cause issues authenticating users using Kerberos.",
- keytabFile.getAbsolutePath(), KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey()));
+ keytabFile.getAbsolutePath(), KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey());
+ LOG.error(message);
+ throw new IllegalArgumentException(message);
} else if (!keytabFile.canRead()) {
- throw new IllegalArgumentException(String.format("The SPNEGO keytab file path (%s) specified in %s cannot be read. " +
+ String message = String.format("The SPNEGO keytab file path (%s) specified in %s cannot be read. " +
"This will cause issues authenticating users using Kerberos.",
- keytabFile.getAbsolutePath(), KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey()));
+ keytabFile.getAbsolutePath(), KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey());
+ LOG.error(message);
+ throw new IllegalArgumentException(message);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
index c4d21fc..1153d01 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
@@ -70,6 +70,8 @@ public interface KerberosHelper {
String AMBARI_IDENTITY_NAME = "ambari-server";
+ String SPNEGO_IDENTITY_NAME = "spnego";
+
String CREATE_AMBARI_PRINCIPAL = "create_ambari_principal";
String MANAGE_IDENTITIES = "manage_identities";
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index 5bc5cd8..a3c6fd4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -61,7 +61,7 @@ import org.apache.ambari.server.security.encryption.CredentialStoreService;
import org.apache.ambari.server.serveraction.ActionLog;
import org.apache.ambari.server.serveraction.ServerAction;
import org.apache.ambari.server.serveraction.kerberos.CleanupServerAction;
-import org.apache.ambari.server.serveraction.kerberos.ConfigureAmbariIndetityServerAction;
+import org.apache.ambari.server.serveraction.kerberos.ConfigureAmbariIdentitiesServerAction;
import org.apache.ambari.server.serveraction.kerberos.CreateKeytabFilesServerAction;
import org.apache.ambari.server.serveraction.kerberos.CreatePrincipalsServerAction;
import org.apache.ambari.server.serveraction.kerberos.DestroyPrincipalsServerAction;
@@ -761,7 +761,7 @@ public class KerberosHelperImpl implements KerberosHelper {
* @param ambariServerIdentity the ambari server's {@link KerberosIdentityDescriptor}
* @param configurations a map of compiled configurations used for variable replacement
* @throws AmbariException
- * @see ConfigureAmbariIndetityServerAction#installAmbariServerIdentity(String, String, String, ActionLog)
+ * @see ConfigureAmbariIdentitiesServerAction#installAmbariServerIdentity(String, String, String, ActionLog)
*/
private void installAmbariIdentity(KerberosIdentityDescriptor ambariServerIdentity,
Map<String, Map<String, String>> configurations) throws AmbariException {
@@ -775,7 +775,7 @@ public class KerberosHelperImpl implements KerberosHelper {
if(keytabDescriptor != null) {
String keytabFilePath = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
- injector.getInstance(ConfigureAmbariIndetityServerAction.class)
+ injector.getInstance(ConfigureAmbariIdentitiesServerAction.class)
.installAmbariServerIdentity(principal, ambariServerPrincipalEntity.getCachedKeytabPath(), keytabFilePath, null);
}
}
@@ -1259,7 +1259,7 @@ public class KerberosHelperImpl implements KerberosHelper {
serviceName, componentName, kerberosDescriptor, filterContext);
if (hostname.equals(ambariServerHostname)) {
- addAmbariServerIdentity(kerberosEnvConfig.getProperties(), kerberosDescriptor, identities);
+ addAmbariServerIdentities(kerberosEnvConfig.getProperties(), kerberosDescriptor, identities);
}
if (!identities.isEmpty()) {
@@ -1346,7 +1346,7 @@ public class KerberosHelperImpl implements KerberosHelper {
* @param kerberosDescriptor the kerberos descriptor
* @param identities the collection of identities to add to
*/
- void addAmbariServerIdentity(Map<String, String> kerberosEnvProperties, KerberosDescriptor kerberosDescriptor, List<KerberosIdentityDescriptor> identities) {
+ void addAmbariServerIdentities(Map<String, String> kerberosEnvProperties, KerberosDescriptor kerberosDescriptor, List<KerberosIdentityDescriptor> identities) {
// Determine if we should _calculate_ the Ambari service identity.
// If kerberos-env/create_ambari_principal is not set to false the identity should be calculated.
boolean createAmbariPrincipal = (kerberosEnvProperties == null) || !"false".equalsIgnoreCase(kerberosEnvProperties.get(CREATE_AMBARI_PRINCIPAL));
@@ -1357,6 +1357,12 @@ public class KerberosHelperImpl implements KerberosHelper {
if (ambariServerIdentity != null) {
identities.add(ambariServerIdentity);
}
+
+ // Add the spnego principal for the Ambari server host....
+ KerberosIdentityDescriptor spnegoIdentity = kerberosDescriptor.getIdentity(KerberosHelper.SPNEGO_IDENTITY_NAME);
+ if (spnegoIdentity != null) {
+ identities.add(spnegoIdentity);
+ }
}
}
@@ -2799,7 +2805,7 @@ public class KerberosHelperImpl implements KerberosHelper {
clusterHostInfoJson,
"{}",
hostParamsJson,
- ConfigureAmbariIndetityServerAction.class,
+ ConfigureAmbariIdentitiesServerAction.class,
event,
commandParameters,
"Configure Ambari Identity",
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/security/AmbariEntryPoint.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/AmbariEntryPoint.java b/ambari-server/src/main/java/org/apache/ambari/server/security/AmbariEntryPoint.java
index 2028f46..e37976f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/AmbariEntryPoint.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/AmbariEntryPoint.java
@@ -28,6 +28,19 @@ import java.io.IOException;
public class AmbariEntryPoint implements AuthenticationEntryPoint {
@Override
public void commence(HttpServletRequest request, HttpServletResponse response, AuthenticationException authException) throws IOException, ServletException {
- response.sendError(HttpServletResponse.SC_FORBIDDEN, authException.getMessage());
+ /* *****************************************************************************************
+ * To maintain backward compatibility and respond with the appropriate response when
+ * authentication is needed, by default return an HTTP 403 status.
+ *
+ * However if requested by the user, respond such that the client is challenged to Negotiate
+ * and reissue the request with a Kerberos token. This response is an HTTP 401 status with the
+ * "WWW-Authenticate: Negotiate" header.
+ * ****************************************************************************************** */
+ if ("true".equalsIgnoreCase(request.getHeader("X-Negotiate-Authentication"))) {
+ response.setHeader("WWW-Authenticate", "Negotiate");
+ response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Authentication requested");
+ } else {
+ response.sendError(HttpServletResponse.SC_FORBIDDEN, authException.getMessage());
+ }
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java
new file mode 100644
index 0000000..3c62646
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authentication.kerberos;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.security.authorization.AmbariGrantedAuthority;
+import org.apache.ambari.server.security.authorization.UserType;
+import org.apache.ambari.server.security.authorization.Users;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.security.core.userdetails.User;
+import org.springframework.security.core.userdetails.UserDetails;
+import org.springframework.security.core.userdetails.UserDetailsService;
+import org.springframework.security.core.userdetails.UsernameNotFoundException;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * AmbariAuthToLocalUserDetailsService is a {@link UserDetailsService} that translates
+ * a Kerberos principal name into a local username that may be used when looking up
+ * an Ambari user account.
+ */
+public class AmbariAuthToLocalUserDetailsService implements UserDetailsService {
+ private static final Logger LOG = LoggerFactory.getLogger(AmbariAuthToLocalUserDetailsService.class);
+
+ private final Users users;
+
+ private final List<UserType> userTypeOrder;
+
+ /**
+ * Constructor.
+ * <p>
+ * Given the Ambari {@link Configuration}, initializes the {@link KerberosName} class using
+ * the <code>auth-to-local</code> rules from {@link AmbariKerberosAuthenticationProperties#getAuthToLocalRules()}.
+ *
+ * @param configuration the Ambari configuration data
+ * @param users the Ambari users access object
+ * @throws AmbariException if an error occurs parsing the user-provided auth-to-local rules
+ */
+ public AmbariAuthToLocalUserDetailsService(Configuration configuration, Users users) throws AmbariException {
+ String authToLocalRules = null;
+ List<UserType> orderedUserTypes = null;
+
+ if (configuration != null) {
+ AmbariKerberosAuthenticationProperties properties = configuration.getKerberosAuthenticationProperties();
+
+ if (properties != null) {
+ authToLocalRules = properties.getAuthToLocalRules();
+ orderedUserTypes = properties.getOrderedUserTypes();
+ }
+ }
+
+ if (StringUtils.isEmpty(authToLocalRules)) {
+ authToLocalRules = "DEFAULT";
+ }
+
+ if ((orderedUserTypes == null) || orderedUserTypes.isEmpty()) {
+ orderedUserTypes = Collections.singletonList(UserType.LDAP);
+ }
+
+ KerberosName.setRules(authToLocalRules);
+
+ this.users = users;
+ this.userTypeOrder = orderedUserTypes;
+ }
+
+ @Override
+ public UserDetails loadUserByUsername(String principal) throws UsernameNotFoundException {
+ KerberosName kerberosName = new KerberosName(principal);
+
+ try {
+ String username = kerberosName.getShortName();
+
+ if (username == null) {
+ String message = String.format("Failed to translate %s to a local username during Kerberos authentication.", principal);
+ LOG.warn(message);
+ throw new UsernameNotFoundException(message);
+ }
+
+ LOG.info("Translated {} to {} using auth-to-local rules during Kerberos authentication.", principal, username);
+ return createUser(username);
+ } catch (IOException e) {
+ String message = String.format("Failed to translate %s to a local username during Kerberos authentication: %s", principal, e.getLocalizedMessage());
+ LOG.warn(message);
+ throw new UsernameNotFoundException(message, e);
+ }
+ }
+
+ /**
+ * Given a username, finds an appropriate account in the Ambari database.
+ * <p>
+ * User accounts are searched in order of preferred user type as specified in the Ambari configuration
+ * ({@link Configuration#KERBEROS_AUTH_USER_TYPES}).
+ *
+ * @param username a username
+ * @return the user details of the found user, or <code>null</code> if an appropriate user was not found
+ */
+ private UserDetails createUser(String username) {
+ // Iterate over the ordered user types... when an account for the username/type combination is
+ // found, build the related AmbariUserAuthentication instance and return it. Only the first
+ // match matters... this may be an issue and cause some ambiguity in the event multiple user
+ // types are specified in the configuration and multiple accounts for the same username, but
+ // different types (LOCAL vs LDAP, etc...).
+ for (UserType userType : userTypeOrder) {
+ org.apache.ambari.server.security.authorization.User user = users.getUser(username, userType);
+
+ if (user != null) {
+ Collection<AmbariGrantedAuthority> userAuthorities = users.getUserAuthorities(user.getUserName(), user.getUserType());
+ return new User(username, "", userAuthorities);
+ }
+ }
+
+ String message = String.format("Failed find user account for user with username of %s during Kerberos authentication.", username);
+ LOG.warn(message);
+ throw new UsernameNotFoundException(message);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilter.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilter.java
new file mode 100644
index 0000000..a5a3922
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilter.java
@@ -0,0 +1,172 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authentication.kerberos;
+
+import org.apache.ambari.server.audit.AuditLogger;
+import org.apache.ambari.server.audit.event.AuditEvent;
+import org.apache.ambari.server.audit.event.LoginAuditEvent;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.security.authentication.AmbariAuthenticationFilter;
+import org.apache.ambari.server.security.authorization.AuthorizationHelper;
+import org.apache.ambari.server.security.authorization.PermissionHelper;
+import org.apache.ambari.server.utils.RequestUtils;
+import org.springframework.security.authentication.AuthenticationManager;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.AuthenticationException;
+import org.springframework.security.kerberos.web.authentication.SpnegoAuthenticationProcessingFilter;
+import org.springframework.security.web.AuthenticationEntryPoint;
+import org.springframework.security.web.authentication.AuthenticationFailureHandler;
+import org.springframework.security.web.authentication.AuthenticationSuccessHandler;
+
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+
+/**
+ * AmbariKerberosAuthenticationFilter extends the {@link SpnegoAuthenticationProcessingFilter} class
+ * to perform Kerberos-based authentication for Ambari.
+ * <p>
+ * If configured, auditing is performed using {@link AuditLogger}.
+ */
+public class AmbariKerberosAuthenticationFilter extends SpnegoAuthenticationProcessingFilter implements AmbariAuthenticationFilter {
+
+ /**
+ * Audit logger
+ */
+ private final AuditLogger auditLogger;
+
+ /**
+ * A Boolean value indicating whether Kerberos authentication is enabled or not.
+ */
+ private final boolean kerberosAuthenticationEnabled;
+
+ /**
+ * Constructor.
+ * <p>
+ * Given supplied data, sets up the {@link SpnegoAuthenticationProcessingFilter} to perform
+ * authentication and audit logging if configured to do so.
+ *
+ * @param authenticationManager the Spring authentication manager
+ * @param entryPoint the Spring entry point
+ * @param configuration the Ambari configuration data
+ * @param auditLogger an audit logger
+ * @param permissionHelper a permission helper to aid in audit logging
+ */
+ public AmbariKerberosAuthenticationFilter(AuthenticationManager authenticationManager, final AuthenticationEntryPoint entryPoint, Configuration configuration, final AuditLogger auditLogger, final PermissionHelper permissionHelper) {
+ AmbariKerberosAuthenticationProperties kerberosAuthenticationProperties = (configuration == null)
+ ? null
+ : configuration.getKerberosAuthenticationProperties();
+
+ kerberosAuthenticationEnabled = (kerberosAuthenticationProperties != null) && kerberosAuthenticationProperties.isKerberosAuthenticationEnabled();
+
+ this.auditLogger = auditLogger;
+
+ setAuthenticationManager(authenticationManager);
+
+ setFailureHandler(new AuthenticationFailureHandler() {
+ @Override
+ public void onAuthenticationFailure(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, AuthenticationException e) throws IOException, ServletException {
+ if (auditLogger.isEnabled()) {
+ AuditEvent loginFailedAuditEvent = LoginAuditEvent.builder()
+ .withRemoteIp(RequestUtils.getRemoteAddress(httpServletRequest))
+ .withTimestamp(System.currentTimeMillis())
+ .withReasonOfFailure(e.getLocalizedMessage())
+ .build();
+ auditLogger.log(loginFailedAuditEvent);
+ }
+
+ entryPoint.commence(httpServletRequest, httpServletResponse, e);
+ }
+ });
+
+ setSuccessHandler(new AuthenticationSuccessHandler() {
+ @Override
+ public void onAuthenticationSuccess(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Authentication authentication) throws IOException, ServletException {
+ if (auditLogger.isEnabled()) {
+ AuditEvent loginSucceededAuditEvent = LoginAuditEvent.builder()
+ .withRemoteIp(RequestUtils.getRemoteAddress(httpServletRequest))
+ .withUserName(authentication.getName())
+ .withTimestamp(System.currentTimeMillis())
+ .withRoles(permissionHelper.getPermissionLabels(authentication))
+ .build();
+ auditLogger.log(loginSucceededAuditEvent);
+ }
+ }
+ });
+ }
+
+ /**
+ * Tests to determine if this authentication filter is applicable given the Ambari configuration
+ * and the user's HTTP request.
+ * <p>
+ * If the Ambari configuration indicates the Kerberos authentication is enabled and the HTTP request
+ * contains the appropriate <code>Authorization</code> header, then this filter may be applied;
+ * otherwise it should be skipped.
+ *
+ * @param httpServletRequest the request
+ * @return true if this filter should be applied; false otherwise
+ */
+ @Override
+ public boolean shouldApply(HttpServletRequest httpServletRequest) {
+ if (kerberosAuthenticationEnabled) {
+ String header = httpServletRequest.getHeader("Authorization");
+ return (header != null) && (header.startsWith("Negotiate ") || header.startsWith("Kerberos "));
+ } else {
+ return false;
+ }
+ }
+
+ /**
+ * Performs the logic for this filter.
+ * <p>
+ * Checks whether the authentication information is filled. If it is not, then a login failed audit event is logged.
+ * <p>
+ * Then, forwards the workflow to {@link SpnegoAuthenticationProcessingFilter#doFilter(ServletRequest, ServletResponse, FilterChain)}
+ *
+ * @param servletRequest the request
+ * @param servletResponse the response
+ * @param filterChain the Spring filter chain
+ * @throws IOException
+ * @throws ServletException
+ */
+ @Override
+ public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
+ HttpServletRequest httpServletRequest = (HttpServletRequest) servletRequest;
+
+ if (shouldApply(httpServletRequest)) {
+ if (auditLogger.isEnabled() && (AuthorizationHelper.getAuthenticatedName() == null)) {
+ AuditEvent loginFailedAuditEvent = LoginAuditEvent.builder()
+ .withRemoteIp(RequestUtils.getRemoteAddress(httpServletRequest))
+ .withTimestamp(System.currentTimeMillis())
+ .withReasonOfFailure("Authentication required")
+ .withUserName(null)
+ .build();
+ auditLogger.log(loginFailedAuditEvent);
+ }
+
+ super.doFilter(servletRequest, servletResponse, filterChain);
+ } else {
+ filterChain.doFilter(servletRequest, servletResponse);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidator.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidator.java
new file mode 100644
index 0000000..bb57108
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidator.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authentication.kerberos;
+
+import org.apache.ambari.server.configuration.Configuration;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.core.io.FileSystemResource;
+import org.springframework.core.io.Resource;
+import org.springframework.security.authentication.BadCredentialsException;
+import org.springframework.security.kerberos.authentication.KerberosTicketValidation;
+import org.springframework.security.kerberos.authentication.KerberosTicketValidator;
+import org.springframework.security.kerberos.authentication.sun.SunJaasKerberosTicketValidator;
+
+/**
+ * AmbariKerberosTicketValidator is a {@link KerberosTicketValidator} implementation that delegates
+ * to a {@link SunJaasKerberosTicketValidator}, if Kerberos authentication is enabled.
+ * <p>
+ * If Kerberos authentication is enabled, the following properties are set:
+ * <ul>
+ * <li>{@link SunJaasKerberosTicketValidator#setServicePrincipal(String)} using the Ambari server property from {@link Configuration#KERBEROS_AUTH_SPNEGO_PRINCIPAL}</li>
+ * <li>{@link SunJaasKerberosTicketValidator#setKeyTabLocation(Resource)} using the Ambari server property from {@link Configuration#KERBEROS_AUTH_SPNEGO_KEYTAB_FILE}</li>
+ * </ul>
+ */
+public class AmbariKerberosTicketValidator implements KerberosTicketValidator, InitializingBean {
+
+ private final SunJaasKerberosTicketValidator kerberosTicketValidator;
+
+ /**
+ * Creates a new AmbariKerberosTicketValidator
+ *
+ * @param configuration the Ambari server configuration
+ */
+ public AmbariKerberosTicketValidator(Configuration configuration) {
+
+ AmbariKerberosAuthenticationProperties properties = (configuration == null)
+ ? null
+ : configuration.getKerberosAuthenticationProperties();
+
+ if ((properties != null) && properties.isKerberosAuthenticationEnabled()) {
+ kerberosTicketValidator = new SunJaasKerberosTicketValidator();
+ kerberosTicketValidator.setServicePrincipal(properties.getSpnegoPrincipalName());
+
+ if (properties.getSpnegoKeytabFilePath() != null) {
+ kerberosTicketValidator.setKeyTabLocation(new FileSystemResource(properties.getSpnegoKeytabFilePath()));
+ }
+ } else {
+ // Don't create the SunJaasKerberosTicketValidator if Kerberos authentication is not enabled.
+ kerberosTicketValidator = null;
+ }
+ }
+
+ @Override
+ public void afterPropertiesSet() throws Exception {
+ // If Kerberos authentication is enabled, forward this method invocation to the backing
+ // SunJaasKerberosTicketValidator instance.
+ if (kerberosTicketValidator != null) {
+ kerberosTicketValidator.afterPropertiesSet();
+ }
+ }
+
+ @Override
+ public KerberosTicketValidation validateTicket(byte[] bytes) throws BadCredentialsException {
+ // If Kerberos authentication is enabled, forward this method invocation to the backing
+ // SunJaasKerberosTicketValidator instance.
+ return (kerberosTicketValidator == null)
+ ? null
+ : kerberosTicketValidator.validateTicket(bytes);
+ }
+
+ public void setDebug(boolean debug) {
+ // If Kerberos authentication is enabled, forward this method invocation to the backing
+ // SunJaasKerberosTicketValidator instance.
+ if (kerberosTicketValidator != null) {
+ kerberosTicketValidator.setDebug(debug);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
index b6b0713..c283a65 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
@@ -156,7 +156,15 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
if (ambariServerIdentity != null) {
List<KerberosIdentityDescriptor> componentIdentities = Collections.singletonList(ambariServerIdentity);
kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
- identityFilter, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI_SEVER", "AMBARI_SEVER", kerberosConfigurations, configurations);
+ identityFilter, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI_SERVER", "AMBARI_SERVER", kerberosConfigurations, configurations);
+ propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
+ }
+
+ KerberosIdentityDescriptor spnegoIdentity = kerberosDescriptor.getIdentity(KerberosHelper.SPNEGO_IDENTITY_NAME);
+ if (spnegoIdentity != null) {
+ List<KerberosIdentityDescriptor> componentIdentities = Collections.singletonList(spnegoIdentity);
+ kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
+ identityFilter, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI_SERVER", "SPNEGO", kerberosConfigurations, configurations);
propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
new file mode 100644
index 0000000..9c2c622
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
@@ -0,0 +1,235 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
+import com.google.inject.Inject;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.controller.utilities.KerberosChecker;
+import org.apache.ambari.server.orm.dao.HostDAO;
+import org.apache.ambari.server.orm.dao.KerberosPrincipalHostDAO;
+import org.apache.ambari.server.orm.entities.HostEntity;
+import org.apache.ambari.server.serveraction.ActionLog;
+import org.apache.ambari.server.utils.ShellCommandUtil;
+import org.apache.ambari.server.utils.StageUtils;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.io.FileUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * ConfigureAmbariIdentitiesServerAction is a ServerAction implementation that creates keytab files as
+ * instructed.
+ * <p/>
+ * This class mainly relies on the KerberosServerAction to iterate through metadata identifying
+ * the Kerberos keytab files that need to be created. For each identity in the metadata, this
+ * implementation's
+ * {@link KerberosServerAction#processIdentity(Map, String, KerberosOperationHandler, Map, Map)}
+ * is invoked attempting the creation of the relevant keytab file.
+ */
+public class ConfigureAmbariIdentitiesServerAction extends KerberosServerAction {
+
+
+ private static final String KEYTAB_PATTERN = "keyTab=\"(.+)?\"";
+ private static final String PRINCIPAL_PATTERN = "principal=\"(.+)?\"";
+
+ private final static Logger LOG = LoggerFactory.getLogger(ConfigureAmbariIdentitiesServerAction.class);
+
+ @Inject
+ private KerberosPrincipalHostDAO kerberosPrincipalHostDAO;
+
+ @Inject
+ private HostDAO hostDAO;
+
+ /**
+ * Called to execute this action. Upon invocation, calls
+ * {@link KerberosServerAction#processIdentities(Map)}
+ * to iterate through the Kerberos identity metadata and call
+ * {@link ConfigureAmbariIdentitiesServerAction#processIdentity(Map, String, KerberosOperationHandler, Map, Map)}
+ * for each identity to process.
+ *
+ * @param requestSharedDataContext a Map to be used as shared data among all ServerActions related
+ * to a given request
+ * @return a CommandReport indicating the result of this action
+ * @throws AmbariException
+ * @throws InterruptedException
+ */
+ @Override
+ public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext) throws
+ AmbariException, InterruptedException {
+ return processIdentities(requestSharedDataContext);
+ }
+
+
+ /**
+ * Creates keytab file for ambari-server identity.
+ * <p/>
+ * It is expected that the {@link CreatePrincipalsServerAction}
+ * (or similar) and {@link CreateKeytabFilesServerAction} has executed before this action.
+ *
+ * @param identityRecord a Map containing the data for the current identity record
+ * @param evaluatedPrincipal a String indicating the relevant principal
+ * @param operationHandler a KerberosOperationHandler used to perform Kerberos-related
+ * tasks for specific Kerberos implementations
+ * (MIT, Active Directory, etc...)
+ * @param kerberosConfiguration a Map of configuration properties from kerberos-env
+ * @param requestSharedDataContext a Map to be used as shared data among all ServerActions related
+ * to a given request @return a CommandReport, indicating an error
+ * condition; or null, indicating a success condition
+ * @throws AmbariException if an error occurs while processing the identity record
+ */
+ @Override
+ protected CommandReport processIdentity(Map<String, String> identityRecord, String evaluatedPrincipal,
+ KerberosOperationHandler operationHandler,
+ Map<String, String> kerberosConfiguration,
+ Map<String, Object> requestSharedDataContext)
+ throws AmbariException {
+ CommandReport commandReport = null;
+
+ if (identityRecord != null) {
+ String message;
+ String dataDirectory = getDataDirectoryPath();
+
+ if (dataDirectory == null) {
+ message = "The data directory has not been set. Generated keytab files can not be stored.";
+ LOG.error(message);
+ commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+ } else {
+
+ String hostName = identityRecord.get(KerberosIdentityDataFileReader.HOSTNAME);
+ if (hostName != null && hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME)) {
+ String destKeytabFilePath = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH);
+ File hostDirectory = new File(dataDirectory, hostName);
+ File srcKeytabFile = new File(hostDirectory, DigestUtils.sha1Hex(destKeytabFilePath));
+
+ if (srcKeytabFile.exists()) {
+ installAmbariServerIdentity(evaluatedPrincipal, srcKeytabFile.getAbsolutePath(), destKeytabFilePath, actionLog);
+
+ if ("AMBARI_SERVER".equals(identityRecord.get(KerberosIdentityDataFileReader.COMPONENT))) {
+ // Create/update the JAASFile...
+ configureJAAS(evaluatedPrincipal, destKeytabFilePath, actionLog);
+ }
+ }
+ }
+ }
+ }
+
+ return commandReport;
+ }
+
+ /**
+ * Installs the Ambari Server Kerberos identity by copying its keytab file to the specified location
+ * and then creating the Ambari Server JAAS File.
+ *
+ * @param principal the ambari server principal name
+ * @param srcKeytabFilePath the source location of the ambari server keytab file
+ * @param destKeytabFilePath the destination location of the ambari server keytab file
+ * @param actionLog the logger
+ * @return true if success; false otherwise
+ * @throws AmbariException
+ */
+ public boolean installAmbariServerIdentity(String principal,
+ String srcKeytabFilePath,
+ String destKeytabFilePath,
+ ActionLog actionLog) throws AmbariException {
+
+ // Use sudo to copy the file into place....
+ try {
+ ShellCommandUtil.Result result;
+
+ // Ensure the parent directory exists...
+ File destKeytabFile = new File(destKeytabFilePath);
+ result = ShellCommandUtil.mkdir(destKeytabFile.getParent(), true);
+ if (!result.isSuccessful()) {
+ throw new AmbariException(result.getStderr());
+ }
+
+ // Copy the keytab file into place...
+ result = ShellCommandUtil.copyFile(srcKeytabFilePath, destKeytabFilePath, true, true);
+ if (!result.isSuccessful()) {
+ throw new AmbariException(result.getStderr());
+ } else {
+ String ambariServerHostName = StageUtils.getHostName();
+ HostEntity ambariServerHostEntity = hostDAO.findByName(ambariServerHostName);
+ Long ambariServerHostID = (ambariServerHostEntity == null)
+ ? null
+ : ambariServerHostEntity.getHostId();
+
+ if (ambariServerHostID == null) {
+ String message = String.format("Failed to add the kerberos_principal_host record for %s on " +
+ "the Ambari server host since the host id for Ambari server host, %s, was not found." +
+ " This is not an error if an Ambari agent is not installed on the Ambari server host.",
+ principal, ambariServerHostName);
+ LOG.warn(message);
+ actionLog.writeStdErr(message);
+ } else if (!kerberosPrincipalHostDAO.exists(principal, ambariServerHostID)) {
+ kerberosPrincipalHostDAO.create(principal, ambariServerHostID);
+ }
+
+ actionLog.writeStdOut(String.format("Created Ambari server keytab file for %s at %s", principal, destKeytabFile));
+ }
+ } catch (InterruptedException | IOException e) {
+ throw new AmbariException(e.getLocalizedMessage(), e);
+ }
+
+ return true;
+ }
+
+ private void configureJAAS(String evaluatedPrincipal, String keytabFilePath, ActionLog actionLog) {
+ String jaasConfPath = System.getProperty(KerberosChecker.JAVA_SECURITY_AUTH_LOGIN_CONFIG);
+ if (jaasConfPath != null) {
+ File jaasConfigFile = new File(jaasConfPath);
+ try {
+ String jaasConfig = FileUtils.readFileToString(jaasConfigFile);
+ File oldJaasConfigFile = new File(jaasConfPath + ".bak");
+ FileUtils.writeStringToFile(oldJaasConfigFile, jaasConfig);
+ jaasConfig = jaasConfig.replaceFirst(KEYTAB_PATTERN, "keyTab=\"" + keytabFilePath + "\"");
+ jaasConfig = jaasConfig.replaceFirst(PRINCIPAL_PATTERN, "principal=\"" + evaluatedPrincipal + "\"");
+ FileUtils.writeStringToFile(jaasConfigFile, jaasConfig);
+ String message = String.format("JAAS config file %s modified successfully for principal %s.", jaasConfigFile
+ .getName(), evaluatedPrincipal);
+ if (actionLog != null) {
+ actionLog.writeStdOut(message);
+ }
+ } catch (IOException e) {
+ String message = String.format("Failed to configure JAAS file %s for %s - %s", jaasConfigFile,
+ evaluatedPrincipal, e.getMessage());
+ if (actionLog != null) {
+ actionLog.writeStdErr(message);
+ }
+ LOG.error(message, e);
+ }
+ } else {
+ String message = String.format("Failed to configure JAAS, config file should be passed to Ambari server as: " +
+ "%s.", KerberosChecker.JAVA_SECURITY_AUTH_LOGIN_CONFIG);
+ if (actionLog != null) {
+ actionLog.writeStdErr(message);
+ }
+ LOG.error(message);
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java
deleted file mode 100644
index 96540ef..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.serveraction.kerberos;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.ConcurrentMap;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.actionmanager.HostRoleStatus;
-import org.apache.ambari.server.agent.CommandReport;
-import org.apache.ambari.server.controller.KerberosHelper;
-import org.apache.ambari.server.controller.utilities.KerberosChecker;
-import org.apache.ambari.server.serveraction.ActionLog;
-import org.apache.ambari.server.utils.ShellCommandUtil;
-import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.commons.io.FileUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * ConfigureAmbariIndetityServerAction is a ServerAction implementation that creates keytab files as
- * instructed.
- * <p/>
- * This class mainly relies on the KerberosServerAction to iterate through metadata identifying
- * the Kerberos keytab files that need to be created. For each identity in the metadata, this
- * implementation's
- * {@link KerberosServerAction#processIdentity(Map, String, KerberosOperationHandler, Map, Map)}
- * is invoked attempting the creation of the relevant keytab file.
- */
-public class ConfigureAmbariIndetityServerAction extends KerberosServerAction {
-
-
- private static final String KEYTAB_PATTERN = "keyTab=\"(.+)?\"";
- private static final String PRINCIPAL_PATTERN = "principal=\"(.+)?\"";
-
- private final static Logger LOG = LoggerFactory.getLogger(ConfigureAmbariIndetityServerAction.class);
-
- /**
- * Called to execute this action. Upon invocation, calls
- * {@link KerberosServerAction#processIdentities(Map)} )}
- * to iterate through the Kerberos identity metadata and call
- * {@link ConfigureAmbariIndetityServerAction#processIdentities(Map)}
- * for each identity to process.
- *
- * @param requestSharedDataContext a Map to be used a shared data among all ServerActions related
- * to a given request
- * @return a CommandReport indicating the result of this action
- * @throws AmbariException
- * @throws InterruptedException
- */
- @Override
- public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext) throws
- AmbariException, InterruptedException {
- return processIdentities(requestSharedDataContext);
- }
-
-
- /**
- * Creates keytab file for ambari-server identity.
- * <p/>
- * It is expected that the {@link CreatePrincipalsServerAction}
- * (or similar) and {@link CreateKeytabFilesServerAction} has executed before this action.
- *
- * @param identityRecord a Map containing the data for the current identity record
- * @param evaluatedPrincipal a String indicating the relevant principal
- * @param operationHandler a KerberosOperationHandler used to perform Kerberos-related
- * tasks for specific Kerberos implementations
- * (MIT, Active Directory, etc...)
- * @param kerberosConfiguration a Map of configuration properties from kerberos-env
- * @param requestSharedDataContext a Map to be used a shared data among all ServerActions related
- * to a given request @return a CommandReport, indicating an error
- * condition; or null, indicating a success condition
- * @throws AmbariException if an error occurs while processing the identity record
- */
- @Override
- protected CommandReport processIdentity(Map<String, String> identityRecord, String evaluatedPrincipal,
- KerberosOperationHandler operationHandler,
- Map<String, String> kerberosConfiguration,
- Map<String, Object> requestSharedDataContext)
- throws AmbariException {
- CommandReport commandReport = null;
-
- if (identityRecord != null) {
- String message;
- String dataDirectory = getDataDirectoryPath();
-
- if (operationHandler == null) {
- message = String.format("Failed to create keytab file for %s, missing KerberosOperationHandler", evaluatedPrincipal);
- actionLog.writeStdErr(message);
- LOG.error(message);
- commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
- } else if (dataDirectory == null) {
- message = "The data directory has not been set. Generated keytab files can not be stored.";
- LOG.error(message);
- commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
- } else {
-
- String hostName = identityRecord.get(KerberosIdentityDataFileReader.HOSTNAME);
- if (hostName != null && hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME)) {
- String destKeytabFilePath = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH);
- File hostDirectory = new File(dataDirectory, hostName);
- File srcKeytabFile = new File(hostDirectory, DigestUtils.sha1Hex(destKeytabFilePath));
-
- if(srcKeytabFile.exists()) {
- installAmbariServerIdentity(evaluatedPrincipal, srcKeytabFile.getAbsolutePath(), destKeytabFilePath, actionLog);
- }
- }
- }
- }
-
- return commandReport;
- }
-
- /**
- * Installs the Ambari Server Kerberos identity by copying its keytab file to the specified location
- * and then creating the Ambari Server JAAS File.
- *
- * @param principal the ambari server principal name
- * @param srcKeytabFilePath the source location of the ambari server keytab file
- * @param destKeytabFilePath the destination location of the ambari server keytab file
- * @param actionLog the logger
- * @return true if success; false otherwise
- * @throws AmbariException
- */
- public boolean installAmbariServerIdentity(String principal,
- String srcKeytabFilePath,
- String destKeytabFilePath,
- ActionLog actionLog) throws AmbariException {
-
- // Use sudo to copy the file into place....
- try {
- ShellCommandUtil.Result result;
-
- // Ensure the parent directory exists...
- File destKeytabFile = new File(destKeytabFilePath);
- result = ShellCommandUtil.mkdir(destKeytabFile.getParent(), true);
- if (!result.isSuccessful()) {
- throw new AmbariException(result.getStderr());
- }
-
- // Copy the keytab file into place...
- result = ShellCommandUtil.copyFile(srcKeytabFilePath, destKeytabFilePath, true, true);
- if (!result.isSuccessful()) {
- throw new AmbariException(result.getStderr());
- }
- } catch (InterruptedException | IOException e) {
- throw new AmbariException(e.getLocalizedMessage(), e);
- }
-
- // Create/update the JAASFile...
- configureJAAS(principal, destKeytabFilePath, actionLog);
-
- return true;
- }
-
- private void configureJAAS(String evaluatedPrincipal, String keytabFilePath, ActionLog actionLog) {
- String jaasConfPath = System.getProperty(KerberosChecker.JAVA_SECURITY_AUTH_LOGIN_CONFIG);
- if (jaasConfPath != null) {
- File jaasConfigFile = new File(jaasConfPath);
- try {
- String jaasConfig = FileUtils.readFileToString(jaasConfigFile);
- File oldJaasConfigFile = new File(jaasConfPath + ".bak");
- FileUtils.writeStringToFile(oldJaasConfigFile, jaasConfig);
- jaasConfig = jaasConfig.replaceFirst(KEYTAB_PATTERN, "keyTab=\"" + keytabFilePath + "\"");
- jaasConfig = jaasConfig.replaceFirst(PRINCIPAL_PATTERN, "principal=\"" + evaluatedPrincipal + "\"");
- FileUtils.writeStringToFile(jaasConfigFile, jaasConfig);
- String message = String.format("JAAS config file %s modified successfully for principal %s.", jaasConfigFile
- .getName(), evaluatedPrincipal);
- if (actionLog != null) {
- actionLog.writeStdOut(message);
- }
- } catch (IOException e) {
- String message = String.format("Failed to configure JAAS file %s for %s - %s", jaasConfigFile,
- evaluatedPrincipal, e.getMessage());
- if (actionLog != null) {
- actionLog.writeStdErr(message);
- }
- LOG.error(message, e);
- }
- } else {
- String message = String.format("Failed to configure JAAS, config file should be passed to Ambari server as: " +
- "%s.", KerberosChecker.JAVA_SECURITY_AUTH_LOGIN_CONFIG);
- if (actionLog != null) {
- actionLog.writeStdErr(message);
- }
- LOG.error(message);
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
index e31e6ff..b99c25a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
@@ -18,7 +18,6 @@
package org.apache.ambari.server.serveraction.kerberos;
-import com.google.common.base.Optional;
import com.google.inject.Inject;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
@@ -145,7 +144,7 @@ public class CreatePrincipalsServerAction extends KerberosServerAction {
// This principal has been processed and a keytab file has been distributed... do not process it.
processPrincipal = false;
} else {
- // This principal has been processed but a keytab file for it has been distributed... process it.
+ // This principal has been processed but a keytab file for it has not been distributed... process it.
processPrincipal = true;
}
}
@@ -232,7 +231,7 @@ public class CreatePrincipalsServerAction extends KerberosServerAction {
String password = securePasswordHelper.createSecurePassword(length, minLowercaseLetters, minUppercaseLetters, minDigits, minPunctuation, minWhitespace);
try {
- /**
+ /*
* true indicates a new principal was created, false indicates an existing principal was updated
*/
boolean created;
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
index db210e0..1d8c1ca 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
@@ -28,6 +28,7 @@ import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
import org.apache.ambari.server.serveraction.AbstractServerAction;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.utils.StageUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -514,12 +515,19 @@ public abstract class KerberosServerAction extends AbstractServerAction {
if (record != null) {
String principal = record.get(KerberosIdentityDataFileReader.PRINCIPAL);
- String host = record.get(KerberosIdentityDataFileReader.HOSTNAME);
if (principal != null) {
+ String hostname = record.get(KerberosIdentityDataFileReader.HOSTNAME);
+
+ if(KerberosHelper.AMBARI_SERVER_HOST_NAME.equals(hostname)) {
+ // Replace KerberosHelper.AMBARI_SERVER_HOST_NAME with the actual hostname where the Ambari
+ // server is... this host
+ hostname = StageUtils.getHostName();
+ }
+
// Evaluate the principal "pattern" found in the record to generate the "evaluated principal"
// by replacing the _HOST and _REALM variables.
- String evaluatedPrincipal = principal.replace("_HOST", host).replace("_REALM", defaultRealm);
+ String evaluatedPrincipal = principal.replace("_HOST", hostname).replace("_REALM", defaultRealm);
commandReport = processIdentity(record, evaluatedPrincipal, operationHandler, kerberosConfiguration, requestSharedDataContext);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml b/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
index a86973c..500c0bf 100644
--- a/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
+++ b/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
@@ -30,16 +30,11 @@
<custom-filter ref="ambariAuthorizationFilter" before="FILTER_SECURITY_INTERCEPTOR"/>
</http>
- <!--<ldap-server id="ldapServer" root="dc=ambari,dc=apache,dc=org"/>-->
-
<authentication-manager alias="authenticationManager">
-
<authentication-provider ref="ambariLocalAuthenticationProvider"/>
-
<authentication-provider ref="ambariLdapAuthenticationProvider"/>
-
<authentication-provider ref="ambariInternalAuthenticationProvider"/>
-
+ <authentication-provider ref="kerberosServiceAuthenticationProvider"/>
</authentication-manager>
<beans:bean id="ambariEntryPoint" class="org.apache.ambari.server.security.AmbariEntryPoint">
@@ -49,6 +44,7 @@
<beans:constructor-arg>
<beans:list>
<beans:ref bean="ambariBasicAuthenticationFilter"/>
+ <beans:ref bean="ambariKerberosAuthenticationFilter"/>
<beans:ref bean="ambariJwtAuthenticationFilter"/>
</beans:list>
</beans:constructor-arg>
@@ -69,6 +65,14 @@
<beans:constructor-arg ref="permissionHelper"/>
</beans:bean>
+ <beans:bean id="ambariKerberosAuthenticationFilter" class="org.apache.ambari.server.security.authentication.kerberos.AmbariKerberosAuthenticationFilter">
+ <beans:constructor-arg ref="authenticationManager"/>
+ <beans:constructor-arg ref="ambariEntryPoint"/>
+ <beans:constructor-arg ref="ambariConfiguration"/>
+ <beans:constructor-arg ref="auditLogger"/>
+ <beans:constructor-arg ref="permissionHelper"/>
+ </beans:bean>
+
<beans:bean id="ambariAuthorizationFilter" class="org.apache.ambari.server.security.authorization.AmbariAuthorizationFilter">
<beans:constructor-arg ref="ambariEntryPoint"/>
<beans:constructor-arg ref="ambariConfiguration"/>
@@ -77,4 +81,20 @@
<beans:constructor-arg ref="permissionHelper"/>
</beans:bean>
+ <beans:bean id="kerberosServiceAuthenticationProvider" class="org.springframework.security.kerberos.authentication.KerberosServiceAuthenticationProvider">
+ <beans:property name="ticketValidator">
+ <beans:bean class="org.apache.ambari.server.security.authentication.kerberos.AmbariKerberosTicketValidator">
+ <beans:constructor-arg ref="ambariConfiguration"/>
+ <beans:property name="debug" value="false"/>
+ </beans:bean>
+ </beans:property>
+
+ <beans:property name="userDetailsService" ref="authToLocalUserDetailsService"/>
+ </beans:bean>
+
+ <beans:bean id="authToLocalUserDetailsService" class="org.apache.ambari.server.security.authentication.kerberos.AmbariAuthToLocalUserDetailsService">
+ <beans:constructor-arg ref="ambariConfiguration"/>
+ <beans:constructor-arg ref="ambariUsers"/>
+ </beans:bean>
+
</beans:beans>
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index 3c97ce9..7e6a056 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -104,7 +104,6 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
-import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -3755,6 +3754,7 @@ public class KerberosHelperTest extends EasyMockSupport {
KerberosDescriptor kerberosDescriptor = createMock(KerberosDescriptor.class);
if (createAmbariPrincipal) {
expect(kerberosDescriptor.getIdentity(KerberosHelper.AMBARI_IDENTITY_NAME)).andReturn(ambariKerberosIdentity).once();
+ expect(kerberosDescriptor.getIdentity(KerberosHelper.SPNEGO_IDENTITY_NAME)).andReturn(ambariKerberosIdentity).once();
}
List<KerberosIdentityDescriptor> identities = new ArrayList<KerberosIdentityDescriptor>();
@@ -3764,12 +3764,12 @@ public class KerberosHelperTest extends EasyMockSupport {
// Needed by infrastructure
injector.getInstance(AmbariMetaInfo.class).init();
- kerberosHelper.addAmbariServerIdentity(kerberosEnvProperties, kerberosDescriptor, identities);
+ kerberosHelper.addAmbariServerIdentities(kerberosEnvProperties, kerberosDescriptor, identities);
verifyAll();
if (createAmbariPrincipal) {
- Assert.assertEquals(1, identities.size());
+ Assert.assertEquals(2, identities.size());
Assert.assertSame(ambariKerberosIdentity, identities.get(0));
} else {
Assert.assertTrue(identities.isEmpty());
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java
new file mode 100644
index 0000000..4b93f2f
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsServiceTest.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authentication.kerberos;
+
+import junit.framework.Assert;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.security.authorization.AmbariGrantedAuthority;
+import org.apache.ambari.server.security.authorization.User;
+import org.apache.ambari.server.security.authorization.UserType;
+import org.apache.ambari.server.security.authorization.Users;
+import org.easymock.EasyMockSupport;
+import org.junit.Test;
+import org.springframework.security.core.userdetails.UserDetails;
+import org.springframework.security.core.userdetails.UserDetailsService;
+import org.springframework.security.core.userdetails.UsernameNotFoundException;
+
+import java.util.Collection;
+import java.util.Collections;
+
+import static org.easymock.EasyMock.expect;
+
+public class AmbariAuthToLocalUserDetailsServiceTest extends EasyMockSupport {
+ @Test
+ public void loadUserByUsernameSuccess() throws Exception {
+ AmbariKerberosAuthenticationProperties properties = new AmbariKerberosAuthenticationProperties();
+
+ Configuration configuration = createMock(Configuration.class);
+ expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
+
+ User user = createMock(User.class);
+ expect(user.getUserName()).andReturn("user1").once();
+ expect(user.getUserType()).andReturn(UserType.LDAP).once();
+
+ Collection<AmbariGrantedAuthority> userAuthorities = Collections.singletonList(createNiceMock(AmbariGrantedAuthority.class));
+
+ Users users = createMock(Users.class);
+ expect(users.getUser("user1", UserType.LDAP)).andReturn(user).once();
+ expect(users.getUserAuthorities("user1", UserType.LDAP)).andReturn(userAuthorities).once();
+
+ replayAll();
+
+ // These system properties need to be set to properly configure the KerberosName object when
+ // a krb5.conf file is not available
+ System.setProperty("java.security.krb5.realm", "EXAMPLE.COM");
+ System.setProperty("java.security.krb5.kdc", "localhost");
+
+ UserDetailsService userdetailsService = new AmbariAuthToLocalUserDetailsService(configuration, users);
+
+ UserDetails userDetails = userdetailsService.loadUserByUsername("user1@EXAMPLE.COM");
+
+ verifyAll();
+
+ Assert.assertNotNull(userDetails);
+ Assert.assertEquals("user1", userDetails.getUsername());
+ Assert.assertEquals(userAuthorities.size(), userDetails.getAuthorities().size());
+ Assert.assertEquals("", userDetails.getPassword());
+ }
+
+ @Test(expected = UsernameNotFoundException.class)
+ public void loadUserByUsernameUserNotFound() throws Exception {
+ AmbariKerberosAuthenticationProperties properties = new AmbariKerberosAuthenticationProperties();
+
+ Configuration configuration = createMock(Configuration.class);
+ expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
+
+ Users users = createMock(Users.class);
+ expect(users.getUser("user1", UserType.LDAP)).andReturn(null).once();
+ expect(users.getUser("user1", UserType.LOCAL)).andReturn(null).once();
+
+ replayAll();
+
+ UserDetailsService userdetailsService = new AmbariAuthToLocalUserDetailsService(configuration, users);
+
+ userdetailsService.loadUserByUsername("user1@EXAMPLE.COM");
+
+ verifyAll();
+
+ Assert.fail("UsernameNotFoundException was not thrown");
+ }
+
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilterTest.java
new file mode 100644
index 0000000..d855cda
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosAuthenticationFilterTest.java
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authentication.kerberos;
+
+import org.apache.ambari.server.audit.AuditLogger;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.security.authorization.PermissionHelper;
+import org.easymock.EasyMockSupport;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.security.authentication.AuthenticationManager;
+import org.springframework.security.web.AuthenticationEntryPoint;
+
+import javax.servlet.http.HttpServletRequest;
+
+import static org.easymock.EasyMock.expect;
+
+public class AmbariKerberosAuthenticationFilterTest extends EasyMockSupport {
+ @Test
+ public void shouldApplyTrue() throws Exception {
+ HttpServletRequest httpServletRequest = createMock(HttpServletRequest.class);
+ expect(httpServletRequest.getHeader("Authorization")).andReturn("Negotiate .....").once();
+
+ AmbariKerberosAuthenticationProperties properties = createMock(AmbariKerberosAuthenticationProperties.class);
+ expect(properties.isKerberosAuthenticationEnabled()).andReturn(true).once();
+
+ Configuration configuration = createMock(Configuration.class);
+ expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
+
+ AuthenticationManager authenticationManager = createMock(AuthenticationManager.class);
+ AuthenticationEntryPoint entryPoint = createMock(AuthenticationEntryPoint.class);
+ AuditLogger auditLogger = createMock(AuditLogger.class);
+ PermissionHelper permissionHelper = createMock(PermissionHelper.class);
+
+ replayAll();
+
+ AmbariKerberosAuthenticationFilter filter = new AmbariKerberosAuthenticationFilter(
+ authenticationManager,
+ entryPoint,
+ configuration,
+ auditLogger,
+ permissionHelper
+ );
+
+ Assert.assertTrue(filter.shouldApply(httpServletRequest));
+
+ verifyAll();
+ }
+
+ @Test
+ public void shouldApplyFalseMissingHeader() throws Exception {
+ HttpServletRequest httpServletRequest = createMock(HttpServletRequest.class);
+ expect(httpServletRequest.getHeader("Authorization")).andReturn(null).once();
+
+ AmbariKerberosAuthenticationProperties properties = createMock(AmbariKerberosAuthenticationProperties.class);
+ expect(properties.isKerberosAuthenticationEnabled()).andReturn(true).once();
+
+ Configuration configuration = createMock(Configuration.class);
+ expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
+
+ AuthenticationManager authenticationManager = createMock(AuthenticationManager.class);
+ AuthenticationEntryPoint entryPoint = createMock(AuthenticationEntryPoint.class);
+ AuditLogger auditLogger = createMock(AuditLogger.class);
+ PermissionHelper permissionHelper = createMock(PermissionHelper.class);
+
+ replayAll();
+
+ AmbariKerberosAuthenticationFilter filter = new AmbariKerberosAuthenticationFilter(
+ authenticationManager,
+ entryPoint,
+ configuration,
+ auditLogger,
+ permissionHelper
+ );
+
+ Assert.assertFalse(filter.shouldApply(httpServletRequest));
+
+ verifyAll();
+ }
+
+ @Test
+ public void shouldApplyNotFalseEnabled() throws Exception {
+ HttpServletRequest httpServletRequest = createMock(HttpServletRequest.class);
+
+ AmbariKerberosAuthenticationProperties properties = createMock(AmbariKerberosAuthenticationProperties.class);
+ expect(properties.isKerberosAuthenticationEnabled()).andReturn(false).once();
+
+ Configuration configuration = createMock(Configuration.class);
+ expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
+
+ AuthenticationManager authenticationManager = createMock(AuthenticationManager.class);
+ AuthenticationEntryPoint entryPoint = createMock(AuthenticationEntryPoint.class);
+ AuditLogger auditLogger = createMock(AuditLogger.class);
+ PermissionHelper permissionHelper = createMock(PermissionHelper.class);
+
+ replayAll();
+
+ AmbariKerberosAuthenticationFilter filter = new AmbariKerberosAuthenticationFilter(
+ authenticationManager,
+ entryPoint,
+ configuration,
+ auditLogger,
+ permissionHelper
+ );
+
+ Assert.assertFalse(filter.shouldApply(httpServletRequest));
+
+ verifyAll();
+ }
+
+ @Test
+ public void doFilter() throws Exception {
+ // Skip this test since the real work is being done by SpnegoAuthenticationProcessingFilter, which
+ // is a class in the Spring libraries.
+ }
+
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e08470c/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidatorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidatorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidatorTest.java
new file mode 100644
index 0000000..9bc87a4
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authentication/kerberos/AmbariKerberosTicketValidatorTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authentication.kerberos;
+
+import org.apache.ambari.server.configuration.Configuration;
+import org.easymock.EasyMockSupport;
+import org.junit.Test;
+
+import static org.easymock.EasyMock.expect;
+
+public class AmbariKerberosTicketValidatorTest extends EasyMockSupport {
+
+ /**
+ * Tests an {@link AmbariKerberosTicketValidator} to ensure that the Spnego identity is properly
+ * set in the base class during construction.
+ */
+ @Test
+ public void testConstructor() throws NoSuchMethodException {
+ AmbariKerberosAuthenticationProperties properties = createMock(AmbariKerberosAuthenticationProperties.class);
+ expect(properties.isKerberosAuthenticationEnabled()).andReturn(true).once();
+ expect(properties.getSpnegoPrincipalName()).andReturn("HTTP/somehost.example.com").times(1);
+ expect(properties.getSpnegoKeytabFilePath()).andReturn("/etc/security/keytabs/spnego.service.keytab").times(2);
+
+ Configuration configuration = createMock(Configuration.class);
+ expect(configuration.getKerberosAuthenticationProperties()).andReturn(properties).once();
+
+ replayAll();
+
+ new AmbariKerberosTicketValidator(configuration);
+
+ verifyAll();
+ }
+}
\ No newline at end of file
[11/16] ambari git commit: AMBARI-18415. Stack definition: service
component 'cardinality' inheritance not working properly (aonishuk)
Posted by nc...@apache.org.
AMBARI-18415. Stack definition: service component 'cardinality' inheritance not working properly (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/533015b4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/533015b4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/533015b4
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 533015b4f26fa24db392271ec07d16825a03030c
Parents: 2c7bd2f
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Sep 22 17:40:45 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Sep 22 17:40:45 2016 +0300
----------------------------------------------------------------------
.../src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/533015b4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml
index 1a3e2e3..4ba3cf1 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml
@@ -233,6 +233,7 @@
<component>
<name>HCAT</name>
<displayName>HCat Client</displayName>
+ <cardinality>0+</cardinality>
<category>CLIENT</category>
<versionAdvertised>true</versionAdvertised>
<commandScript>
[12/16] ambari git commit: AMBARI-18446. Review necessity of xmllint
and remove libxml2-utils package from Oozie manifest (aonishuk)
Posted by nc...@apache.org.
AMBARI-18446. Review necessity of xmllint and remove libxml2-utils package from Oozie manifest (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/39ed8e4f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/39ed8e4f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/39ed8e4f
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 39ed8e4f621a8dcdc55dde8584029aa7a4636527
Parents: 533015b
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Sep 22 17:43:09 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Sep 22 17:43:09 2016 +0300
----------------------------------------------------------------------
.../common-services/OOZIE/4.0.0.2.0/metainfo.xml | 3 ---
.../OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh | 5 -----
.../4.0.0.2.0/package/scripts/service_check.py | 13 -------------
.../common-services/OOZIE/4.2.0.2.3/metainfo.xml | 3 ---
.../stacks/HDP/2.1/services/OOZIE/metainfo.xml | 3 ---
.../stacks/HDP/2.2/services/OOZIE/metainfo.xml | 3 ---
.../stacks/2.0.6/OOZIE/test_oozie_service_check.py | 8 --------
.../python/stacks/2.0.6/OOZIE/test_service_check.py | 16 ----------------
8 files changed, 54 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/39ed8e4f/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/metainfo.xml b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/metainfo.xml
index 58d2581..66d272a 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/metainfo.xml
@@ -142,9 +142,6 @@
<package>
<name>extjs</name>
</package>
- <package>
- <name>libxml2-utils</name>
- </package>
</packages>
</osSpecific>
http://git-wip-us.apache.org/repos/asf/ambari/blob/39ed8e4f/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
index df86eef..71a0c85 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
@@ -34,11 +34,6 @@ export smoke_user_keytab=${11}
export kinit_path_local=${12}
export smokeuser_principal=${13}
-function getValueFromField {
- xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
- return $?
-}
-
function checkOozieJobStatus {
local job_id=$1
local num_of_tries=$2
http://git-wip-us.apache.org/repos/asf/ambari/blob/39ed8e4f/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
index 49fc129..3710117 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
@@ -26,8 +26,6 @@ from resource_management.core.source import StaticFile
from resource_management.core.system import System
from resource_management.libraries.functions import format
from resource_management.libraries.script import Script
-from resource_management.libraries.resources.xml_config import XmlConfig
-from resource_management.core.exceptions import Fail
from ambari_commons.os_family_impl import OsFamilyImpl
from ambari_commons import OSConst
@@ -47,17 +45,6 @@ class OozieServiceCheckDefault(OozieServiceCheck):
prepare_hdfs_file_name = 'prepareOozieHdfsDirectories.sh'
smoke_test_file_name = 'oozieSmoke2.sh'
- if 'yarn-site' in params.config['configurations']:
- XmlConfig("yarn-site.xml",
- conf_dir=params.hadoop_conf_dir,
- configurations=params.config['configurations']['yarn-site'],
- owner=params.hdfs_user,
- group=params.user_group,
- mode=0644
- )
- else:
- raise Fail("yarn-site.xml was not present in config parameters.")
-
OozieServiceCheckDefault.oozie_smoke_shell_file(smoke_test_file_name, prepare_hdfs_file_name)
@staticmethod
http://git-wip-us.apache.org/repos/asf/ambari/blob/39ed8e4f/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/metainfo.xml b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/metainfo.xml
index f9b0f0b..96ac9ce 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/metainfo.xml
@@ -155,9 +155,6 @@
<package>
<name>extjs</name>
</package>
- <package>
- <name>libxml2-utils</name>
- </package>
</packages>
</osSpecific>
</osSpecifics>
http://git-wip-us.apache.org/repos/asf/ambari/blob/39ed8e4f/ambari-server/src/main/resources/stacks/HDP/2.1/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/OOZIE/metainfo.xml
index 997c865..eab7ae0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/OOZIE/metainfo.xml
@@ -56,9 +56,6 @@
<package>
<name>extjs</name>
</package>
- <package>
- <name>libxml2-utils</name>
- </package>
</packages>
</osSpecific>
http://git-wip-us.apache.org/repos/asf/ambari/blob/39ed8e4f/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
index 8de041a..cb84a5d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
@@ -97,9 +97,6 @@
<package>
<name>extjs</name>
</package>
- <package>
- <name>libxml2-utils</name>
- </package>
</packages>
</osSpecific>
</osSpecifics>
http://git-wip-us.apache.org/repos/asf/ambari/blob/39ed8e4f/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
index 22d7d63..c3b4ce6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
@@ -53,14 +53,6 @@ class TestOozieServiceCheck(RMFTestCase):
self.maxDiff = None
- self.assertResourceCalled('XmlConfig',
- "yarn-site.xml",
- owner = "hdfs",
- group = "hadoop",
- mode = 0644,
- conf_dir="/usr/hdp/current/hadoop-client/conf",
- configurations = json_content['configurations']['yarn-site'])
-
self.assertResourceCalled('File',
"/tmp/oozieSmoke2.sh",
content = StaticFile("oozieSmoke2.sh"),
http://git-wip-us.apache.org/repos/asf/ambari/blob/39ed8e4f/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
index c96faff..2626578 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
@@ -37,14 +37,6 @@ class TestServiceCheck(RMFTestCase):
target = RMFTestCase.TARGET_COMMON_SERVICES
)
- self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- mode = 0644,
- configurations = self.getConfig()['configurations']['yarn-site'],
- )
-
self.assert_service_check()
self.assertNoMoreResources()
@@ -58,14 +50,6 @@ class TestServiceCheck(RMFTestCase):
target = RMFTestCase.TARGET_COMMON_SERVICES
)
- self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- mode = 0644,
- configurations = self.getConfig()['configurations']['yarn-site'],
- )
-
self.assert_service_check()
self.assertNoMoreResources()
[04/16] ambari git commit: AMBARI-18436. Add service wizard:
Customize service page keeps showing spinner. (jaimin)
Posted by nc...@apache.org.
AMBARI-18436. Add service wizard: Customize service page keeps showing spinner. (jaimin)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/89eebb0f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/89eebb0f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/89eebb0f
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 89eebb0f553e8667e0eb82c7f96d862b5a37faf8
Parents: 3939afa
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Wed Sep 21 15:50:43 2016 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Wed Sep 21 16:00:14 2016 -0700
----------------------------------------------------------------------
ambari-web/app/controllers/wizard/step7_controller.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/89eebb0f/ambari-web/app/controllers/wizard/step7_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step7_controller.js b/ambari-web/app/controllers/wizard/step7_controller.js
index b8678a5..dc030be 100644
--- a/ambari-web/app/controllers/wizard/step7_controller.js
+++ b/ambari-web/app/controllers/wizard/step7_controller.js
@@ -807,7 +807,7 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
if (service.get('serviceName') === 'YARN') {
configsByService[service.get('serviceName')] = App.config.addYarnCapacityScheduler(configsByService[service.get('serviceName')]);
}
- service.set('configs', configsByService[service.get('serviceName')]);
+ service.set('configs', configsByService[service.get('serviceName')] || []);
if (['addServiceController', 'installerController'].contains(this.get('wizardController.name'))) {
this.addHostNamesToConfigs(service, localDB.masterComponentHosts, localDB.slaveComponentHosts);
}