You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by mr...@apache.org on 2017/11/27 23:29:21 UTC
[09/30] ambari git commit: Merge trunk with feature branch and fix
some UT compilation issues (mradhakrishnan)
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/CreateAndConfigureTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/CreateAndConfigureTask.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/CreateAndConfigureTask.java
new file mode 100644
index 0000000..d89840f
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/CreateAndConfigureTask.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.stack.upgrade;
+
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlType;
+
+import org.apache.ambari.server.serveraction.upgrades.CreateAndConfigureAction;
+
+/**
+ * The {@link CreateAndConfigureTask} represents a two-step change: first the config type is created if it does not exist,
+ * followed by the configuration change itself.
+ * This task contains the ID of the change. Change definitions are located in a separate file (the config
+ * upgrade pack). IDs of change definitions share the same namespace across all stacks.
+ *
+ *
+ * <p/>
+ *
+ * <pre>
+ * {@code
+ * <task xsi:type="create_and_configure" id="hdp_2_3_0_0-UpdateHiveConfig"/>
+ * }
+ * </pre>
+ *
+ */
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+@XmlType(name="create_and_configure")
+public class CreateAndConfigureTask extends ConfigureTask {
+
+ public static final String actionVerb = "CreateAndConfiguring";
+
+ /**
+ * Constructor.
+ */
+ public CreateAndConfigureTask() {
+ implClass = CreateAndConfigureAction.class.getName();
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
index 8f34613..39e5d5c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
@@ -20,19 +20,20 @@ package org.apache.ambari.server.state.stack.upgrade;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.agent.CommandRepository;
-import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.agent.ExecutionCommand.KeyNames;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.ActionExecutionContext;
-import org.apache.ambari.server.controller.ActionExecutionContext.ExecutionCommandVisitor;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.internal.OperatingSystemResourceProvider;
import org.apache.ambari.server.controller.internal.RepositoryResourceProvider;
@@ -41,10 +42,20 @@ import org.apache.ambari.server.controller.spi.SystemException;
import org.apache.ambari.server.orm.entities.OperatingSystemEntity;
import org.apache.ambari.server.orm.entities.RepositoryEntity;
import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.RepositoryInfo;
+import org.apache.ambari.server.state.RepositoryType;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.ServiceOsSpecific;
import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.repository.ClusterVersionSummary;
+import org.apache.ambari.server.state.repository.VersionDefinitionXml;
+import org.apache.ambari.server.state.stack.OsFamily;
+import org.apache.ambari.server.state.stack.RepoTag;
import org.apache.ambari.server.state.stack.UpgradePack;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
@@ -61,6 +72,7 @@ import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
+
/**
* Provides helper methods to manage repository versions.
*/
@@ -78,6 +90,51 @@ public class RepositoryVersionHelper {
@Inject
private Provider<Configuration> configuration;
+ @Inject
+ private Provider<OsFamily> os_family;
+
+ @Inject Provider<Clusters> clusters;
+
+
+ /**
+ * Checks repo URLs against the current version for the cluster and make
+ * adjustments to the Base URL when the current is different.
+ *
+ * @param cluster {@link Cluster} object
+ * @param component resolve {@link RepositoryVersionEntity} for the component, could be {@code null}
+ *
+ * @return {@link RepositoryVersionEntity} retrieved for component if set or cluster if not
+ */
+ @Experimental(feature = ExperimentalFeature.PATCH_UPGRADES)
+ private RepositoryVersionEntity getRepositoryVersionEntity(Cluster cluster, ServiceComponent component) throws SystemException {
+
+ RepositoryVersionEntity repositoryEntity = null;
+
+ // !!! try to find the component repo first
+ if (null != component) {
+ repositoryEntity = component.getDesiredRepositoryVersion();
+ } else {
+ LOG.info("Service component not passed in, attempt to resolve the repository for cluster {}",
+ cluster.getClusterName());
+ }
+
+ if (null == repositoryEntity && null != component) {
+ try {
+ Service service = cluster.getService(component.getServiceName());
+ repositoryEntity = service.getDesiredRepositoryVersion();
+ } catch (AmbariException e) {
+ throw new SystemException("Unhandled exception", e);
+ }
+ }
+
+ if (null == repositoryEntity) {
+ LOG.info("Cluster {} has no specific Repository Versions. Using stack-defined values", cluster.getClusterName());
+ return null;
+ }
+
+ return repositoryEntity;
+ }
+
/**
* Parses operating systems json to a list of entities. Expects json like:
* <pre>
@@ -136,6 +193,17 @@ public class RepositoryVersionHelper {
if (repositoryJson.getAsJsonObject().get(RepositoryResourceProvider.REPOSITORY_UNIQUE_PROPERTY_ID) != null) {
repositoryEntity.setUnique(repositoryJson.getAsJsonObject().get(RepositoryResourceProvider.REPOSITORY_UNIQUE_PROPERTY_ID).getAsBoolean());
}
+
+ if (null != repositoryJson.get(RepositoryResourceProvider.REPOSITORY_TAGS_PROPERTY_ID)) {
+ Set<RepoTag> tags = new HashSet<>();
+
+ JsonArray jsonArray = repositoryJson.get(RepositoryResourceProvider.REPOSITORY_TAGS_PROPERTY_ID).getAsJsonArray();
+ for(JsonElement je : jsonArray) {
+ tags.add(RepoTag.valueOf(je.getAsString()));
+ }
+ repositoryEntity.setTags(tags);
+ }
+
operatingSystemEntity.getRepositories().add(repositoryEntity);
}
operatingSystems.add(operatingSystemEntity);
@@ -187,6 +255,11 @@ public class RepositoryVersionHelper {
repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_COMPONENTS_PROPERTY_ID, repository.getComponents());
repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_MIRRORS_LIST_PROPERTY_ID, repository.getMirrorsList());
repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_UNIQUE_PROPERTY_ID, repository.isUnique());
+
+ // add the tags even if there are none
+ JsonArray tags = gson.toJsonTree(repository.getTags()).getAsJsonArray();
+ repositoryJson.add(RepositoryResourceProvider.REPOSITORY_TAGS_PROPERTY_ID, tags);
+
repositoriesJson.add(repositoryJson);
operatingSystemJson.addProperty(OperatingSystemResourceProvider.OPERATING_SYSTEM_AMBARI_MANAGED_REPOS, repository.isAmbariManagedRepositories());
}
@@ -306,17 +379,46 @@ public class RepositoryVersionHelper {
return roleParams;
}
+
+ /**
+ * Return repositories available for target os version on host based on {@code repoVersion} repository definition
+ * @param host target {@link Host} for providing repositories list
+ * @param repoVersion {@link RepositoryVersionEntity} version definition with all available repositories
+ *
+ * @return {@link OperatingSystemEntity} with available repositories for host
+ * @throws SystemException if no repository available for target {@link Host}
+ */
+ public OperatingSystemEntity getOSEntityForHost(Host host, RepositoryVersionEntity repoVersion) throws SystemException {
+ String osFamily = host.getOsFamily();
+ OperatingSystemEntity osEntity = null;
+ for (OperatingSystemEntity operatingSystem : repoVersion.getOperatingSystems()) {
+ if (osFamily.equals(operatingSystem.getOsType())) {
+ osEntity = operatingSystem;
+ break;
+ }
+ }
+
+ if (null == osEntity) {
+ throw new SystemException(String.format("Operating System matching %s could not be found",
+ osFamily));
+ }
+
+ return osEntity;
+ }
+
/**
* Adds a command repository to the action context
- * @param context the context
* @param osEntity the OS family
- * @param repoVersion the repository version entity
*/
- public void addCommandRepository(ActionExecutionContext context,
- RepositoryVersionEntity repoVersion, OperatingSystemEntity osEntity) {
+ public CommandRepository getCommandRepository(final RepositoryVersionEntity repoVersion,
+ final OperatingSystemEntity osEntity) throws SystemException {
final CommandRepository commandRepo = new CommandRepository();
- boolean sysPreppedHost = configuration.get().areHostsSysPrepped().equalsIgnoreCase("true");
+ final boolean sysPreppedHost = configuration.get().areHostsSysPrepped().equalsIgnoreCase("true");
+
+ if (null == repoVersion) {
+ throw new SystemException("Repository version entity is not provided");
+ }
commandRepo.setRepositories(osEntity.getOsType(), osEntity.getRepositories());
commandRepo.setRepositoryVersion(repoVersion.getVersion());
@@ -343,14 +445,226 @@ public class RepositoryVersionHelper {
LOG.warn("Legacy override option is turned on, disabling CommandRepositoryFeature.scoped feature");
commandRepo.getFeature().setIsScoped(false);
}
+ return commandRepo;
+ }
+
- context.addVisitor(new ExecutionCommandVisitor() {
- @Override
- public void visit(ExecutionCommand command) {
- if (null == command.getRepositoryFile()) {
- command.setRepositoryFile(commandRepo);
+ /**
+ * Builds repository information for inclusion in a command. This replaces escaping json on
+ * a command.
+ *
+ * @param cluster the cluster
+ * @param host the host
+ * @param component {@link ServiceComponent} object, could be null to return service-related repository
+ * @return the command repository
+ * @throws SystemException
+ */
+ @Experimental(feature=ExperimentalFeature.PATCH_UPGRADES)
+ public CommandRepository getCommandRepository(final Cluster cluster, ServiceComponent component, final Host host)
+ throws SystemException {
+
+ RepositoryVersionEntity repoVersion = getRepositoryVersionEntity(cluster, component);
+ OperatingSystemEntity osEntity = getOSEntityForHost(host, repoVersion);
+
+ return getCommandRepository(repoVersion, osEntity);
+ }
+
+ /**
+ * This method builds and adds repo info to hostLevelParams of the action
+ *
+ * @param cluster cluster to which host level params belongs
+ * @param actionContext context of the action. Must not be {@code null}
+ * @param repositoryVersion repository version entity to use
+ * @param hostLevelParams hashmap with host level params. Must not be {@code null}
+ * @param hostName host name for which to add repo info
+ * @throws AmbariException
+ */
+ @Deprecated
+ public void addRepoInfoToHostLevelParams(final Cluster cluster, final ActionExecutionContext actionContext,
+ final RepositoryVersionEntity repositoryVersion, final Map<String, String> hostLevelParams,
+ final String hostName) throws AmbariException {
+
+ // if the repo is null, see if any values from the context should go on the
+ // host params and then return
+ if (null == repositoryVersion) {
+ // see if the action context has a repository set to use for the command
+ if (null != actionContext.getRepositoryVersion()) {
+ StackId stackId = actionContext.getRepositoryVersion().getStackId();
+ hostLevelParams.put(KeyNames.STACK_NAME, stackId.getStackName());
+ hostLevelParams.put(KeyNames.STACK_VERSION, stackId.getStackVersion());
+ }
+
+ return;
+ } else {
+ StackId stackId = repositoryVersion.getStackId();
+ hostLevelParams.put(KeyNames.STACK_NAME, stackId.getStackName());
+ hostLevelParams.put(KeyNames.STACK_VERSION, stackId.getStackVersion());
+ }
+
+ JsonObject rootJsonObject = new JsonObject();
+ JsonArray repositories = new JsonArray();
+
+ String hostOsFamily = cluster.getHost(hostName).getOsFamily();
+ for (OperatingSystemEntity operatingSystemEntity : repositoryVersion.getOperatingSystems()) {
+ // osType in OperatingSystemEntity is actually the OS family. That should be fixed
+ // in OperatingSystemEntity.
+ if (operatingSystemEntity.getOsType().equals(hostOsFamily)) {
+ for (RepositoryEntity repositoryEntity : operatingSystemEntity.getRepositories()) {
+ JsonObject repositoryInfo = new JsonObject();
+ repositoryInfo.addProperty("base_url", repositoryEntity.getBaseUrl());
+ repositoryInfo.addProperty("repo_name", repositoryEntity.getName());
+ repositoryInfo.addProperty("repo_id", repositoryEntity.getRepositoryId());
+
+ repositories.add(repositoryInfo);
}
+ rootJsonObject.add("repositories", repositories);
}
+ }
+ hostLevelParams.put(KeyNames.REPO_INFO, rootJsonObject.toString());
+ }
+
+
+ /**
+ * Get repository info given a cluster and host.
+ *
+ * @param cluster the cluster
+ * @param host the host
+ *
+ * @return the repo info
+ *
+ * @deprecated use {@link #getCommandRepository(Cluster, ServiceComponent, Host)} instead.
+ * @throws SystemException if the repository information can not be obtained
+ */
+ @Deprecated
+ public String getRepoInfo(Cluster cluster, ServiceComponent component, Host host) throws SystemException {
+ final JsonArray jsonList = getBaseUrls(cluster, component, host);
+ final RepositoryVersionEntity rve = getRepositoryVersionEntity(cluster, component);
+
+ if (null == rve || null == jsonList) {
+ return "";
+ }
+
+ final JsonArray result = new JsonArray();
+
+ for (JsonElement e : jsonList) {
+ JsonObject obj = e.getAsJsonObject();
+
+ String repoId = obj.has("repoId") ? obj.get("repoId").getAsString() : null;
+ String repoName = obj.has("repoName") ? obj.get("repoName").getAsString() : null;
+ String baseUrl = obj.has("baseUrl") ? obj.get("baseUrl").getAsString() : null;
+ String osType = obj.has("osType") ? obj.get("osType").getAsString() : null;
+
+ if (null == repoId || null == baseUrl || null == osType || null == repoName) {
+ continue;
+ }
+
+ for (OperatingSystemEntity ose : rve.getOperatingSystems()) {
+ if (ose.getOsType().equals(osType) && ose.isAmbariManagedRepos()) {
+ for (RepositoryEntity re : ose.getRepositories()) {
+ if (re.getName().equals(repoName) &&
+ !re.getBaseUrl().equals(baseUrl)) {
+ obj.addProperty("baseUrl", re.getBaseUrl());
+ }
+ }
+ result.add(e);
+ }
+ }
+ }
+ return result.toString();
+ }
+
+
+ /**
+ * Executed by two different representations of repos. When we are comfortable with the new
+ * implementation, this may be removed and called inline in {@link #getCommandRepository(Cluster, ServiceComponent, Host)}
+ *
+ * @param cluster the cluster to isolate the stack
+ * @param component the component
+ * @param host used to resolve the family for the repositories
+ * @return JsonArray the type as defined by the supplied {@code function}.
+ * @throws SystemException
+ */
+ @Deprecated
+ private JsonArray getBaseUrls(Cluster cluster, ServiceComponent component, Host host) throws SystemException {
+
+ String hostOsType = host.getOsType();
+ String hostOsFamily = host.getOsFamily();
+ String hostName = host.getHostName();
+
+ StackId stackId = component.getDesiredStackId();
+ Map<String, List<RepositoryInfo>> repos;
+
+ try {
+ repos = ami.get().getRepository(stackId.getStackName(), stackId.getStackVersion());
+ }catch (AmbariException e) {
+ throw new SystemException("Unhandled exception", e);
+ }
+
+ String family = os_family.get().find(hostOsType);
+ if (null == family) {
+ family = hostOsFamily;
+ }
+
+ final List<RepositoryInfo> repoInfoList;
+
+ // !!! check for the most specific first
+ if (repos.containsKey(hostOsType)) {
+ repoInfoList = repos.get(hostOsType);
+ } else if (null != family && repos.containsKey(family)) {
+ repoInfoList = repos.get(family);
+ } else {
+ repoInfoList = null;
+ LOG.warn("Could not retrieve repo information for host"
+ + ", hostname=" + hostName
+ + ", clusterName=" + cluster.getClusterName()
+ + ", stackInfo=" + stackId.getStackId());
+ }
+
+ return (null == repoInfoList) ? null : (JsonArray) gson.toJsonTree(repoInfoList);
+ }
+
+
+ /**
+ * Adds a command repository to the action context
+ * @param context the context
+ * @param osEntity the OS family
+ */
+ public void addCommandRepositoryToContext(ActionExecutionContext context,
+ OperatingSystemEntity osEntity) throws SystemException {
+
+ final RepositoryVersionEntity repoVersion = context.getRepositoryVersion();
+ final CommandRepository commandRepo = getCommandRepository(repoVersion, osEntity);
+
+ ClusterVersionSummary summary = null;
+
+ if (RepositoryType.STANDARD != repoVersion.getType()) {
+ try {
+ final Cluster cluster = clusters.get().getCluster(context.getClusterName());
+
+ VersionDefinitionXml xml = repoVersion.getRepositoryXml();
+ summary = xml.getClusterSummary(cluster);
+ } catch (Exception e) {
+ LOG.warn("Could not determine repository from %s/%s. Will not pass cluster version.");
+ }
+ }
+
+ final ClusterVersionSummary clusterSummary = summary;
+
+
+ context.addVisitor(command -> {
+ if (null == command.getRepositoryFile()) {
+ command.setRepositoryFile(commandRepo);
+ }
+
+ if (null != clusterSummary) {
+ Map<String, Object> params = command.getRoleParameters();
+ if (null == params) {
+ params = new HashMap<>();
+ command.setRoleParameters(params);
+ }
+ params.put(KeyNames.CLUSTER_VERSION_SUMMARY, clusterSummary);
+ }
+
});
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
index 6ab2fd2..2167b7b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
@@ -25,7 +25,7 @@ import javax.xml.bind.annotation.XmlSeeAlso;
/**
* Base class to identify the items that could possibly occur during an upgrade
*/
-@XmlSeeAlso(value={ExecuteTask.class, ConfigureTask.class, ManualTask.class, RestartTask.class, StartTask.class, StopTask.class, ServerActionTask.class, ConfigureFunction.class})
+@XmlSeeAlso(value={ExecuteTask.class, CreateAndConfigureTask.class, ConfigureTask.class, ManualTask.class, RestartTask.class, StartTask.class, StopTask.class, ServerActionTask.class, ConfigureFunction.class})
public abstract class Task {
/**
@@ -96,6 +96,10 @@ public abstract class Task {
*/
CONFIGURE,
/**
+ * Task that creates a config type if it does not exist, and alters a configuration.
+ */
+ CREATE_AND_CONFIGURE,
+ /**
* Task that sets up the configuration for subsequent task
*/
CONFIGURE_FUNCTION,
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
index 0b9811e..73c0e14 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
@@ -1324,8 +1324,7 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
ServiceComponentUninstalledEvent event = new ServiceComponentUninstalledEvent(
clusterId, stackName, stackVersion, serviceName, serviceType, serviceGroupName, componentName,
- hostName, recoveryEnabled);
-
+ hostName, recoveryEnabled, null);
eventPublisher.publish(event);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java
index 404068d..24b4785 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java
@@ -30,6 +30,7 @@ import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.ObjectNotFoundException;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.AmbariServer;
+import org.apache.ambari.server.controller.RootComponent;
import org.apache.ambari.server.controller.internal.ProvisionAction;
import org.apache.ambari.server.controller.internal.Stack;
import org.apache.ambari.server.controller.utilities.PropertyHelper;
@@ -205,7 +206,7 @@ public class BlueprintFactory {
allComponents.addAll(components);
}
// currently ambari server is not a recognized component
- allComponents.add("AMBARI_SERVER");
+ allComponents.add(RootComponent.AMBARI_SERVER.name());
return allComponents;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
index 379a69c..3273a4e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
@@ -52,7 +52,7 @@ import org.slf4j.LoggerFactory;
*/
public class ClusterConfigurationRequest {
- protected final static Logger LOG = LoggerFactory.getLogger(ClusterConfigurationRequest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClusterConfigurationRequest.class);
/**
* a regular expression Pattern used to find "clusterHostInfo.(component_name)_host" placeholders in strings
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/topology/PersistedStateImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/PersistedStateImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/PersistedStateImpl.java
index 3bfa644..1374a42 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/PersistedStateImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/PersistedStateImpl.java
@@ -65,7 +65,7 @@ import com.google.inject.persist.Transactional;
@Singleton
public class PersistedStateImpl implements PersistedState {
- protected final static Logger LOG = LoggerFactory.getLogger(PersistedState.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PersistedState.class);
@Inject
private TopologyRequestDAO topologyRequestDAO;
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/RequiredPasswordValidator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/RequiredPasswordValidator.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/RequiredPasswordValidator.java
index 2258d97..fa528c8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/RequiredPasswordValidator.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/RequiredPasswordValidator.java
@@ -88,7 +88,7 @@ public class RequiredPasswordValidator implements TopologyValidator {
for (ComponentV2 component : hostGroup.getComponents()) {
//for now, AMBARI is not recognized as a service in Stacks
if (component.getType().equals("AMBARI_SERVER")) {
- continue;
+ continue;
}
Service service = component.getService();
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
index 0941535..f97aab3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
@@ -143,6 +143,7 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
new HashMap<>();
protected String ambariUpgradeConfigUpdatesFileName;
+ private Map<String,String> upgradeJsonOutput = new HashMap<>();
@Inject
public AbstractUpgradeCatalog(Injector injector) {
@@ -261,6 +262,13 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
}
/**
+ * {@inheritDoc}
+ */
+ public Map<String,String> getUpgradeJsonOutput() {
+ return upgradeJsonOutput;
+ }
+
+ /**
* Update metainfo to new version.
*/
@Transactional
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
index 8812ef5..039e041 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
@@ -26,7 +26,9 @@ import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.Properties;
import java.util.Set;
@@ -34,6 +36,7 @@ import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.audit.AuditLoggerModule;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.ControllerModule;
+import org.apache.ambari.server.ldap.LdapModule;
import org.apache.ambari.server.orm.DBAccessor;
import org.apache.ambari.server.utils.EventBusSynchronizer;
import org.apache.ambari.server.utils.VersionUtils;
@@ -41,6 +44,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.support.JdbcUtils;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
@@ -56,6 +61,7 @@ public class SchemaUpgradeHelper {
private DBAccessor dbAccessor;
private Configuration configuration;
private static final String[] rcaTableNames = {"workflow", "job", "task", "taskAttempt", "hdfsEvent", "mapreduceEvent", "clusterEvent"};
+ static final Gson gson = new GsonBuilder().create();
@Inject
public SchemaUpgradeHelper(Set<UpgradeCatalog> allUpgradeCatalogs,
@@ -179,6 +185,7 @@ public class SchemaUpgradeHelper {
catalogBinder.addBinding().to(UpgradeCatalog251.class);
catalogBinder.addBinding().to(UpgradeCatalog252.class);
catalogBinder.addBinding().to(UpgradeCatalog260.class);
+ catalogBinder.addBinding().to(UpgradeCatalog261.class);
catalogBinder.addBinding().to(UpgradeCatalog300.class);
catalogBinder.addBinding().to(FinalUpgradeCatalog.class);
@@ -249,6 +256,26 @@ public class SchemaUpgradeHelper {
}
}
+ public void outputUpgradeJsonOutput(List<UpgradeCatalog> upgradeCatalogs)
+ throws AmbariException {
+ LOG.info("Combining upgrade json output.");
+ Map<String,String> combinedUpgradeJsonOutput = new HashMap<>();
+
+ if (upgradeCatalogs != null && !upgradeCatalogs.isEmpty()) {
+ for (UpgradeCatalog upgradeCatalog : upgradeCatalogs) {
+ try {
+ combinedUpgradeJsonOutput.putAll(upgradeCatalog.getUpgradeJsonOutput());
+
+ } catch (Exception e) {
+ LOG.error("Upgrade failed. ", e);
+ throw new AmbariException(e.getMessage(), e);
+ }
+ }
+ }
+ String content = gson.toJson(combinedUpgradeJsonOutput);
+ System.out.println(content);
+ }
+
public void resetUIState() throws AmbariException {
LOG.info("Resetting UI state.");
try {
@@ -373,7 +400,7 @@ public class SchemaUpgradeHelper {
System.exit(1);
}
- Injector injector = Guice.createInjector(new UpgradeHelperModule(), new AuditLoggerModule());
+ Injector injector = Guice.createInjector(new UpgradeHelperModule(), new AuditLoggerModule(), new LdapModule());
SchemaUpgradeHelper schemaUpgradeHelper = injector.getInstance(SchemaUpgradeHelper.class);
//Fail if MySQL database has tables with MyISAM engine
@@ -419,6 +446,7 @@ public class SchemaUpgradeHelper {
schemaUpgradeHelper.executeDMLUpdates(upgradeCatalogs, ambariUpgradeConfigUpdatesFileName);
schemaUpgradeHelper.executeOnPostUpgrade(upgradeCatalogs);
+ schemaUpgradeHelper.outputUpgradeJsonOutput(upgradeCatalogs);
schemaUpgradeHelper.resetUIState();
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog.java
index 21273fd..37a3b5e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog.java
@@ -18,6 +18,7 @@
package org.apache.ambari.server.upgrade;
import java.sql.SQLException;
+import java.util.Map;
import org.apache.ambari.server.AmbariException;
@@ -88,4 +89,9 @@ public interface UpgradeCatalog {
* Update schema version in the database to the Target one
*/
void updateDatabaseSchemaVersion();
+
+ /**
+ * Gets the upgrade JSON output, which is sent to the executing Python process.
+ */
+ Map<String,String> getUpgradeJsonOutput();
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
index 11f79fe..4d9a5da 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
@@ -17,8 +17,6 @@
*/
package org.apache.ambari.server.upgrade;
-import static org.apache.ambari.server.view.ViewContextImpl.CORE_SITE;
-
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -47,6 +45,7 @@ import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
import org.apache.commons.lang.StringUtils;
@@ -134,9 +133,23 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
public static final String HOST_COMPONENT_DESIRED_STATE = "hostcomponentdesiredstate";
public static final String HOST_COMPONENT_STATE = "hostcomponentstate";
+ private static final String CORE_SITE = "core-site";
public static final String AMS_SSL_CLIENT = "ams-ssl-client";
public static final String METRIC_TRUSTSTORE_ALIAS = "ssl.client.truststore.alias";
+ private static final String HIVE_INTERACTIVE_SITE = "hive-interactive-site";
+ public static final String HIVE_LLAP_DAEMON_KEYTAB_FILE = "hive.llap.daemon.keytab.file";
+ public static final String HIVE_LLAP_ZK_SM_KEYTAB_FILE = "hive.llap.zk.sm.keytab.file";
+ public static final String HIVE_LLAP_TASK_KEYTAB_FILE = "hive.llap.task.keytab.file";
+ public static final String HIVE_SERVER_KERBEROS_PREFIX = "/HIVE/HIVE_SERVER/";
+ public static final String YARN_LLAP_ZK_HIVE_KERBEROS_IDENTITY = "llap_zk_hive";
+ public static final String YARN_LLAP_TASK_HIVE_KERBEROS_IDENTITY = "llap_task_hive";
+ public static final String HIVE_SERVER_HIVE_KERBEROS_IDENTITY = "hive_server_hive";
+
+ // Used to track whether the YARN -> NODEMANAGER 'llap_zk_hive' and 'llap_task_hive' kerberos descriptors were updated or not.
+ private List<String> yarnKerberosDescUpdatedList = new ArrayList<>();
+
+
/**
* Logger.
*/
@@ -193,7 +206,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
*/
@Override
protected void executeDDLUpdates() throws AmbariException, SQLException {
- int currentVersionID = getCurrentVersionID();
+ Integer currentVersionID = getCurrentVersionID();
dropBrokenFK();
updateServiceComponentDesiredStateTable(currentVersionID);
updateServiceDesiredStateTable(currentVersionID);
@@ -360,10 +373,13 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
* Removes {@value #FK_SDS_DESIRED_STACK_ID} foreign key.
* adds {@value #FK_REPO_VERSION_ID} foreign key.
*
+ * @param currentRepoID id of current repo_version. Can be null if there are no cluster repo versions
+ * (in this case {@value #SERVICE_DESIRED_STATE_TABLE} table must be empty)
+ *
* @throws java.sql.SQLException
*/
- private void updateServiceDesiredStateTable(int currentRepoID) throws SQLException {
-
+ private void updateServiceDesiredStateTable(Integer currentRepoID) throws SQLException {
+ //in case currentRepoID is null, the {@value #SERVICE_DESIRED_STATE_TABLE} table must be empty and a null defaultValue is ok for the non-nullable column
dbAccessor.addColumn(SERVICE_DESIRED_STATE_TABLE,
new DBAccessor.DBColumnInfo(DESIRED_REPO_VERSION_ID_COLUMN, Long.class, null, currentRepoID, false));
dbAccessor.alterColumn(SERVICE_DESIRED_STATE_TABLE,
@@ -413,9 +429,13 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
* Removes {@value #FK_SCDS_DESIRED_STACK_ID} foreign key.
* adds {@value #FK_SCDS_DESIRED_REPO_ID} foreign key.
*
+ * @param currentRepoID id of current repo_version. Can be null if there are no cluster repo versions
+ * (in this case {@value #SERVICE_COMPONENT_DESIRED_STATE_TABLE} table must be empty)
+ *
* @throws java.sql.SQLException
*/
- private void updateServiceComponentDesiredStateTable(int currentRepoID) throws SQLException {
+ private void updateServiceComponentDesiredStateTable(Integer currentRepoID) throws SQLException {
+ //in case currentRepoID is null, the {@value #SERVICE_COMPONENT_DESIRED_STATE_TABLE} table must be empty and a null defaultValue is ok for the non-nullable column
dbAccessor.addColumn(SERVICE_COMPONENT_DESIRED_STATE_TABLE,
new DBAccessor.DBColumnInfo(DESIRED_REPO_VERSION_ID_COLUMN, Long.class, null, currentRepoID, false));
dbAccessor.alterColumn(SERVICE_COMPONENT_DESIRED_STATE_TABLE,
@@ -492,15 +512,32 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
ensureZeppelinProxyUserConfigs();
updateKerberosDescriptorArtifacts();
updateAmsConfigs();
+ updateHiveConfigs();
updateHDFSWidgetDefinition();
updateExistingRepositoriesToBeResolved();
}
- public int getCurrentVersionID() throws AmbariException, SQLException {
+ /**
+ * get {@value #REPO_VERSION_ID_COLUMN} value from {@value #CLUSTER_VERSION_TABLE}
+ * where {@value #STATE_COLUMN} = {@value #CURRENT}
+ * and validate it
+ *
+ * @return current version ID or null if no cluster versions exist
+ * @throws AmbariException if cluster versions are present, but current is not selected
+ * @throws SQLException
+ */
+ public Integer getCurrentVersionID() throws AmbariException, SQLException {
List<Integer> currentVersionList = dbAccessor.getIntColumnValues(CLUSTER_VERSION_TABLE, REPO_VERSION_ID_COLUMN,
new String[]{STATE_COLUMN}, new String[]{CURRENT}, false);
- if (currentVersionList.size() != 1) {
- throw new AmbariException("Can't get current version id");
+ if (currentVersionList.isEmpty()) {
+ List<Integer> allVersionList = dbAccessor.getIntColumnValues(CLUSTER_VERSION_TABLE, REPO_VERSION_ID_COLUMN, null, null,false);
+ if (allVersionList.isEmpty()){
+ return null;
+ } else {
+ throw new AmbariException("Unable to find any CURRENT repositories.");
+ }
+ } else if (currentVersionList.size() != 1) {
+ throw new AmbariException("The following repositories were found to be CURRENT: ".concat(StringUtils.join(currentVersionList, ",")));
}
return currentVersionList.get(0);
}
@@ -615,6 +652,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
if (kerberosDescriptor != null) {
fixRangerKMSKerberosDescriptor(kerberosDescriptor);
fixIdentityReferences(getCluster(artifactEntity), kerberosDescriptor);
+ fixYarnHsiKerberosDescriptorAndSiteConfig(getCluster(artifactEntity), kerberosDescriptor);
artifactEntity.setArtifactData(kerberosDescriptor.toMap());
artifactDAO.merge(artifactEntity);
@@ -641,6 +679,132 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
}
}
+ /**
+ * Updates YARN's NM 'llap_zk_hive' and 'llap_task_hive' kerberos descriptors as references and the associated configs
+ * hive-interactive-site/hive.llap.zk.sm.keytab.file and hive-interactive-site/hive.llap.task.keytab.file
+ */
+ protected void fixYarnHsiKerberosDescriptorAndSiteConfig(Cluster cluster, KerberosDescriptor kerberosDescriptor) {
+ LOG.info("Updating YARN's HSI Kerberos Descriptor ....");
+
+ // Step 1. Get Hive -> HIVE_SERVER -> 'hive_server_hive' kerberos description for referencing later
+ KerberosServiceDescriptor hiveServiceDescriptor = kerberosDescriptor.getService("HIVE");
+ KerberosIdentityDescriptor hsh_identityDescriptor = null;
+ KerberosPrincipalDescriptor hsh_principalDescriptor = null;
+ KerberosKeytabDescriptor hsh_keytabDescriptor = null;
+ if (hiveServiceDescriptor != null) {
+ KerberosComponentDescriptor hiveServerKerberosDescriptor = hiveServiceDescriptor.getComponent("HIVE_SERVER");
+ if (hiveServerKerberosDescriptor != null) {
+ hsh_identityDescriptor = hiveServerKerberosDescriptor.getIdentity(HIVE_SERVER_HIVE_KERBEROS_IDENTITY);
+ if (hsh_identityDescriptor != null) {
+ LOG.info(" Retrieved HIVE->HIVE_SERVER kerberos descriptor. Name = " + hsh_identityDescriptor.getName());
+ hsh_principalDescriptor = hsh_identityDescriptor.getPrincipalDescriptor();
+ hsh_keytabDescriptor = hsh_identityDescriptor.getKeytabDescriptor();
+ }
+ }
+
+ // Step 2. Update YARN -> NODEMANAGER's : (1). 'llap_zk_hive' and (2). 'llap_task_hive' kerberos descriptor as reference to
+ // HIVE -> HIVE_SERVER -> 'hive_server_hive' (Same as YARN -> NODEMANAGER -> 'yarn_nodemanager_hive_server_hive')
+ if (hsh_principalDescriptor != null && hsh_keytabDescriptor != null) {
+ KerberosServiceDescriptor yarnServiceDescriptor = kerberosDescriptor.getService("YARN");
+ if (yarnServiceDescriptor != null) {
+ KerberosComponentDescriptor yarnNmKerberosDescriptor = yarnServiceDescriptor.getComponent("NODEMANAGER");
+ if (yarnNmKerberosDescriptor != null) {
+ String[] identities = {YARN_LLAP_ZK_HIVE_KERBEROS_IDENTITY, YARN_LLAP_TASK_HIVE_KERBEROS_IDENTITY};
+ for (String identity : identities) {
+ KerberosIdentityDescriptor identityDescriptor = yarnNmKerberosDescriptor.getIdentity(identity);
+
+ KerberosPrincipalDescriptor principalDescriptor = null;
+ KerberosKeytabDescriptor keytabDescriptor = null;
+ if (identityDescriptor != null) {
+ LOG.info(" Retrieved YARN->NODEMANAGER kerberos descriptor to be updated. Name = " + identityDescriptor.getName());
+ principalDescriptor = identityDescriptor.getPrincipalDescriptor();
+ keytabDescriptor = identityDescriptor.getKeytabDescriptor();
+
+ identityDescriptor.setReference(HIVE_SERVER_KERBEROS_PREFIX + hsh_identityDescriptor.getName());
+ LOG.info(" Updated '" + YARN_LLAP_ZK_HIVE_KERBEROS_IDENTITY + "' identity descriptor reference = '"
+ + identityDescriptor.getReference() + "'");
+ principalDescriptor.setValue(null);
+ LOG.info(" Updated '" + YARN_LLAP_ZK_HIVE_KERBEROS_IDENTITY + "' principal descriptor value = '"
+ + principalDescriptor.getValue() + "'");
+
+ // Updating keytabs now
+ keytabDescriptor.setFile(null);
+ LOG.info(" Updated '" + YARN_LLAP_ZK_HIVE_KERBEROS_IDENTITY + "' keytab descriptor file = '"
+ + keytabDescriptor.getFile() + "'");
+ keytabDescriptor.setOwnerName(null);
+ LOG.info(" Updated '" + YARN_LLAP_ZK_HIVE_KERBEROS_IDENTITY + "' keytab descriptor owner name = '" + keytabDescriptor.getOwnerName() + "'");
+ keytabDescriptor.setOwnerAccess(null);
+ LOG.info(" Updated '" + YARN_LLAP_ZK_HIVE_KERBEROS_IDENTITY + "' keytab descriptor owner access = '" + keytabDescriptor.getOwnerAccess() + "'");
+ keytabDescriptor.setGroupName(null);
+ LOG.info(" Updated '" + YARN_LLAP_ZK_HIVE_KERBEROS_IDENTITY + "' keytab descriptor group name = '" + keytabDescriptor.getGroupName() + "'");
+ keytabDescriptor.setGroupAccess(null);
+ LOG.info(" Updated '" + YARN_LLAP_ZK_HIVE_KERBEROS_IDENTITY + "' keytab descriptor group access = '" + keytabDescriptor.getGroupAccess() + "'");
+
+ // Need this as a trigger to update the HIVE_LLAP_ZK_SM_KEYTAB_FILE and HIVE_LLAP_TASK_KEYTAB_FILE configs later.
+
+ // Get the keytab file 'config name'.
+ String[] splits = keytabDescriptor.getConfiguration().split("/");
+ if (splits != null && splits.length == 2) {
+ updateYarnKerberosDescUpdatedList(splits[1]);
+ LOG.info(" Updated 'yarnKerberosDescUpdatedList' = " + getYarnKerberosDescUpdatedList());
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ public void updateYarnKerberosDescUpdatedList(String val) {
+ yarnKerberosDescUpdatedList.add(val);
+ }
+
+ public List<String> getYarnKerberosDescUpdatedList() {
+ return yarnKerberosDescUpdatedList;
+ }
+
+ protected void updateHiveConfigs() throws AmbariException {
+ AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+ Clusters clusters = ambariManagementController.getClusters();
+ if (clusters != null) {
+ Map<String, Cluster> clusterMap = getCheckedClusterMap(clusters);
+ if (clusterMap != null && !clusterMap.isEmpty()) {
+ for (final Cluster cluster : clusterMap.values()) {
+ // Updating YARN->NodeManager kerberos descriptors: (1) 'llap_zk_hive' and (2) 'llap_task_hive''s associated configs
+ // hive-interactive-site/hive.llap.zk.sm.keytab.file and hive-interactive-site/hive.llap.task.keytab.file respectively,
+ // based on what hive-interactive-site/hive.llap.daemon.keytab.file has.
+ Config hsiSiteConfig = cluster.getDesiredConfigByType(HIVE_INTERACTIVE_SITE);
+ if (hsiSiteConfig != null) {
+ Map<String, String> hsiSiteConfigProperties = hsiSiteConfig.getProperties();
+ if (hsiSiteConfigProperties != null &&
+ hsiSiteConfigProperties.containsKey(HIVE_LLAP_DAEMON_KEYTAB_FILE)) {
+ String[] identities = {HIVE_LLAP_ZK_SM_KEYTAB_FILE, HIVE_LLAP_TASK_KEYTAB_FILE};
+ Map<String, String> newProperties = new HashMap<>();
+ for (String identity : identities) {
+ // Update only if we were able to modify the corresponding kerberos descriptor,
+ // reflected in list 'getYarnKerberosDescUpdatedList'.
+ if (getYarnKerberosDescUpdatedList().contains(identity) && hsiSiteConfigProperties.containsKey(identity)) {
+ newProperties.put(identity, hsiSiteConfigProperties.get(HIVE_LLAP_DAEMON_KEYTAB_FILE));
+ }
+ }
+
+ // Update step.
+ if (newProperties.size() > 0) {
+ try {
+ updateConfigurationPropertiesForCluster(cluster, HIVE_INTERACTIVE_SITE, newProperties, true, false);
+ LOG.info("Updated HSI config(s) : " + newProperties.keySet() + " with value(s) = " + newProperties.values() + " respectively.");
+ } catch (AmbariException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
protected void updateAmsConfigs() throws AmbariException {
AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
Clusters clusters = ambariManagementController.getClusters();
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog261.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog261.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog261.java
new file mode 100644
index 0000000..ba95833
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog261.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.upgrade;
+
+import java.sql.SQLException;
+import java.util.Map;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+
+/**
+ * The {@link UpgradeCatalog261} upgrades Ambari from 2.6.0 to 2.6.1.
+ */
+public class UpgradeCatalog261 extends AbstractUpgradeCatalog {
+ private static final String CORE_SITE = "core-site";
+ private static final String COMPRESSION_CODECS_PROPERTY = "io.compression.codecs";
+ private static final String COMPRESSION_CODECS_LZO = "com.hadoop.compression.lzo";
+ private static final String LZO_ENABLED_JSON_KEY = "lzo_enabled";
+
+ private static final Logger LOG = LoggerFactory.getLogger(UpgradeCatalog261.class);
+
+ /**
+ * Constructor.
+ *
+ * @param injector
+ */
+ @Inject
+ public UpgradeCatalog261(Injector injector) {
+ super(injector);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public String getSourceVersion() {
+ return "2.6.0";
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public String getTargetVersion() {
+ return "2.6.1";
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ protected void executeDDLUpdates() throws AmbariException, SQLException {
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ protected void executePreDMLUpdates() throws AmbariException, SQLException {
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ protected void executeDMLUpdates() throws AmbariException, SQLException {
+ // TODO: make this map with clusterids as keys
+ this.getUpgradeJsonOutput().put(LZO_ENABLED_JSON_KEY, isLzoEnabled().toString());
+ }
+
+ protected Boolean isLzoEnabled() throws AmbariException {
+ AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+ Clusters clusters = ambariManagementController.getClusters();
+ if (clusters != null) {
+ Map<String, Cluster> clusterMap = getCheckedClusterMap(clusters);
+ if (clusterMap != null && !clusterMap.isEmpty()) {
+ for (final Cluster cluster : clusterMap.values()) {
+
+ Config coreSite = cluster.getDesiredConfigByType(CORE_SITE);
+ if (coreSite != null) {
+ Map<String, String> coreSiteProperties = coreSite.getProperties();
+
+ if (coreSiteProperties.containsKey(COMPRESSION_CODECS_PROPERTY)) {
+ String compressionCodecs = coreSiteProperties.get(COMPRESSION_CODECS_PROPERTY);
+
+ if(compressionCodecs.contains(COMPRESSION_CODECS_LZO)) {
+ return true;
+ }
+ }
+ }
+ }
+ }
+ }
+ return false;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
index bfe2a13..2de6095 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
@@ -45,6 +45,7 @@ import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -65,21 +66,21 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
protected static final String STAGE_DISPLAY_STATUS_COLUMN = "display_status";
protected static final String REQUEST_TABLE = "request";
protected static final String REQUEST_DISPLAY_STATUS_COLUMN = "display_status";
- protected static final String CLUSTER_CONFIG_TABLE = "clusterconfig";
- protected static final String CLUSTER_CONFIG_SELECTED_COLUMN = "selected";
- protected static final String CLUSTER_CONFIG_SELECTED_TIMESTAMP_COLUMN = "selected_timestamp";
protected static final String HOST_ROLE_COMMAND_TABLE = "host_role_command";
protected static final String HRC_OPS_DISPLAY_NAME_COLUMN = "ops_display_name";
- protected static final String COMPONENT_TABLE = "servicecomponentdesiredstate";
protected static final String COMPONENT_DESIRED_STATE_TABLE = "hostcomponentdesiredstate";
protected static final String COMPONENT_STATE_TABLE = "hostcomponentstate";
protected static final String SERVICE_DESIRED_STATE_TABLE = "servicedesiredstate";
protected static final String SECURITY_STATE_COLUMN = "security_state";
+ protected static final String AMBARI_CONFIGURATION_TABLE = "ambari_configuration";
+ protected static final String AMBARI_CONFIGURATION_CATEGORY_NAME_COLUMN = "category_name";
+ protected static final String AMBARI_CONFIGURATION_PROPERTY_NAME_COLUMN = "property_name";
+ protected static final String AMBARI_CONFIGURATION_PROPERTY_VALUE_COLUMN = "property_value";
+
@Inject
DaoUtils daoUtils;
-
// ----- Constructors ------------------------------------------------------
/**
@@ -122,6 +123,7 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
updateStageTable();
addOpsDisplayNameColumnToHostRoleCommand();
removeSecurityState();
+ addAmbariConfigurationTable();
}
protected void updateStageTable() throws SQLException {
@@ -133,6 +135,16 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
new DBAccessor.DBColumnInfo(REQUEST_DISPLAY_STATUS_COLUMN, String.class, 255, HostRoleStatus.PENDING, false));
}
+ protected void addAmbariConfigurationTable() throws SQLException {
+ List<DBAccessor.DBColumnInfo> columns = new ArrayList<>();
+ columns.add(new DBAccessor.DBColumnInfo(AMBARI_CONFIGURATION_CATEGORY_NAME_COLUMN, String.class, 100, null, false));
+ columns.add(new DBAccessor.DBColumnInfo(AMBARI_CONFIGURATION_PROPERTY_NAME_COLUMN, String.class, 100, null, false));
+ columns.add(new DBAccessor.DBColumnInfo(AMBARI_CONFIGURATION_PROPERTY_VALUE_COLUMN, String.class, 255, null, true));
+
+ dbAccessor.createTable(AMBARI_CONFIGURATION_TABLE, columns);
+ dbAccessor.addPKConstraint(AMBARI_CONFIGURATION_TABLE, "PK_ambari_configuration", AMBARI_CONFIGURATION_CATEGORY_NAME_COLUMN, AMBARI_CONFIGURATION_PROPERTY_NAME_COLUMN);
+ }
+
/**
* {@inheritDoc}
*/
@@ -149,6 +161,7 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
showHcatDeletedUserMessage();
setStatusOfStagesAndRequests();
updateLogSearchConfigs();
+ updateKerberosConfigurations();
}
protected void showHcatDeletedUserMessage() {
@@ -265,7 +278,7 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
"logsearch-common-properties", Collections.emptyMap(), "ambari-upgrade",
String.format("Updated logsearch-common-properties during Ambari Upgrade from %s to %s",
getSourceVersion(), getTargetVersion()));
-
+
String defaultLogLevels = logSearchProperties.getProperties().get("logsearch.logfeeder.include.default.level");
Set<String> removeProperties = Sets.newHashSet("logsearch.logfeeder.include.default.level");
@@ -324,7 +337,7 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
updateConfigurationPropertiesForCluster(cluster, "logsearch-audit_logs-solrconfig", Collections.singletonMap("content", content), true, true);
}
}
-
+
Config logFeederOutputConfig = cluster.getDesiredConfigByType("logfeeder-output-config");
if (logFeederOutputConfig != null) {
String content = logFeederOutputConfig.getProperties().get("content");
@@ -346,4 +359,28 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
}
}
}
+
+ protected void updateKerberosConfigurations() throws AmbariException {
+ AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+ Clusters clusters = ambariManagementController.getClusters();
+ if (clusters != null) {
+ Map<String, Cluster> clusterMap = clusters.getClusters();
+
+ if (!MapUtils.isEmpty(clusterMap)) {
+ for (Cluster cluster : clusterMap.values()) {
+ Config config = cluster.getDesiredConfigByType("kerberos-env");
+
+ if (config != null) {
+ Map<String, String> properties = config.getProperties();
+ if (properties.containsKey("group")) {
+ // Convert kerberos-env/group to kerberos-env/ipa_user_group
+ updateConfigurationPropertiesForCluster(cluster, "kerberos-env",
+ Collections.singletonMap("ipa_user_group", properties.get("group")), Collections.singleton("group"),
+ true, false);
+ }
+ }
+ }
+ }
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/utils/RetryHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/RetryHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/RetryHelper.java
index d732edf..14c276c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/RetryHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/RetryHelper.java
@@ -34,7 +34,7 @@ import org.slf4j.LoggerFactory;
* TODO injection as Guice singleon, static for now to avoid major modifications
*/
public class RetryHelper {
- protected final static Logger LOG = LoggerFactory.getLogger(RetryHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RetryHelper.class);
private static Clusters s_clusters;
private static ThreadLocal<Set<Cluster>> affectedClusters = new ThreadLocal<Set<Cluster>>(){
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/utils/VersionUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/VersionUtils.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/VersionUtils.java
index 65966ed..6a3d81c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/VersionUtils.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/VersionUtils.java
@@ -22,6 +22,7 @@ import java.util.List;
import org.apache.ambari.server.bootstrap.BootStrapImpl;
import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.math.NumberUtils;
/**
* Provides various utility functions to be used for version handling.
@@ -81,7 +82,7 @@ public class VersionUtils {
}
//String pattern = "^([0-9]+)\\.([0-9]+)\\.([0-9]+)\\.([0-9]+).*";
- String pattern = "([0-9]+).([0-9]+).([0-9]+).([0-9]+)?.*";
+ String pattern = "([0-9]+).([0-9]+).([0-9]+).?([0-9]+)?.*";
String[] version1Parts = version1.replaceAll(pattern, "$1.$2.$3.$4").split("\\.");
String[] version2Parts = version2.replaceAll(pattern, "$1.$2.$3.$4").split("\\.");
@@ -212,4 +213,47 @@ public class VersionUtils {
return versionParts[0] + "." + versionParts[1] + "." + versionParts[2];
}
+
+ /**
+ * Compares versions, using a build number after a dash separator, if one exists.
+ * This is useful when comparing repository versions with one another that include
+ * a build number
+ * @param version1
+ * the first version
+ * @param version2
+ * the second version
+ * @param places
+ * the number of decimal-separated places to compare
+ * @return
+ */
+ public static int compareVersionsWithBuild(String version1, String version2, int places) {
+ version1 = (null == version1) ? "0" : version1;
+ version2 = (null == version2) ? "0" : version2;
+
+ // check _exact_ equality
+ if (StringUtils.equals(version1, version2)) {
+ return 0;
+ }
+
+ int compare = VersionUtils.compareVersions(version1, version2, places);
+ if (0 != compare) {
+ return compare;
+ }
+
+ int v1 = 0;
+ int v2 = 0;
+ if (version1.indexOf('-') > -1) {
+ v1 = NumberUtils.toInt(version1.substring(version1.indexOf('-')), 0);
+ }
+
+ if (version2.indexOf('-') > -1) {
+ v2 = NumberUtils.toInt(version2.substring(version2.indexOf('-')), 0);
+ }
+
+ compare = v2 - v1;
+
+ return Integer.compare(compare, 0);
+
+ }
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationUtility.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationUtility.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationUtility.java
index 55f85a7..9712ca4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationUtility.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewDataMigrationUtility.java
@@ -37,7 +37,7 @@ public class ViewDataMigrationUtility {
/**
* The logger.
*/
- protected final static Logger LOG = LoggerFactory.getLogger(ViewDataMigrationUtility.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ViewDataMigrationUtility.class);
/**
* The View Registry.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
index ad1cc52..d729942 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
@@ -52,7 +52,7 @@ public class ViewExtractor {
/**
* The logger.
*/
- protected final static Logger LOG = LoggerFactory.getLogger(ViewExtractor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ViewExtractor.class);
// ----- ViewExtractor -----------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
index c50276e..f071262 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
@@ -205,7 +205,7 @@ public class ViewRegistry {
/**
* The logger.
*/
- protected final static Logger LOG = LoggerFactory.getLogger(ViewRegistry.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ViewRegistry.class);
/**
* View Data Migration Utility
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/view/ViewSubResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewSubResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewSubResourceProvider.java
index 5cceb1e..5f6201a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewSubResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewSubResourceProvider.java
@@ -47,6 +47,8 @@ import org.apache.ambari.server.controller.utilities.PropertyHelper;
import org.apache.ambari.server.orm.entities.ViewEntity;
import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
import org.apache.ambari.view.ReadRequest;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An SPI resource provider implementation used to adapt a
@@ -55,6 +57,8 @@ import org.apache.ambari.view.ReadRequest;
*/
public class ViewSubResourceProvider extends AbstractResourceProvider {
+ private static final Logger LOG = LoggerFactory.getLogger(ViewSubResourceProvider.class);
+
private static final String VIEW_NAME_PROPERTY_ID = "view_name";
private static final String VIEW_VERSION_PROPERTY_ID = "version";
private static final String INSTANCE_NAME_PROPERTY_ID = "instance_name";
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/java/org/apache/ambari/server/view/persistence/DataStoreImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/persistence/DataStoreImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/view/persistence/DataStoreImpl.java
index 5d4c758..aa48315 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/persistence/DataStoreImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/persistence/DataStoreImpl.java
@@ -119,7 +119,7 @@ public class DataStoreImpl implements DataStore {
/**
* The logger.
*/
- protected final static Logger LOG = LoggerFactory.getLogger(DataStoreImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DataStoreImpl.class);
/**
* Max length of entity string field.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/python/ambari-server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari-server.py b/ambari-server/src/main/python/ambari-server.py
index 8fcde77..188910e 100755
--- a/ambari-server/src/main/python/ambari-server.py
+++ b/ambari-server/src/main/python/ambari-server.py
@@ -412,6 +412,7 @@ def init_action_parser(action, parser):
parser.add_option('--skip-database-check', action="store_true", default=False, help="Skip database consistency check", dest="skip_database_check")
parser.add_option('--skip-view-extraction', action="store_true", default=False, help="Skip extraction of system views", dest="skip_view_extraction")
parser.add_option('--auto-fix-database', action="store_true", default=False, help="Automatically fix database consistency issues", dest="fix_database_consistency")
+ parser.add_option('--enable-lzo-under-gpl-license', action="store_true", default=False, help="Automatically accepts GPL license", dest="accept_gpl")
add_parser_options('--mpack',
default=None,
help="Specify the path for management pack to be installed/upgraded",
@@ -490,6 +491,7 @@ def init_setup_parser_options(parser):
other_group.add_option('--sqla-server-name', default=None, help="SQL Anywhere server name", dest="sqla_server_name")
other_group.add_option('--sidorsname', default="sname", help="Oracle database identifier type, Service ID/Service "
"Name sid|sname", dest="sid_or_sname")
+ other_group.add_option('--enable-lzo-under-gpl-license', action="store_true", default=False, help="Automatically accepts GPL license", dest="accept_gpl")
parser.add_option_group(other_group)
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/python/ambari_server/serverConfiguration.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/serverConfiguration.py b/ambari-server/src/main/python/ambari_server/serverConfiguration.py
index df89f79..f744fa0 100644
--- a/ambari-server/src/main/python/ambari_server/serverConfiguration.py
+++ b/ambari-server/src/main/python/ambari_server/serverConfiguration.py
@@ -40,6 +40,7 @@ from ambari_server.properties import Properties
from ambari_server.userInput import get_validated_string_input
from ambari_server.utils import compare_versions, locate_file, on_powerpc
from ambari_server.ambariPath import AmbariPath
+from ambari_server.userInput import get_YN_input
OS_VERSION = OSCheck().get_os_major_version()
@@ -195,6 +196,15 @@ SETUP_OR_UPGRADE_MSG = "- If this is a new setup, then run the \"ambari-server s
"- If this is an upgrade of an existing setup, run the \"ambari-server upgrade\" command.\n" \
"Refer to the Ambari documentation for more information on setup and upgrade."
+GPL_LICENSE_PROMPT_TEXT = """To download GPL licensed products like lzo you must accept the license terms below:
+LICENSE_LINE_1
+LICENSE_LINE_2
+LICENSE_LINE_3
+LICENSE_LINE_4
+LICENSE_LINE_5
+LICENSE_LINE_6
+Do you accept the GPL License Agreement [y/n] (y)?"""
+
DEFAULT_DB_NAME = "ambari"
SECURITY_KEYS_DIR = "security.server.keys_dir"
@@ -209,6 +219,8 @@ BOOTSTRAP_SETUP_AGENT_SCRIPT = 'bootstrap.setup_agent.script'
STACKADVISOR_SCRIPT = 'stackadvisor.script'
PID_DIR_PROPERTY = 'pid.dir'
SERVER_TMP_DIR_PROPERTY = "server.tmp.dir"
+GPL_LICENSE_ACCEPTED_PROPERTY = 'gpl.license.accepted'
+
REQUIRED_PROPERTIES = [OS_FAMILY_PROPERTY, OS_TYPE_PROPERTY, COMMON_SERVICES_PATH_PROPERTY, SERVER_VERSION_FILE_PATH,
WEBAPP_DIR_PROPERTY, STACK_LOCATION_KEY, SECURITY_KEYS_DIR, JDBC_DATABASE_NAME_PROPERTY,
NR_USER_PROPERTY, JAVA_HOME_PROPERTY, JDBC_PASSWORD_PROPERTY, SHARED_RESOURCES_DIR,
@@ -1139,6 +1151,23 @@ def update_ambari_env():
return -1
return 0
+
+def write_gpl_license_accepted():
+ properties = get_ambari_properties()
+ if properties == -1:
+ err = "Error getting ambari properties"
+ raise FatalException(-1, err)
+
+
+ if GPL_LICENSE_ACCEPTED_PROPERTY in properties.keys() and properties.get_property(GPL_LICENSE_ACCEPTED_PROPERTY).lower() == "true":
+ return True
+
+ result = get_YN_input(GPL_LICENSE_PROMPT_TEXT, True)
+
+ properties.process_pair(GPL_LICENSE_ACCEPTED_PROPERTY, str(result).lower())
+ update_properties(properties)
+
+ return result
def update_ambari_properties():
prev_conf_file = search_file(configDefaults.AMBARI_PROPERTIES_BACKUP_FILE, get_conf_dir())
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/python/ambari_server/serverSetup.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/serverSetup.py b/ambari-server/src/main/python/ambari_server/serverSetup.py
index 5c016c5..af45584 100644
--- a/ambari-server/src/main/python/ambari_server/serverSetup.py
+++ b/ambari-server/src/main/python/ambari_server/serverSetup.py
@@ -38,11 +38,11 @@ from ambari_commons.str_utils import compress_backslashes
from ambari_server.dbConfiguration import DBMSConfigFactory, TAR_GZ_ARCHIVE_TYPE, default_connectors_map, check_jdbc_drivers
from ambari_server.serverConfiguration import configDefaults, JDKRelease, \
get_ambari_properties, get_is_secure, get_is_persisted, get_java_exe_path, get_JAVA_HOME, get_missing_properties, \
- get_resources_location, get_value_from_properties, read_ambari_user, update_properties, validate_jdk, write_property, \
+ get_resources_location, get_value_from_properties, read_ambari_user, update_properties, validate_jdk, write_property, write_gpl_license_accepted,\
JAVA_HOME, JAVA_HOME_PROPERTY, JCE_NAME_PROPERTY, JDBC_RCA_URL_PROPERTY, JDBC_URL_PROPERTY, \
JDK_NAME_PROPERTY, JDK_RELEASES, NR_USER_PROPERTY, OS_FAMILY, OS_FAMILY_PROPERTY, OS_TYPE, OS_TYPE_PROPERTY, OS_VERSION, \
VIEWS_DIR_PROPERTY, JDBC_DATABASE_PROPERTY, JDK_DOWNLOAD_SUPPORTED_PROPERTY, JCE_DOWNLOAD_SUPPORTED_PROPERTY, SETUP_DONE_PROPERTIES, \
- STACK_JAVA_HOME_PROPERTY, STACK_JDK_NAME_PROPERTY, STACK_JCE_NAME_PROPERTY, STACK_JAVA_VERSION
+ STACK_JAVA_HOME_PROPERTY, STACK_JDK_NAME_PROPERTY, STACK_JCE_NAME_PROPERTY, STACK_JAVA_VERSION, GPL_LICENSE_ACCEPTED_PROPERTY
from ambari_server.serverUtils import is_server_runing
from ambari_server.setupSecurity import adjust_directory_permissions
from ambari_server.userInput import get_YN_input, get_validated_string_input
@@ -1182,6 +1182,10 @@ def setup(options):
err = 'Downloading or installing JDK failed: {0}. Exiting.'.format(e)
raise FatalException(e.code, err)
+ if not get_silent() or options.accept_gpl:
+ print 'Checking GPL software agreement...'
+ write_gpl_license_accepted()
+
print 'Completing setup...'
retcode = configure_os_settings()
if not retcode == 0:
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/python/ambari_server/serverUpgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/serverUpgrade.py b/ambari-server/src/main/python/ambari_server/serverUpgrade.py
index b0e0128..57a1ae0 100644
--- a/ambari-server/src/main/python/ambari_server/serverUpgrade.py
+++ b/ambari-server/src/main/python/ambari_server/serverUpgrade.py
@@ -28,6 +28,7 @@ import re
import glob
import optparse
import logging
+import ambari_simplejson as json
from ambari_commons.exceptions import FatalException
from ambari_commons.logging_utils import print_info_msg, print_warning_msg, print_error_msg, get_verbose
@@ -41,7 +42,7 @@ from ambari_server.serverConfiguration import configDefaults, get_resources_loca
update_database_name_property, get_admin_views_dir, get_views_dir, get_views_jars, \
AMBARI_PROPERTIES_FILE, IS_LDAP_CONFIGURED, LDAP_PRIMARY_URL_PROPERTY, RESOURCES_DIR_PROPERTY, \
SETUP_OR_UPGRADE_MSG, update_krb_jaas_login_properties, AMBARI_KRB_JAAS_LOGIN_FILE, get_db_type, update_ambari_env, \
- AMBARI_ENV_FILE, JDBC_DATABASE_PROPERTY, get_default_views_dir
+ AMBARI_ENV_FILE, JDBC_DATABASE_PROPERTY, get_default_views_dir, write_gpl_license_accepted
from ambari_server.setupSecurity import adjust_directory_permissions, \
generate_env, ensure_can_start_under_current_user
from ambari_server.utils import compare_versions
@@ -73,6 +74,11 @@ SCHEMA_UPGRADE_DEBUG = False
SUSPEND_START_MODE = False
+INSALLED_LZO_WITHOUT_GPL_TEXT = "By saying no, Ambari will not automatically install LZO on any new host in the cluster." + \
+"It is up to you to ensure LZO is installed and configured appropriately." + \
+"Without LZO being installed and configured data compressed with LZO will not be readable. " + \
+"Are you sure you want to proceed? [y/n] (n)?"
+
def load_stack_values(version, filename):
import xml.etree.ElementTree as ET
values = {}
@@ -146,6 +152,10 @@ def run_schema_upgrade(args):
environ = generate_env(args, ambari_user, current_user)
(retcode, stdout, stderr) = run_os_command(command, env=environ)
+ upgrade_response = json.loads(stdout)
+
+ check_gpl_license_approved(upgrade_response)
+
print_info_msg("Return code from schema upgrade command, retcode = {0}".format(str(retcode)), True)
if stdout:
print_info_msg("Console output from schema upgrade command:", True)
@@ -161,6 +171,12 @@ def run_schema_upgrade(args):
print_info_msg('Schema upgrade completed', True)
return retcode
+def check_gpl_license_approved(upgrade_response):
+ if 'lzo_enabled' not in upgrade_response or upgrade_response['lzo_enabled'].lower() != "true":
+ return
+
+ while not write_gpl_license_accepted() and not get_YN_input(INSALLED_LZO_WITHOUT_GPL_TEXT, False):
+ pass
#
# Upgrades the Ambari Server.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index 0ddade4..84349b6 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -97,11 +97,30 @@ CREATE TABLE configuration_base (
CONSTRAINT PK_configuration_base PRIMARY KEY (id)
);
+CREATE TABLE clusterconfig (
+ config_id BIGINT NOT NULL,
+ version_tag VARCHAR(255) NOT NULL,
+ version BIGINT NOT NULL,
+ type_name VARCHAR(255) NOT NULL,
+ cluster_id BIGINT NOT NULL,
+ stack_id BIGINT NOT NULL,
+ selected SMALLINT NOT NULL DEFAULT 0,
+ config_data VARCHAR(3000) NOT NULL,
+ config_attributes VARCHAR(3000),
+ create_timestamp BIGINT NOT NULL,
+ unmapped SMALLINT NOT NULL DEFAULT 0,
+ selected_timestamp BIGINT NOT NULL DEFAULT 0,
+ CONSTRAINT PK_clusterconfig PRIMARY KEY (config_id),
+ CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id),
+ CONSTRAINT FK_clusterconfig_stack_id FOREIGN KEY (stack_id) REFERENCES stack(stack_id),
+ CONSTRAINT UQ_config_type_tag UNIQUE (version_tag, type_name, cluster_id),
+ CONSTRAINT UQ_config_type_version UNIQUE (cluster_id, type_name, version));
+
CREATE TABLE ambari_configuration (
- id BIGINT NOT NULL,
- CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
- CONSTRAINT FK_ambari_conf_conf_base FOREIGN KEY (id) REFERENCES configuration_base (id)
-);
+ category_name VARCHAR(100) NOT NULL,
+ property_name VARCHAR(100) NOT NULL,
+ property_value VARCHAR(255) NOT NULL,
+ CONSTRAINT PK_ambari_configuration PRIMARY KEY (category_name, property_name));
CREATE TABLE hosts (
host_id BIGINT NOT NULL,
@@ -1018,12 +1037,23 @@ CREATE TABLE kerberos_principal (
CONSTRAINT PK_kerberos_principal PRIMARY KEY (principal_name)
);
+CREATE TABLE kerberos_keytab (
+ keytab_path VARCHAR(255) NOT NULL,
+ CONSTRAINT PK_krb_keytab_path_host_id PRIMARY KEY (keytab_path)
+);
+
+
CREATE TABLE kerberos_principal_host (
principal_name VARCHAR(255) NOT NULL,
+ keytab_path VARCHAR(255) NOT NULL,
+ is_distributed SMALLINT NOT NULL DEFAULT 0,
host_id BIGINT NOT NULL,
- CONSTRAINT PK_kerberos_principal_host PRIMARY KEY (principal_name, host_id),
+ CONSTRAINT PK_kerberos_principal_host PRIMARY KEY (principal_name, keytab_path, host_id),
CONSTRAINT FK_krb_pr_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id),
- CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name));
+ CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name),
+ CONSTRAINT FK_krb_pr_host_keytab_path FOREIGN KEY (keytab_path) REFERENCES kerberos_keytab (keytab_path)
+);
+
CREATE TABLE kerberos_descriptor
(
@@ -1274,8 +1304,6 @@ INSERT INTO ambari_sequences (sequence_name, sequence_value)
union all
select 'servicecomponent_version_id_seq', 0 FROM SYSIBM.SYSDUMMY1
union all
- select 'configuration_id_seq', 0 FROM SYSIBM.SYSDUMMY1
- union all
select 'hostcomponentdesiredstate_id_seq', 0 FROM SYSIBM.SYSDUMMY1;
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index c6a6e98..f9d5321 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -116,11 +116,30 @@ CREATE TABLE configuration_base (
CONSTRAINT PK_configuration_base PRIMARY KEY (id)
);
+CREATE TABLE clusterconfig (
+ config_id BIGINT NOT NULL,
+ version_tag VARCHAR(100) NOT NULL,
+ version BIGINT NOT NULL,
+ type_name VARCHAR(100) NOT NULL,
+ cluster_id BIGINT NOT NULL,
+ stack_id BIGINT NOT NULL,
+ selected SMALLINT NOT NULL DEFAULT 0,
+ config_data LONGTEXT NOT NULL,
+ config_attributes LONGTEXT,
+ create_timestamp BIGINT NOT NULL,
+ unmapped SMALLINT NOT NULL DEFAULT 0,
+ selected_timestamp BIGINT NOT NULL DEFAULT 0,
+ CONSTRAINT PK_clusterconfig PRIMARY KEY (config_id),
+ CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id),
+ CONSTRAINT FK_clusterconfig_stack_id FOREIGN KEY (stack_id) REFERENCES stack(stack_id),
+ CONSTRAINT UQ_config_type_tag UNIQUE (cluster_id, type_name, version_tag),
+ CONSTRAINT UQ_config_type_version UNIQUE (cluster_id, type_name, version));
+
CREATE TABLE ambari_configuration (
- id BIGINT NOT NULL,
- CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
- CONSTRAINT FK_ambari_conf_conf_base FOREIGN KEY (id) REFERENCES configuration_base (id)
-);
+ category_name VARCHAR(100) NOT NULL,
+ property_name VARCHAR(100) NOT NULL,
+ property_value VARCHAR(255) NOT NULL,
+ CONSTRAINT PK_ambari_configuration PRIMARY KEY (category_name, property_name));
CREATE TABLE hosts (
host_id BIGINT NOT NULL,
@@ -1035,12 +1054,21 @@ CREATE TABLE kerberos_principal (
CONSTRAINT PK_kerberos_principal PRIMARY KEY (principal_name)
);
+CREATE TABLE kerberos_keytab (
+ keytab_path VARCHAR(255) NOT NULL,
+ CONSTRAINT PK_krb_keytab_path_host_id PRIMARY KEY (keytab_path)
+);
+
CREATE TABLE kerberos_principal_host (
principal_name VARCHAR(255) NOT NULL,
+ keytab_path VARCHAR(255) NOT NULL,
+ is_distributed SMALLINT NOT NULL DEFAULT 0,
host_id BIGINT NOT NULL,
- CONSTRAINT PK_kerberos_principal_host PRIMARY KEY (principal_name, host_id),
+ CONSTRAINT PK_kerberos_principal_host PRIMARY KEY (principal_name, keytab_path, host_id),
CONSTRAINT FK_krb_pr_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id),
- CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name));
+ CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name),
+ CONSTRAINT FK_krb_pr_host_keytab_path FOREIGN KEY (keytab_path) REFERENCES kerberos_keytab (keytab_path)
+);
CREATE TABLE kerberos_descriptor
(
@@ -1230,7 +1258,6 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES
('remote_cluster_id_seq', 0),
('remote_cluster_service_id_seq', 0),
('servicecomponent_version_id_seq', 0),
- ('configuration_id_seq', 0),
('hostcomponentdesiredstate_id_seq', 0);
INSERT INTO adminresourcetype (resource_type_id, resource_type_name) VALUES