You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by su...@apache.org on 2017/05/30 17:10:54 UTC
[34/50] [abbrv] hadoop git commit: YARN-6405. Improve configuring
services through REST API. Contributed by Jian He
YARN-6405. Improve configuring services through REST API. Contributed by Jian He
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2fe06028
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2fe06028
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2fe06028
Branch: refs/heads/yarn-native-services
Commit: 2fe06028aa68d62a6c11980a58a6702b77084577
Parents: 1eb168b
Author: Billie Rinaldi <bi...@apache.org>
Authored: Wed Apr 26 08:44:38 2017 -0700
Committer: Sunil G <su...@apache.org>
Committed: Tue May 30 20:37:33 2017 +0530
----------------------------------------------------------------------
...RN-Simplified-V1-API-Layer-For-Services.yaml | 4 +-
.../api/impl/TestApplicationApiService.java | 26 +-
.../apache/slider/api/ServiceApiConstants.java | 50 +++
.../apache/slider/api/resource/ConfigFile.java | 58 +--
.../slider/api/resource/Configuration.java | 39 +-
.../org/apache/slider/client/SliderClient.java | 16 +-
.../org/apache/slider/common/SliderKeys.java | 4 +-
.../slider/core/launch/CommandLineBuilder.java | 15 -
.../docstore/ConfigurationResolver.java | 24 --
.../apache/slider/providers/ProviderRole.java | 32 +-
.../slider/providers/ProviderService.java | 4 +-
.../apache/slider/providers/ProviderUtils.java | 279 +++++++++-----
.../providers/docker/DockerProviderService.java | 50 +--
.../server/appmaster/RoleLaunchService.java | 24 +-
.../server/appmaster/SliderAppMaster.java | 77 ++--
.../actions/RegisterComponentInstance.java | 12 +-
.../actions/UnregisterComponentInstance.java | 16 +-
.../server/appmaster/metrics/SliderMetrics.java | 23 ++
.../appmaster/monkey/ChaosKillContainer.java | 2 +-
.../server/appmaster/rpc/SliderIPCService.java | 2 +-
.../slider/server/appmaster/state/AppState.java | 384 ++++++++++---------
.../server/appmaster/state/RoleInstance.java | 38 +-
.../server/appmaster/state/RoleStatus.java | 1 +
.../state/StateAccessForProviders.java | 1 +
.../slider/util/RestApiErrorMessages.java | 2 +-
.../org/apache/slider/util/ServiceApiUtil.java | 77 +++-
.../TestMockAppStateDynamicHistory.java | 8 +-
.../TestMockAppStateFlexDynamicRoles.java | 6 +-
.../appstate/TestMockAppStateUniqueNames.java | 77 +++-
.../TestMockContainerResourceAllocations.java | 11 +
.../model/mock/BaseMockAppStateTest.java | 9 +-
.../model/mock/MockProviderService.java | 4 +-
32 files changed, 850 insertions(+), 525 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/definition/YARN-Simplified-V1-API-Layer-For-Services.yaml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/definition/YARN-Simplified-V1-API-Layer-For-Services.yaml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/definition/YARN-Simplified-V1-API-Layer-For-Services.yaml
index e9239e4..82cc30f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/definition/YARN-Simplified-V1-API-Layer-For-Services.yaml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/definition/YARN-Simplified-V1-API-Layer-For-Services.yaml
@@ -347,10 +347,10 @@ definitions:
description: The absolute path that this configuration file should be mounted as, in the application container.
src_file:
type: string
- description: Required for type template. This provides the source location of the template which needs to be mounted as dest_file post property substitutions. Typically the src_file would point to a source controlled network accessible file maintained by tools like puppet, chef, etc.
+ description: This provides the source location of the configuration file, the content of which is dumped to dest_file post property substitutions, in the format as specified in type. Typically the src_file would point to a source controlled network accessible file maintained by tools like puppet, chef, or hdfs etc. Currently, only hdfs is supported.
props:
type: object
- description: A blob of key value pairs that will be dumped in the dest_file in the format as specified in type. If the type is template then the attribute src_file is mandatory and the src_file content is dumped to dest_file post property substitutions.
+ description: A blob of key value pairs that will be dumped in the dest_file in the format as specified in type. If src_file is specified, src_file content are dumped in the dest_file and these properties will overwrite, if any, existing properties in src_file or be added as new properties in src_file.
Container:
description: An instance of a running application container.
properties:
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/test/java/org/apache/hadoop/yarn/services/api/impl/TestApplicationApiService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/test/java/org/apache/hadoop/yarn/services/api/impl/TestApplicationApiService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/test/java/org/apache/hadoop/yarn/services/api/impl/TestApplicationApiService.java
index abad34e..6e077d2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/test/java/org/apache/hadoop/yarn/services/api/impl/TestApplicationApiService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/test/java/org/apache/hadoop/yarn/services/api/impl/TestApplicationApiService.java
@@ -63,7 +63,7 @@ public class TestApplicationApiService {
// no name
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
Assert.fail(EXCEPTION_PREFIX + "application with no name");
} catch (IllegalArgumentException e) {
Assert.assertEquals(ERROR_APPLICATION_NAME_INVALID, e.getMessage());
@@ -74,7 +74,7 @@ public class TestApplicationApiService {
for (String badName : badNames) {
app.setName(badName);
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
Assert.fail(EXCEPTION_PREFIX + "application with bad name " + badName);
} catch (IllegalArgumentException e) {
Assert.assertEquals(ERROR_APPLICATION_NAME_INVALID_FORMAT,
@@ -85,7 +85,7 @@ public class TestApplicationApiService {
// no artifact
app.setName("finance_home");
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
Assert.fail(EXCEPTION_PREFIX + "application with no artifact");
} catch (IllegalArgumentException e) {
Assert.assertEquals(ERROR_ARTIFACT_INVALID, e.getMessage());
@@ -95,7 +95,7 @@ public class TestApplicationApiService {
Artifact artifact = new Artifact();
app.setArtifact(artifact);
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
Assert.fail(EXCEPTION_PREFIX + "application with no artifact id");
} catch (IllegalArgumentException e) {
Assert.assertEquals(ERROR_ARTIFACT_ID_INVALID, e.getMessage());
@@ -106,7 +106,7 @@ public class TestApplicationApiService {
artifact.setId("app.io/hbase:facebook_0.2");
app.setNumberOfContainers(5l);
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
} catch (IllegalArgumentException e) {
logger.error("application attributes specified should be valid here", e);
Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage());
@@ -128,7 +128,7 @@ public class TestApplicationApiService {
// resource not specified
artifact.setId("docker.io/centos:centos7");
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
Assert.fail(EXCEPTION_PREFIX + "application with no resource");
} catch (IllegalArgumentException e) {
Assert.assertEquals(ERROR_RESOURCE_INVALID, e.getMessage());
@@ -138,7 +138,7 @@ public class TestApplicationApiService {
Resource res = new Resource();
app.setResource(res);
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
Assert.fail(EXCEPTION_PREFIX + "application with no memory");
} catch (IllegalArgumentException e) {
Assert.assertEquals(ERROR_RESOURCE_MEMORY_INVALID, e.getMessage());
@@ -149,7 +149,7 @@ public class TestApplicationApiService {
res.setMemory("100mb");
res.setCpus(-2);
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
Assert.fail(
EXCEPTION_PREFIX + "application with invalid no of cpups");
} catch (IllegalArgumentException e) {
@@ -159,17 +159,17 @@ public class TestApplicationApiService {
// number of containers not specified
res.setCpus(2);
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
Assert.fail(
EXCEPTION_PREFIX + "application with no container count");
} catch (IllegalArgumentException e) {
- Assert.assertEquals(ERROR_CONTAINERS_COUNT_INVALID, e.getMessage());
+ Assert.assertTrue(e.getMessage().contains(ERROR_CONTAINERS_COUNT_INVALID));
}
// specifying profile along with cpus/memory raises exception
res.setProfile("hbase_finance_large");
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
Assert.fail(EXCEPTION_PREFIX
+ "application with resource profile along with cpus/memory");
} catch (IllegalArgumentException e) {
@@ -182,7 +182,7 @@ public class TestApplicationApiService {
res.setCpus(null);
res.setMemory(null);
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
Assert.fail(EXCEPTION_PREFIX
+ "application with resource profile only - NOT SUPPORTED");
} catch (IllegalArgumentException e) {
@@ -198,7 +198,7 @@ public class TestApplicationApiService {
// everything valid here
app.setNumberOfContainers(5l);
try {
- ServiceApiUtil.validateApplicationPostPayload(app);
+ ServiceApiUtil.validateApplicationPayload(app, null);
} catch (IllegalArgumentException e) {
logger.error("application attributes specified should be valid here", e);
Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage());
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/ServiceApiConstants.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/ServiceApiConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/ServiceApiConstants.java
new file mode 100644
index 0000000..5f76f19
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/ServiceApiConstants.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.slider.api;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import static org.apache.slider.util.ServiceApiUtil.$;
+
+/**
+ * This class defines constants that can be used in input spec for
+ * variable substitutions
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Unstable
+public interface ServiceApiConstants {
+
+ // Constants for service
+ String SERVICE_NAME = $("SERVICE_NAME");
+
+ String SERVICE_NAME_LC = $("SERVICE_NAME.lc");
+
+ // Constants for component
+ String COMPONENT_NAME = $("COMPONENT_NAME");
+
+ String COMPONENT_NAME_LC = $("COMPONENT_NAME.lc");
+
+ String COMPONENT_INSTANCE_NAME = $("COMPONENT_INSTANCE_NAME");
+
+ // Constants for component instance
+ String COMPONENT_ID = $("COMPONENT_ID");
+
+ String CONTAINER_ID = $("CONTAINER_ID");
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/resource/ConfigFile.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/resource/ConfigFile.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/resource/ConfigFile.java
index cdc96b8..b4040b6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/resource/ConfigFile.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/resource/ConfigFile.java
@@ -17,20 +17,19 @@
package org.apache.slider.api.resource;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonValue;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
import java.io.Serializable;
+import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonValue;
-
/**
* A config file that needs to be created and made available as a volume in an
* application component container.
@@ -45,7 +44,7 @@ public class ConfigFile implements Serializable {
public enum TypeEnum {
XML("XML"), PROPERTIES("PROPERTIES"), JSON("JSON"), YAML("YAML"), TEMPLATE(
- "TEMPLATE"), ENV("ENV"), HADOOP_XML("HADOOP_XML");
+ "TEMPLATE"), ENV("ENV"), HADOOP_XML("HADOOP_XML"),;
private String value;
@@ -63,7 +62,18 @@ public class ConfigFile implements Serializable {
private TypeEnum type = null;
private String destFile = null;
private String srcFile = null;
- private Map<String, String> props = null;
+ private Map<String, String> props = new HashMap<>();
+
+ public ConfigFile copy() {
+ ConfigFile copy = new ConfigFile();
+ copy.setType(this.getType());
+ copy.setSrcFile(this.getSrcFile());
+ copy.setDestFile(this.getDestFile());
+ if (this.getProps() != null && !this.getProps().isEmpty()) {
+ copy.getProps().putAll(this.getProps());
+ }
+ return copy;
+ }
/**
* Config file in the standard format like xml, properties, json, yaml,
@@ -105,19 +115,20 @@ public class ConfigFile implements Serializable {
}
/**
- * TODO this probably is not required for non-template configs. It is now used as symlink for localization for non-template configs - we could infer the name from destFile instead
- *
- * Required for type template. This provides the source location of the
- * template which needs to be mounted as dest_file post property
- * substitutions. Typically the src_file would point to a source controlled
- * network accessible file maintained by tools like puppet, chef, etc.
+ * This provides the source location of the configuration file, the content
+ * of which is dumped to dest_file post property substitutions, in the format
+ * as specified in type. Typically the src_file would point to a source
+ * controlled network accessible file maintained by tools like puppet, chef,
+ * or hdfs etc. Currently, only hdfs is supported.
**/
public ConfigFile srcFile(String srcFile) {
this.srcFile = srcFile;
return this;
}
- @ApiModelProperty(example = "null", value = "Required for type template. This provides the source location of the template which needs to be mounted as dest_file post property substitutions. Typically the src_file would point to a source controlled network accessible file maintained by tools like puppet, chef, etc.")
+ @ApiModelProperty(example = "null", value = "This provides the source location of the configuration file, "
+ + "the content of which is dumped to dest_file post property substitutions, in the format as specified in type. "
+ + "Typically the src_file would point to a source controlled network accessible file maintained by tools like puppet, chef, or hdfs etc. Currently, only hdfs is supported.")
@JsonProperty("src_file")
public String getSrcFile() {
return srcFile;
@@ -129,17 +140,19 @@ public class ConfigFile implements Serializable {
}
/**
- * A blob of key value pairs that will be dumped in the dest_file in the
- * format as specified in type. If the type is template then the attribute
- * src_file is mandatory and the src_file content is dumped to dest_file post
- * property substitutions.
+ A blob of key value pairs that will be dumped in the dest_file in the format
+ as specified in type. If src_file is specified, src_file content are dumped
+ in the dest_file and these properties will overwrite, if any, existing
+ properties in src_file or be added as new properties in src_file.
**/
public ConfigFile props(Map<String, String> props) {
this.props = props;
return this;
}
- @ApiModelProperty(example = "null", value = "A blob of key value pairs that will be dumped in the dest_file in the format as specified in type. If the type is template then the attribute src_file is mandatory and the src_file content is dumped to dest_file post property substitutions.")
+ @ApiModelProperty(example = "null", value = "A blob of key value pairs that will be dumped in the dest_file in the format as specified in type."
+ + " If src_file is specified, src_file content are dumped in the dest_file and these properties will overwrite, if any,"
+ + " existing properties in src_file or be added as new properties in src_file.")
@JsonProperty("props")
public Map<String, String> getProps() {
return props;
@@ -175,8 +188,7 @@ public class ConfigFile implements Serializable {
ConfigFile configFile = (ConfigFile) o;
return Objects.equals(this.type, configFile.type)
&& Objects.equals(this.destFile, configFile.destFile)
- && Objects.equals(this.srcFile, configFile.srcFile)
- && Objects.equals(this.props, configFile.props);
+ && Objects.equals(this.srcFile, configFile.srcFile);
}
@Override
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/resource/Configuration.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/resource/Configuration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/resource/Configuration.java
index 7b3b93e..0df586c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/resource/Configuration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/api/resource/Configuration.java
@@ -17,8 +17,11 @@
package org.apache.slider.api.resource;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
+import org.apache.commons.lang.StringUtils;
import java.io.Serializable;
import java.util.ArrayList;
@@ -27,10 +30,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import org.apache.commons.lang.StringUtils;
-
/**
* Set of configuration properties that can be injected into the application
* components via envs, files and custom pluggable helper docker containers.
@@ -156,6 +155,13 @@ public class Configuration implements Serializable {
return properties.get(name.trim());
}
+ public String getEnv(String name) {
+ if (name == null) {
+ return null;
+ }
+ return env.get(name.trim());
+ }
+
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
@@ -198,4 +204,29 @@ public class Configuration implements Serializable {
}
return o.toString().replace("\n", "\n ");
}
+
+ /**
+ * Merge all properties and envs from that configuration to this configuration.
+ * For ConfigFiles, all properties and envs of that ConfigFile are merged into
+ * this ConfigFile.
+ */
+ public synchronized void mergeFrom(Configuration that) {
+ this.properties.putAll(that.getProperties());
+ this.env.putAll(that.getEnv());
+ Map<String, ConfigFile> thatMap = new HashMap<>();
+ for (ConfigFile file : that.getFiles()) {
+ thatMap.put(file.getDestFile(), file.copy());
+ }
+ for (ConfigFile thisFile : files) {
+ if(thatMap.containsKey(thisFile.getDestFile())) {
+ ConfigFile thatFile = thatMap.get(thisFile.getDestFile());
+ thisFile.getProps().putAll(thatFile.getProps());
+ thatMap.remove(thisFile.getDestFile());
+ }
+ }
+ // add remaining new files from that Configuration
+ for (ConfigFile thatFile : thatMap.values()) {
+ files.add(thatFile.copy());
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/SliderClient.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/SliderClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/SliderClient.java
index 2b0982f..7241374 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/SliderClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/SliderClient.java
@@ -653,7 +653,8 @@ public class SliderClient extends AbstractSliderLaunchedService implements RunSe
public ApplicationId actionCreate(Application application)
throws IOException, YarnException {
- ServiceApiUtil.validateApplicationPostPayload(application);
+ ServiceApiUtil.validateApplicationPayload(application,
+ sliderFileSystem.getFileSystem());
String appName = application.getName();
validateClusterName(appName);
verifyNoLiveApp(appName, "Create");
@@ -692,7 +693,7 @@ public class SliderClient extends AbstractSliderLaunchedService implements RunSe
boolean hasSliderAMLog4j =
addAMLog4jResource(appName, conf, localResources);
// copy jars to hdfs and add to localResources
- Path tempPath = addJarResource(appName, localResources);
+ addJarResource(appName, localResources);
// add keytab if in secure env
addKeytabResourceIfSecure(sliderFileSystem, localResources, conf, appName);
printLocalResources(localResources);
@@ -700,7 +701,7 @@ public class SliderClient extends AbstractSliderLaunchedService implements RunSe
//TODO SliderAMClientProvider#copyEnvVars
//TODO localResource putEnv
- Map<String, String> env = addAMEnv(conf, tempPath);
+ Map<String, String> env = addAMEnv(conf);
// create AM CLI
String cmdStr =
@@ -805,7 +806,7 @@ public class SliderClient extends AbstractSliderLaunchedService implements RunSe
return cmdStr;
}
- private Map<String, String> addAMEnv(Configuration conf, Path tempPath)
+ private Map<String, String> addAMEnv(Configuration conf)
throws IOException {
Map<String, String> env = new HashMap<>();
ClasspathConstructor classpath =
@@ -819,6 +820,13 @@ public class SliderClient extends AbstractSliderLaunchedService implements RunSe
if (jaas != null) {
env.put(HADOOP_JAAS_DEBUG, jaas);
}
+ if (!UserGroupInformation.isSecurityEnabled()) {
+ String userName = UserGroupInformation.getCurrentUser().getUserName();
+ log.info("Run as user " + userName);
+ // HADOOP_USER_NAME env is used by UserGroupInformation when log in
+ // This env makes AM run as this user
+ env.put("HADOOP_USER_NAME", userName);
+ }
env.putAll(getAmLaunchEnv(conf));
log.info("AM env: \n{}", stringifyMap(env));
return env;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/SliderKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/SliderKeys.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/SliderKeys.java
index 9a4fa6c..968a90b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/SliderKeys.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/SliderKeys.java
@@ -240,7 +240,7 @@ public interface SliderKeys extends SliderXmlConfKeys {
String STDERR_AM = "slider-err.txt";
String DEFAULT_GC_OPTS = "";
- String HADOOP_USER_NAME = ApplicationConstants.Environment.USER.toString();
+ String HADOOP_USER_NAME = "HADOOP_USER_NAME";
String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
String SLIDER_PASSPHRASE = "SLIDER_PASSPHRASE";
@@ -306,7 +306,7 @@ public interface SliderKeys extends SliderXmlConfKeys {
String CERT_FILE_LOCALIZATION_PATH = INFRA_RUN_SECURITY_DIR + "ca.crt";
String AM_CONFIG_GENERATION = "am.config.generation";
- String APP_CONF_DIR = "app/conf";
+ String APP_CONF_DIR = "conf";
String APP_RESOURCES = "application.resources";
String APP_RESOURCES_DIR = "app/resources";
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/CommandLineBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/CommandLineBuilder.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/CommandLineBuilder.java
index dbaa981..5ab0532 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/CommandLineBuilder.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/CommandLineBuilder.java
@@ -32,7 +32,6 @@ import java.util.List;
public class CommandLineBuilder {
protected final List<String> argumentList = new ArrayList<>(20);
-
/**
* Add an entry to the command list
* @param args arguments -these will be converted strings
@@ -44,15 +43,6 @@ public class CommandLineBuilder {
}
/**
- * Get the value at an offset
- * @param offset offset
- * @return the value at that point
- */
- public String elt(int offset) {
- return argumentList.get(offset);
- }
-
- /**
* Get the number of arguments
* @return an integer >= 0
*/
@@ -96,9 +86,4 @@ public class CommandLineBuilder {
public String build() {
return SliderUtils.join(argumentList, " ");
}
-
- public List<String> getArgumentList() {
- return argumentList;
- }
-
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/registry/docstore/ConfigurationResolver.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/registry/docstore/ConfigurationResolver.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/registry/docstore/ConfigurationResolver.java
deleted file mode 100644
index 88bac77..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/registry/docstore/ConfigurationResolver.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.core.registry.docstore;
-
-public class ConfigurationResolver {
-
-
-}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderRole.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderRole.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderRole.java
index cb39368..9cc48e1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderRole.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderRole.java
@@ -20,6 +20,13 @@ package org.apache.slider.providers;
import org.apache.slider.api.ResourceKeys;
import org.apache.slider.api.resource.Component;
+import org.apache.slider.server.appmaster.state.AppState;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Queue;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.atomic.AtomicLong;
/**
* Provider role and key for use in app requests.
@@ -36,7 +43,9 @@ public final class ProviderRole {
public final long placementTimeoutSeconds;
public final String labelExpression;
public final Component component;
-
+ public AtomicLong componentIdCounter = null;
+ public AppState appState;
+ public Queue<String> failedInstanceName = new ConcurrentLinkedQueue<String>();
public ProviderRole(String name, int id) {
this(name,
id,
@@ -69,7 +78,7 @@ public final class ProviderRole {
nodeFailureThreshold,
placementTimeoutSeconds,
labelExpression,
- new Component().name(name).numberOfContainers(0L));
+ new Component().name(name).numberOfContainers(0L), null);
}
/**
@@ -79,18 +88,13 @@ public final class ProviderRole {
* @param id ID. This becomes the YARN priority
* @param policy placement policy
* @param nodeFailureThreshold threshold for node failures (within a reset interval)
- * after which a node failure is considered an app failure
+ * after which a node failure is considered an app failure
* @param placementTimeoutSeconds for lax placement, timeout in seconds before
* @param labelExpression label expression for requests; may be null
*/
- public ProviderRole(String name,
- String group,
- int id,
- int policy,
- int nodeFailureThreshold,
- long placementTimeoutSeconds,
- String labelExpression,
- Component component) {
+ public ProviderRole(String name, String group, int id, int policy,
+ int nodeFailureThreshold, long placementTimeoutSeconds,
+ String labelExpression, Component component, AppState state) {
this.name = name;
if (group == null) {
this.group = name;
@@ -103,9 +107,13 @@ public final class ProviderRole {
this.placementTimeoutSeconds = placementTimeoutSeconds;
this.labelExpression = labelExpression;
this.component = component;
-
+ if(component.getUniqueComponentSupport()) {
+ componentIdCounter = new AtomicLong(0);
+ }
+ this.appState = state;
}
+
@Override
public boolean equals(Object o) {
if (this == o) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderService.java
index c31b2ac..7e92bfa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderService.java
@@ -26,6 +26,7 @@ import org.apache.slider.api.resource.Application;
import org.apache.slider.common.tools.SliderFileSystem;
import org.apache.slider.core.exceptions.SliderException;
import org.apache.slider.core.launch.ContainerLauncher;
+import org.apache.slider.server.appmaster.state.RoleInstance;
import org.apache.slider.server.appmaster.state.StateAccessForProviders;
import org.apache.slider.server.services.yarnregistry.YarnRegistryViewForProviders;
@@ -38,7 +39,8 @@ public interface ProviderService extends Service {
*/
void buildContainerLaunchContext(ContainerLauncher containerLauncher,
Application application, Container container, ProviderRole providerRole,
- SliderFileSystem sliderFileSystem) throws IOException, SliderException;
+ SliderFileSystem sliderFileSystem, RoleInstance roleInstance)
+ throws IOException, SliderException;
void setAMState(StateAccessForProviders stateAccessForProviders);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderUtils.java
index f8ec976..d384585 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/ProviderUtils.java
@@ -20,10 +20,10 @@ package org.apache.slider.providers;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
import org.apache.hadoop.registry.client.types.ServiceRecord;
import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies;
@@ -32,7 +32,6 @@ import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.slider.api.ClusterNode;
-import org.apache.slider.api.InternalKeys;
import org.apache.slider.api.OptionKeys;
import org.apache.slider.api.ResourceKeys;
import org.apache.slider.api.RoleKeys;
@@ -59,6 +58,7 @@ import org.slf4j.Logger;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
@@ -66,8 +66,12 @@ import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.concurrent.ExecutionException;
import java.util.regex.Pattern;
+import static org.apache.slider.api.ServiceApiConstants.*;
+import static org.apache.slider.util.ServiceApiUtil.$;
+
/**
* This is a factoring out of methods handy for providers. It's bonded to a log
* at construction time.
@@ -89,7 +93,7 @@ public class ProviderUtils implements RoleKeys, SliderKeys {
* Add oneself to the classpath. This does not work
* on minicluster test runs where the JAR is not built up.
* @param providerResources map of provider resources to add these entries to
- * @param provider provider to add
+ * @param providerClass provider to add
* @param jarName name of the jar to use
* @param sliderFileSystem target filesystem
* @param tempPath path in the cluster FS for temp files
@@ -157,14 +161,19 @@ public class ProviderUtils implements RoleKeys, SliderKeys {
libDir, libLocalSrcDir);
}
- // Build key -> value map
- // value will be substituted by corresponding data in tokenMap
- public Map<String, String> substituteConfigs(Map<String, String> configs,
+ public static String substituteStrWithTokens(String content,
+ Map<String, String> tokensForSubstitution) {
+ for (Map.Entry<String, String> token : tokensForSubstitution.entrySet()) {
+ content =
+ content.replaceAll(Pattern.quote(token.getKey()), token.getValue());
+ }
+ return content;
+ }
+
+ // configs will be substituted by corresponding env in tokenMap
+ public void substituteMapWithTokens(Map<String, String> configs,
Map<String, String> tokenMap) {
- String format = "${%s}";
- Map<String, String> filteredOptions = new HashMap<>();
for (Map.Entry<String, String> entry : configs.entrySet()) {
- String key = entry.getKey();
String value = entry.getValue();
if (tokenMap != null) {
for (Map.Entry<String, String> token : tokenMap.entrySet()) {
@@ -172,10 +181,8 @@ public class ProviderUtils implements RoleKeys, SliderKeys {
value.replaceAll(Pattern.quote(token.getKey()), token.getValue());
}
}
- filteredOptions.put(String.format(format, key), value);
+ entry.setValue(value);
}
-
- return filteredOptions;
}
/**
@@ -249,78 +256,95 @@ public class ProviderUtils implements RoleKeys, SliderKeys {
}
}
+ public static void addEnvForSubstitution(Map<String, String> env,
+ Map<String, String> tokensForSubstitution) {
+ if (env == null || env.isEmpty() || tokensForSubstitution == null
+ || tokensForSubstitution.isEmpty()) {
+ return;
+ }
+ for (Map.Entry<String, String> entry : env.entrySet()) {
+ tokensForSubstitution.put($(entry.getKey()), entry.getValue());
+ }
+ }
// 1. Create all config files for a component on hdfs for localization
// 2. Add the config file to localResource
- //TODO handle Template format config file
- public void createConfigFileAndAddLocalResource(ContainerLauncher launcher,
- SliderFileSystem fs, Component component,
- Map<String, String> tokensForSubstitution,
- StateAccessForProviders amState) throws IOException {
+ public synchronized void createConfigFileAndAddLocalResource(
+ ContainerLauncher launcher, SliderFileSystem fs, Component component,
+ Map<String, String> tokensForSubstitution, RoleInstance roleInstance)
+ throws IOException {
Path compDir =
new Path(new Path(fs.getAppDir(), "components"), component.getName());
- if (!fs.getFileSystem().exists(compDir)) {
- fs.getFileSystem().mkdirs(compDir,
+ Path compInstanceDir =
+ new Path(compDir, roleInstance.getCompInstanceName());
+ if (!fs.getFileSystem().exists(compInstanceDir)) {
+ fs.getFileSystem().mkdirs(compInstanceDir,
new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
- log.info("Creating component dir: " + compDir);
+ roleInstance.compInstanceDir = compInstanceDir;
+ log.info("Creating component instance dir: " + compInstanceDir);
} else {
- log.info("Component conf dir already exists: " + compDir);
- return;
+ log.info("Component instance conf dir already exists: " + compInstanceDir);
}
- for (ConfigFile configFile : component.getConfiguration().getFiles()) {
- String fileName = configFile.getSrcFile();
+ // add Configuration#env into tokens substitution
+ addEnvForSubstitution(component.getConfiguration().getEnv(),
+ tokensForSubstitution);
+
+ log.info("Tokens substitution for component: " + roleInstance
+ .getCompInstanceName() + System.lineSeparator()
+ + tokensForSubstitution);
+
+ for (ConfigFile originalFile : component.getConfiguration().getFiles()) {
+ ConfigFile configFile = originalFile.copy();
+ String fileName = new Path(configFile.getDestFile()).getName();
+
// substitute file name
for (Map.Entry<String, String> token : tokensForSubstitution.entrySet()) {
configFile.setDestFile(configFile.getDestFile()
.replaceAll(Pattern.quote(token.getKey()), token.getValue()));
}
- // substitute configs
- substituteConfigs(configFile.getProps(), tokensForSubstitution);
-
- // write configs onto hdfs
- PublishedConfiguration publishedConfiguration =
- new PublishedConfiguration(fileName,
- configFile.getProps().entrySet());
- Path remoteFile = new Path(compDir, fileName);
+
+ Path remoteFile = new Path(compInstanceDir, fileName);
if (!fs.getFileSystem().exists(remoteFile)) {
- synchronized (this) {
- if (!fs.getFileSystem().exists(remoteFile)) {
- PublishedConfigurationOutputter configurationOutputter =
- PublishedConfigurationOutputter.createOutputter(
- ConfigFormat.resolve(configFile.getType().toString()),
- publishedConfiguration);
- FSDataOutputStream os = null;
- try {
- os = fs.getFileSystem().create(remoteFile);
- configurationOutputter.save(os);
- os.flush();
- log.info("Created config file on hdfs: " + remoteFile);
- } finally {
- IOUtils.closeStream(os);
- }
+ log.info("Saving config file on hdfs for component " + roleInstance
+ .getCompInstanceName() + ": " + configFile);
+
+ if (configFile.getSrcFile() != null) {
+ // Load config file template
+ switch (configFile.getType()) {
+ case HADOOP_XML:
+ // Hadoop_xml_template
+ resolveHadoopXmlTemplateAndSaveOnHdfs(fs.getFileSystem(),
+ tokensForSubstitution, configFile, remoteFile, roleInstance);
+ break;
+ case TEMPLATE:
+ // plain-template
+ resolvePlainTemplateAndSaveOnHdfs(fs.getFileSystem(),
+ tokensForSubstitution, configFile, remoteFile, roleInstance);
+ break;
+ default:
+ log.info("Not supporting loading src_file for " + configFile);
+ break;
}
+ } else {
+ // non-template
+ resolveNonTemplateConfigsAndSaveOnHdfs(fs, tokensForSubstitution,
+ roleInstance, configFile, fileName, remoteFile);
}
}
- // Publish configs
- amState.getPublishedSliderConfigurations()
- .put(configFile.getSrcFile(), publishedConfiguration);
-
// Add resource for localization
LocalResource configResource =
fs.createAmResource(remoteFile, LocalResourceType.FILE);
File destFile = new File(configFile.getDestFile());
- //TODO why to we need to differetiate RESOURCE_DIR vs APP_CONF_DIR
+ String symlink = APP_CONF_DIR + "/" + fileName;
if (destFile.isAbsolute()) {
- String symlink = RESOURCE_DIR + "/" + fileName;
launcher.addLocalResource(symlink, configResource,
configFile.getDestFile());
log.info("Add config file for localization: " + symlink + " -> "
+ configResource.getResource().getFile() + ", dest mount path: "
+ configFile.getDestFile());
} else {
- String symlink = APP_CONF_DIR + "/" + fileName;
launcher.addLocalResource(symlink, configResource);
log.info("Add config file for localization: " + symlink + " -> "
+ configResource.getResource().getFile());
@@ -328,23 +352,110 @@ public class ProviderUtils implements RoleKeys, SliderKeys {
}
}
+ private void resolveNonTemplateConfigsAndSaveOnHdfs(SliderFileSystem fs,
+ Map<String, String> tokensForSubstitution, RoleInstance roleInstance,
+ ConfigFile configFile, String fileName, Path remoteFile)
+ throws IOException {
+ // substitute non-template configs
+ substituteMapWithTokens(configFile.getProps(), tokensForSubstitution);
+
+ // write configs onto hdfs
+ PublishedConfiguration publishedConfiguration =
+ new PublishedConfiguration(fileName,
+ configFile.getProps().entrySet());
+ if (!fs.getFileSystem().exists(remoteFile)) {
+ PublishedConfigurationOutputter configurationOutputter =
+ PublishedConfigurationOutputter.createOutputter(
+ ConfigFormat.resolve(configFile.getType().toString()),
+ publishedConfiguration);
+ try (FSDataOutputStream os = fs.getFileSystem().create(remoteFile)) {
+ configurationOutputter.save(os);
+ os.flush();
+ }
+ } else {
+ log.info("Component instance = " + roleInstance.getCompInstanceName()
+ + ", config file already exists: " + remoteFile);
+ }
+ }
+
+ // 1. substitute config template - only handle hadoop_xml format
+ // 2. save on hdfs
+ @SuppressWarnings("unchecked")
+ private void resolveHadoopXmlTemplateAndSaveOnHdfs(FileSystem fs,
+ Map<String, String> tokensForSubstitution, ConfigFile configFile,
+ Path remoteFile, RoleInstance roleInstance) throws IOException {
+ Map<String, String> conf;
+ try {
+ conf = (Map<String, String>) roleInstance.providerRole.
+ appState.configFileCache.get(configFile);
+ } catch (ExecutionException e) {
+ log.info("Failed to load config file: " + configFile, e);
+ return;
+ }
+ // make a copy for substitution
+ org.apache.hadoop.conf.Configuration confCopy =
+ new org.apache.hadoop.conf.Configuration(false);
+ for (Map.Entry<String, String> entry : conf.entrySet()) {
+ confCopy.set(entry.getKey(), entry.getValue());
+ }
+ // substitute properties
+ for (Map.Entry<String, String> entry : configFile.getProps().entrySet()) {
+ confCopy.set(entry.getKey(), entry.getValue());
+ }
+ // substitute env variables
+ for (Map.Entry<String, String> entry : confCopy) {
+ String val = entry.getValue();
+ if (val != null) {
+ for (Map.Entry<String, String> token : tokensForSubstitution
+ .entrySet()) {
+ val = val.replaceAll(Pattern.quote(token.getKey()), token.getValue());
+ confCopy.set(entry.getKey(), val);
+ }
+ }
+ }
+ // save on hdfs
+ try (OutputStream output = fs.create(remoteFile)) {
+ confCopy.writeXml(output);
+ log.info("Reading config from: " + configFile.getSrcFile()
+ + ", writing to: " + remoteFile);
+ }
+ }
+
+ // 1) read the template as a string
+ // 2) do token substitution
+ // 3) save on hdfs
+ private void resolvePlainTemplateAndSaveOnHdfs(FileSystem fs,
+ Map<String, String> tokensForSubstitution, ConfigFile configFile,
+ Path remoteFile, RoleInstance roleInstance) {
+ String content;
+ try {
+ content = (String) roleInstance.providerRole.appState.configFileCache
+ .get(configFile);
+ } catch (ExecutionException e) {
+ log.info("Failed to load config file: " + configFile, e);
+ return;
+ }
+ // substitute tokens
+ content = substituteStrWithTokens(content, tokensForSubstitution);
+
+ try (OutputStream output = fs.create(remoteFile)) {
+ org.apache.commons.io.IOUtils.write(content, output);
+ } catch (IOException e) {
+ log.info("Failed to create " + remoteFile, e);
+ }
+ }
+
/**
* Get initial token map to be substituted into config values.
* @param appConf app configurations
- * @param componentName component name
- * @param componentGroup component group
- * @param containerId container ID
* @param clusterName app name
* @return tokens to replace
*/
- public Map<String, String> getStandardTokenMap(
- Configuration appConf, Configuration componentConf, String componentName,
- String componentGroup, String containerId, String clusterName) {
+ public Map<String, String> getStandardTokenMap(Configuration appConf,
+ RoleInstance roleInstance, String clusterName) {
Map<String, String> tokens = new HashMap<>();
- if (containerId != null) {
- tokens.put("${CONTAINER_ID}", containerId);
- }
+
String nnuri = appConf.getProperty("fs.defaultFS");
if (nnuri != null && !nnuri.isEmpty()) {
tokens.put("${NN_URI}", nnuri);
@@ -352,34 +463,13 @@ public class ProviderUtils implements RoleKeys, SliderKeys {
}
tokens.put("${ZK_HOST}", appConf.getProperty(OptionKeys.ZOOKEEPER_HOSTS));
tokens.put("${DEFAULT_ZK_PATH}", appConf.getProperty(OptionKeys.ZOOKEEPER_PATH));
- String prefix = componentConf.getProperty(ROLE_PREFIX);
- String dataDirSuffix = "";
- if (prefix == null) {
- prefix = "";
- } else {
- dataDirSuffix = "_" + SliderUtils.trimPrefix(prefix);
- }
- tokens.put("${DEFAULT_DATA_DIR}",
- appConf.getProperty(InternalKeys.INTERNAL_DATA_DIR_PATH)
- + dataDirSuffix);
- tokens.put("${JAVA_HOME}", appConf.getProperty(JAVA_HOME));
- tokens.put("${COMPONENT_NAME}", componentName);
- tokens.put("${COMPONENT_NAME.lc}", componentName.toLowerCase());
- tokens.put("${COMPONENT_PREFIX}", prefix);
- tokens.put("${COMPONENT_PREFIX.lc}", prefix.toLowerCase());
- if (!componentName.equals(componentGroup) &&
- componentName.startsWith(componentGroup)) {
- tokens.put("${COMPONENT_ID}",
- componentName.substring(componentGroup.length()));
- }
- if (clusterName != null) {
- tokens.put("${CLUSTER_NAME}", clusterName);
- tokens.put("${CLUSTER_NAME.lc}", clusterName.toLowerCase());
- tokens.put("${APP_NAME}", clusterName);
- tokens.put("${APP_NAME.lc}", clusterName.toLowerCase());
- }
- tokens.put("${APP_COMPONENT_NAME}", componentName);
- tokens.put("${APP_COMPONENT_NAME.lc}", componentName.toLowerCase());
+ tokens.put(SERVICE_NAME_LC, clusterName.toLowerCase());
+ tokens.put(SERVICE_NAME, clusterName);
+ tokens.put(COMPONENT_NAME, roleInstance.role);
+ tokens.put(COMPONENT_NAME_LC, roleInstance.role.toLowerCase());
+ tokens.put(COMPONENT_INSTANCE_NAME, roleInstance.getCompInstanceName());
+ tokens.put(CONTAINER_ID, roleInstance.getContainerId().toString());
+ tokens.put(COMPONENT_ID, String.valueOf(roleInstance.componentId));
return tokens;
}
@@ -388,7 +478,7 @@ public class ProviderUtils implements RoleKeys, SliderKeys {
* @param tokens existing tokens
* @param amState access to AM state
*/
- public void addRoleHostTokens(Map<String, String> tokens,
+ public void addComponentHostTokens(Map<String, String> tokens,
StateAccessForProviders amState) {
if (amState == null) {
return;
@@ -398,7 +488,7 @@ public class ProviderUtils implements RoleKeys, SliderKeys {
String tokenName = entry.getKey().toUpperCase(Locale.ENGLISH) + "_HOST";
String hosts = StringUtils .join(",",
getHostsList(entry.getValue().values(), true));
- tokens.put("${" + tokenName + "}", hosts);
+ tokens.put($(tokenName), hosts);
}
}
@@ -443,7 +533,8 @@ public class ProviderUtils implements RoleKeys, SliderKeys {
// create and publish updated service record (including hostname & ip)
ServiceRecord record = new ServiceRecord();
record.set(YarnRegistryAttributes.YARN_ID, containerId);
- record.description = roleName.replaceAll("_", "-");
+ String componentInstanceName = role.getCompInstanceName();
+ record.description = componentInstanceName.replaceAll("_", "-");
record.set(YarnRegistryAttributes.YARN_PERSISTENCE,
PersistencePolicies.CONTAINER);
// TODO: use constants from YarnRegistryAttributes
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/docker/DockerProviderService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/docker/DockerProviderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/docker/DockerProviderService.java
index 511f7bc..93a481c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/docker/DockerProviderService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/providers/docker/DockerProviderService.java
@@ -46,6 +46,8 @@ import java.io.IOException;
import java.util.Map;
import java.util.Map.Entry;
+import static org.apache.slider.util.ServiceApiUtil.$;
+
public class DockerProviderService extends AbstractService
implements ProviderService, DockerKeys, SliderKeys {
@@ -70,14 +72,11 @@ public class DockerProviderService extends AbstractService
this.yarnRegistry = yarnRegistry;
}
+
public void buildContainerLaunchContext(ContainerLauncher launcher,
Application application, Container container, ProviderRole providerRole,
- SliderFileSystem fileSystem)
+ SliderFileSystem fileSystem, RoleInstance roleInstance)
throws IOException, SliderException {
-
- String roleName = providerRole.name;
- String roleGroup = providerRole.group;
-
Component component = providerRole.component;
launcher.setYarnDockerMode(true);
launcher.setDockerImage(component.getArtifact().getId());
@@ -86,16 +85,12 @@ public class DockerProviderService extends AbstractService
launcher.setRunPrivilegedContainer(component.getRunPrivilegedContainer());
// Generate tokens (key-value pair) for config substitution.
- Map<String, String> standardTokens = providerUtils
- .getStandardTokenMap(application.getConfiguration(),
- component.getConfiguration(), roleName, roleGroup,
- container.getId().toString(), application.getName());
- Map<String, String> tokensForSubstitution = providerUtils.substituteConfigs(
- component.getConfiguration().getProperties(), standardTokens);
-
- tokensForSubstitution.putAll(standardTokens);
+ // Get pre-defined tokens
+ Map<String, String> tokensForSubstitution = providerUtils
+ .getStandardTokenMap(application.getConfiguration(), roleInstance,
+ application.getName());
- // Set the environment variables
+ // Set the environment variables in launcher
launcher.putEnv(SliderUtils
.buildEnvMap(component.getConfiguration(), tokensForSubstitution));
launcher.setEnv("WORK_DIR", ApplicationConstants.Environment.PWD.$());
@@ -108,33 +103,26 @@ public class DockerProviderService extends AbstractService
launcher.setEnv("LANGUAGE", "en_US.UTF-8");
for (Entry<String, String> entry : launcher.getEnv().entrySet()) {
- tokensForSubstitution.put("${" + entry.getKey() + "}", entry.getValue());
- }
-
- providerUtils.addRoleHostTokens(tokensForSubstitution, amState);
-
- log.info("Token for substitution: " + tokensForSubstitution);
-
- if (SliderUtils.isHadoopClusterSecure(getConfig())) {
- //TODO localize key tabs, WHY is this code needed ? WHY DOES CONTAINER REQUIRE AM KEYTAB??
- providerUtils.localizeServiceKeytabs(launcher, fileSystem, application);
+ tokensForSubstitution.put($(entry.getKey()), entry.getValue());
}
+ providerUtils.addComponentHostTokens(tokensForSubstitution, amState);
// create config file on hdfs and add local resource
providerUtils.createConfigFileAndAddLocalResource(launcher, fileSystem,
- component, tokensForSubstitution, amState);
+ component, tokensForSubstitution, roleInstance);
+ // substitute launch command
+ String launchCommand = ProviderUtils
+ .substituteStrWithTokens(component.getLaunchCommand(),
+ tokensForSubstitution);
CommandLineBuilder operation = new CommandLineBuilder();
- operation.add(component.getLaunchCommand());
- operation.add("> " + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/"
- + OUT_FILE + " 2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/"
- + ERR_FILE);
+ operation.add(launchCommand);
+ operation.addOutAndErrFiles(OUT_FILE, ERR_FILE);
launcher.addCommand(operation.build());
// publish exports
- // TODO move this to app level, no need to do this for every container launch
providerUtils
- .substituteConfigs(application.getQuicklinks(), tokensForSubstitution);
+ .substituteMapWithTokens(application.getQuicklinks(), tokensForSubstitution);
PublishedConfiguration pubconf = new PublishedConfiguration(QUICK_LINKS,
application.getQuicklinks().entrySet());
amState.getPublishedSliderConfigurations().put(QUICK_LINKS, pubconf);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/RoleLaunchService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/RoleLaunchService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/RoleLaunchService.java
index 70eab71..c53349f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/RoleLaunchService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/RoleLaunchService.java
@@ -150,24 +150,26 @@ public class RoleLaunchService
containerLauncher.setupUGI();
containerLauncher.putEnv(envVars);
- log.info("Launching container {} into RoleName = {}, RoleGroup = {}",
- container.getId(), role.name, role.group);
-
- provider.buildContainerLaunchContext(containerLauncher, application,
- container, role, fs);
-
- RoleInstance instance = new RoleInstance(container);
+ String failedInstance = role.failedInstanceName.poll();
+ RoleInstance instance;
+ if (failedInstance != null) {
+ instance = new RoleInstance(container, role, failedInstance);
+ } else {
+ instance = new RoleInstance(container, role);
+ }
String[] envDescription = containerLauncher.dumpEnvToString();
-
String commandsAsString = containerLauncher.getCommandsAsString();
+ log.info("Launching container {} for component instance = {}",
+ container.getId(), instance.getCompInstanceName());
log.info("Starting container with command: {}", commandsAsString);
-
- instance.providerRole = role;
instance.command = commandsAsString;
instance.role = role.name;
- instance.group = role.group;
instance.roleId = role.id;
instance.environment = envDescription;
+
+ provider.buildContainerLaunchContext(containerLauncher, application,
+ container, role, fs, instance);
+
long delay = role.component.getConfiguration()
.getPropertyLong(AgentKeys.KEY_CONTAINER_LAUNCH_DELAY, 0);
long maxDelay = getConfig()
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/SliderAppMaster.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/SliderAppMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/SliderAppMaster.java
index 7473dab..1f379ea 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/SliderAppMaster.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/SliderAppMaster.java
@@ -25,6 +25,7 @@ import com.google.protobuf.BlockingService;
import org.apache.commons.collections.CollectionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
@@ -86,6 +87,7 @@ import org.apache.slider.api.RoleKeys;
import org.apache.slider.api.proto.Messages;
import org.apache.slider.api.proto.SliderClusterAPI;
import org.apache.slider.api.resource.Application;
+import org.apache.slider.api.resource.Component;
import org.apache.slider.common.SliderExitCodes;
import org.apache.slider.common.SliderKeys;
import org.apache.slider.common.params.AbstractActionArgs;
@@ -109,7 +111,6 @@ import org.apache.slider.core.main.ServiceLauncher;
import org.apache.slider.core.persist.JsonSerDeser;
import org.apache.slider.core.registry.info.CustomRegistryConstants;
import org.apache.slider.providers.ProviderCompleted;
-import org.apache.slider.providers.ProviderRole;
import org.apache.slider.providers.ProviderService;
import org.apache.slider.providers.SliderProviderFactory;
import org.apache.slider.server.appmaster.actions.ActionHalt;
@@ -136,7 +137,6 @@ import org.apache.slider.server.appmaster.operations.RMOperationHandler;
import org.apache.slider.server.appmaster.rpc.RpcBinder;
import org.apache.slider.server.appmaster.rpc.SliderClusterProtocolPBImpl;
import org.apache.slider.server.appmaster.rpc.SliderIPCService;
-import org.apache.slider.server.appmaster.security.SecurityConfiguration;
import org.apache.slider.server.appmaster.state.AppState;
import org.apache.slider.server.appmaster.state.AppStateBindingInfo;
import org.apache.slider.server.appmaster.state.ContainerAssignment;
@@ -170,7 +170,6 @@ import java.nio.ByteBuffer;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
@@ -701,10 +700,7 @@ public class SliderAppMaster extends AbstractSliderLaunchedService
registryOperations = startRegistryOperationsService();
log.info(registryOperations.toString());
- //build the role map
- List<ProviderRole> providerRoles = Collections.EMPTY_LIST;
// Start up the WebApp and track the URL for it
-
// Web service endpoints: initialize
WebAppApiImpl webAppApi =
new WebAppApiImpl(
@@ -815,7 +811,6 @@ public class SliderAppMaster extends AbstractSliderLaunchedService
//build the instance
AppStateBindingInfo binding = new AppStateBindingInfo();
binding.serviceConfig = serviceConf;
- binding.roles = providerRoles;
binding.fs = fs.getFileSystem();
binding.historyPath = historyDir;
binding.liveContainers = liveContainers;
@@ -873,6 +868,11 @@ public class SliderAppMaster extends AbstractSliderLaunchedService
scheduleFailureWindowResets(application.getConfiguration());
scheduleEscalation(application.getConfiguration());
+ for (Component component : application.getComponents()) {
+ // Merge app-level configuration into component level configuration
+ component.getConfiguration().mergeFrom(application.getConfiguration());
+ }
+
try {
// schedule YARN Registry registration
queue(new ActionRegisterServiceInstance(appName, appid, application));
@@ -1170,22 +1170,22 @@ public class SliderAppMaster extends AbstractSliderLaunchedService
* Handler for {@link RegisterComponentInstance action}
* Register/re-register an ephemeral container that is already in the application state
* @param id the component
- * @param description component description
- * @param type component type
* @return true if the component is registered
*/
- public boolean registerComponent(ContainerId id, String description,
- String type) throws IOException {
+ public boolean registerComponent(ContainerId id, RoleInstance roleInstance)
+ throws IOException {
RoleInstance instance = appState.getOwnedContainer(id);
if (instance == null) {
return false;
}
// this is where component registrations go
- log.info("Registering component {}", id);
String cid = RegistryPathUtils.encodeYarnID(id.toString());
ServiceRecord record = new ServiceRecord();
record.set(YarnRegistryAttributes.YARN_ID, cid);
- record.description = description;
+
+ record.description = roleInstance.getCompInstanceName();
+ log.info("Registering component " + roleInstance.getCompInstanceName()
+ + ", containerId = " + id);
record.set(YarnRegistryAttributes.YARN_PERSISTENCE,
PersistencePolicies.CONTAINER);
setUserProvidedServiceRecordAttributes(
@@ -1194,7 +1194,7 @@ public class SliderAppMaster extends AbstractSliderLaunchedService
yarnRegistryOperations.putComponent(cid, record);
} catch (IOException e) {
log.warn("Failed to register container {}/{}: {}",
- id, description, e, e);
+ id, roleInstance.role, e, e);
return false;
}
org.apache.slider.api.resource.Container container =
@@ -1203,6 +1203,8 @@ public class SliderAppMaster extends AbstractSliderLaunchedService
container.setLaunchTime(new Date());
container.setState(org.apache.slider.api.resource.ContainerState.INIT);
container.setBareHost(instance.host);
+ // TODO differentiate component name and component instance name ?
+ container.setComponentName(roleInstance.getCompInstanceName());
instance.providerRole.component.addContainer(container);
if (timelineServiceEnabled) {
@@ -1228,20 +1230,38 @@ public class SliderAppMaster extends AbstractSliderLaunchedService
*
* unregister a component. At the time this message is received,
* the component may not have been registered
- * @param id the component
*/
- public void unregisterComponent(ContainerId id) {
- log.info("Unregistering component {}", id);
+ public void unregisterComponent(RoleInstance roleInstance) {
+ ContainerId containerId = roleInstance.getContainerId();
+ log.info(
+ "Unregistering component instance " + roleInstance.getCompInstanceName()
+ + ", ContainerId = " + containerId);
if (yarnRegistryOperations == null) {
- log.warn("Processing unregister component event before initialization " +
- "completed; init flag ={}", initCompleted);
+ log.warn("Processing unregister component event before initialization "
+ + "completed; init flag ={}", initCompleted);
return;
}
- String cid = RegistryPathUtils.encodeYarnID(id.toString());
+ String cid = RegistryPathUtils.encodeYarnID(containerId.toString());
try {
yarnRegistryOperations.deleteComponent(cid);
} catch (IOException e) {
- log.warn("Failed to delete container {} : {}", id, e, e);
+ log.warn("Failed to delete container {} : {}", containerId, e, e);
+ }
+
+ // remove component instance dir
+ try {
+ FileSystem fs = getClusterFS().getFileSystem();
+ if (roleInstance.compInstanceDir != null && fs
+ .exists(roleInstance.compInstanceDir)) {
+ boolean deleted = fs.delete(roleInstance.compInstanceDir, true);
+ if (!deleted) {
+ log.warn("Failed to delete component instance dir: "
+ + roleInstance.compInstanceDir);
+ }
+ }
+ } catch (IOException e) {
+ log.error("Failed to delete component instance dir: "
+ + roleInstance.compInstanceDir, e);
}
}
@@ -1395,13 +1415,6 @@ public class SliderAppMaster extends AbstractSliderLaunchedService
return exitCode;
}
- /**
- * Get diagnostics info about containers
- */
- private String getContainerDiagnosticInfo() {
-
- return appState.getContainerDiagnosticInfo();
- }
public Object getProxy(Class protocol, InetSocketAddress addr) {
return yarnRPC.getProxy(protocol, addr, getConfig());
@@ -1492,7 +1505,6 @@ public class SliderAppMaster extends AbstractSliderLaunchedService
//for all the operations, exec them
execute(operations);
- log.info("Diagnostics: {}", getContainerDiagnosticInfo());
}
@Override //AMRMClientAsync
@@ -1519,8 +1531,9 @@ public class SliderAppMaster extends AbstractSliderLaunchedService
// known nodes trigger notifications
if(!result.unknownNode) {
- queue(new UnregisterComponentInstance(containerId, 0,
- TimeUnit.MILLISECONDS));
+ queue(new UnregisterComponentInstance(0,
+ TimeUnit.MILLISECONDS, result.roleInstance));
+
if (timelineServiceEnabled && result.roleInstance != null) {
serviceTimelinePublisher
.componentInstanceFinished(result.roleInstance);
@@ -1936,7 +1949,7 @@ public class SliderAppMaster extends AbstractSliderLaunchedService
nmClientAsync.getContainerStatusAsync(containerId,
cinfo.container.getNodeId());
// push out a registration
- queue(new RegisterComponentInstance(containerId, cinfo.role, cinfo.group,
+ queue(new RegisterComponentInstance(containerId, cinfo,
0, TimeUnit.MILLISECONDS));
} else {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/RegisterComponentInstance.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/RegisterComponentInstance.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/RegisterComponentInstance.java
index 4cf4981..3c1bed8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/RegisterComponentInstance.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/RegisterComponentInstance.java
@@ -22,6 +22,7 @@ import com.google.common.base.Preconditions;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.slider.server.appmaster.SliderAppMaster;
import org.apache.slider.server.appmaster.state.AppState;
+import org.apache.slider.server.appmaster.state.RoleInstance;
import java.util.concurrent.TimeUnit;
@@ -33,18 +34,15 @@ import java.util.concurrent.TimeUnit;
public class RegisterComponentInstance extends AsyncAction {
public final ContainerId containerId;
- public final String description;
- public final String type;
+ public final RoleInstance roleInstance;
public RegisterComponentInstance(ContainerId containerId,
- String description,
- String type,
+ RoleInstance roleInstance,
long delay,
TimeUnit timeUnit) {
super("RegisterComponentInstance :" + containerId,
delay, timeUnit);
- this.description = description;
- this.type = type;
+ this.roleInstance = roleInstance;
Preconditions.checkArgument(containerId != null);
this.containerId = containerId;
}
@@ -54,6 +52,6 @@ public class RegisterComponentInstance extends AsyncAction {
QueueAccess queueService,
AppState appState) throws Exception {
- appMaster.registerComponent(containerId, description, type);
+ appMaster.registerComponent(containerId, roleInstance);
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/UnregisterComponentInstance.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/UnregisterComponentInstance.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/UnregisterComponentInstance.java
index 575fe8f..ac86333 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/UnregisterComponentInstance.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/UnregisterComponentInstance.java
@@ -21,31 +21,31 @@ package org.apache.slider.server.appmaster.actions;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.slider.server.appmaster.SliderAppMaster;
import org.apache.slider.server.appmaster.state.AppState;
+import org.apache.slider.server.appmaster.state.RoleInstance;
import java.util.concurrent.TimeUnit;
/**
* Tell AM to unregister this component instance
- * {@link SliderAppMaster#unregisterComponent(ContainerId)}
*/
public class UnregisterComponentInstance extends AsyncAction {
- public final ContainerId containerId;
+ public final RoleInstance roleInstance;
- public UnregisterComponentInstance(ContainerId containerId,
- long delay,
- TimeUnit timeUnit) {
- super("UnregisterComponentInstance :" + containerId.toString(),
+ public UnregisterComponentInstance(long delay, TimeUnit timeUnit,
+ RoleInstance roleInstance) {
+ super("UnregisterComponentInstance :" + roleInstance.getCompInstanceName()
+ + ", ContainerId = " + roleInstance.getContainerId(),
delay, timeUnit);
- this.containerId = containerId;
+ this.roleInstance = roleInstance;
}
@Override
public void execute(SliderAppMaster appMaster,
QueueAccess queueService,
AppState appState) throws Exception {
- appMaster.unregisterComponent(containerId);
+ appMaster.unregisterComponent(roleInstance);
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/metrics/SliderMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/metrics/SliderMetrics.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/metrics/SliderMetrics.java
index 5dcbe9b..cf607a0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/metrics/SliderMetrics.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/metrics/SliderMetrics.java
@@ -39,24 +39,34 @@ public class SliderMetrics implements MetricsSource {
@Metric("containers requested")
public MutableGaugeInt containersRequested;
+
@Metric("anti-affinity containers pending")
public MutableGaugeInt pendingAAContainers;
+
@Metric("containers running")
public MutableGaugeInt containersRunning;
+
@Metric("containers desired")
public MutableGaugeInt containersDesired;
+
@Metric("containers completed")
public MutableGaugeInt containersCompleted;
+
@Metric("containers failed")
public MutableGaugeInt containersFailed;
+
@Metric("containers failed since last threshold")
public MutableGaugeInt failedSinceLastThreshold;
+
@Metric("containers preempted")
public MutableGaugeInt containersPreempted;
+
@Metric("containers exceeded limits")
public MutableGaugeInt containersLimitsExceeded;
+
@Metric("containers surplus")
public MutableGaugeInt surplusContainers;
+
@Metric("containers failed due to disk failure")
public MutableGaugeInt containersDiskFailure;
@@ -80,5 +90,18 @@ public class SliderMetrics implements MetricsSource {
public void tag(String name, String description, String value) {
registry.tag(name, description, value);
}
+
+ @Override public String toString() {
+ return "SliderMetrics{"
+ + "containersRequested=" + containersRequested.value()
+ + ", pendingAAContainers=" + pendingAAContainers.value()
+ + ", containersRunning=" + containersRunning.value()
+ + ", containersDesired=" + containersDesired.value()
+ + ", containersCompleted=" + containersCompleted.value()
+ + ", containersFailed=" + containersFailed.value()
+ + ", failedSinceLastThreshold=" + failedSinceLastThreshold.value()
+ + ", containersPreempted=" + containersPreempted.value()
+ + ", surplusContainers=" + surplusContainers.value() + '}';
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/monkey/ChaosKillContainer.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/monkey/ChaosKillContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/monkey/ChaosKillContainer.java
index ae38e4c..1406fbe 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/monkey/ChaosKillContainer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/monkey/ChaosKillContainer.java
@@ -77,7 +77,7 @@ public class ChaosKillContainer implements ChaosTarget {
RoleInstance roleInstance = liveContainers.get(target);
log.info("Killing {}", roleInstance);
- queues.schedule(new ActionKillContainer(roleInstance.getId(),
+ queues.schedule(new ActionKillContainer(roleInstance.getContainerId(),
DELAY, TimeUnit.MILLISECONDS, operationHandler));
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2fe06028/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/rpc/SliderIPCService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/rpc/SliderIPCService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/rpc/SliderIPCService.java
index f88d586..22f9bc3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/rpc/SliderIPCService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/rpc/SliderIPCService.java
@@ -283,7 +283,7 @@ public class SliderIPCService extends AbstractService
//throws NoSuchNodeException if it is missing
RoleInstance instance =
state.getLiveInstanceByContainerID(containerID);
- queue(new ActionKillContainer(instance.getId(), 0, TimeUnit.MILLISECONDS,
+ queue(new ActionKillContainer(instance.getContainerId(), 0, TimeUnit.MILLISECONDS,
amOperations));
Messages.KillContainerResponseProto.Builder builder =
Messages.KillContainerResponseProto.newBuilder();
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org