You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by at...@apache.org on 2011/09/14 00:49:38 UTC
svn commit: r1170378 [5/12] - in
/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ conf/
dev-support/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/
hadoop-mapreduce-client/hadoop-mapreduce-clie...
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java Tue Sep 13 22:49:27 2011
@@ -23,16 +23,11 @@ import java.lang.reflect.Method;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl;
public class RecordFactoryProvider {
-
- public static final String RPC_SERIALIZER_KEY = "org.apache.yarn.ipc.rpc.serializer.property";
- public static final String RPC_SERIALIZER_DEFAULT = "protocolbuffers";
-
- public static final String RECORD_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.record.factory.class";
-
private static Configuration defaultConf;
static {
@@ -48,13 +43,13 @@ public class RecordFactoryProvider {
//Users can specify a particular factory by providing a configuration.
conf = defaultConf;
}
- String recordFactoryClassName = conf.get(RECORD_FACTORY_CLASS_KEY);
+ String recordFactoryClassName = conf.get(YarnConfiguration.IPC_RECORD_FACTORY);
if (recordFactoryClassName == null) {
- String serializer = conf.get(RPC_SERIALIZER_KEY, RPC_SERIALIZER_DEFAULT);
- if (serializer.equals(RPC_SERIALIZER_DEFAULT)) {
+ String serializer = conf.get(YarnConfiguration.IPC_SERIALIZER_TYPE, YarnConfiguration.DEFAULT_IPC_SERIALIZER_TYPE);
+ if (serializer.equals(YarnConfiguration.DEFAULT_IPC_SERIALIZER_TYPE)) {
return RecordFactoryPBImpl.get();
} else {
- throw new YarnException("Unknown serializer: [" + conf.get(RPC_SERIALIZER_KEY) + "]. Use keys: [" + RECORD_FACTORY_CLASS_KEY + "] to specify Record factory");
+ throw new YarnException("Unknown serializer: [" + conf.get(YarnConfiguration.IPC_SERIALIZER_TYPE) + "]. Use keys: [" + YarnConfiguration.IPC_RECORD_FACTORY + "] to specify Record factory");
}
} else {
return (RecordFactory) getFactoryClassInstance(recordFactoryClassName);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java Tue Sep 13 22:49:27 2011
@@ -25,6 +25,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RpcClientFactory;
import org.apache.hadoop.yarn.factories.RpcServerFactory;
import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl;
@@ -35,13 +36,7 @@ import org.apache.hadoop.yarn.factories.
*/
public class RpcFactoryProvider {
private static final Log LOG = LogFactory.getLog(RpcFactoryProvider.class);
- //TODO Move these keys to CommonConfigurationKeys
- public static final String RPC_SERIALIZER_KEY = "org.apache.yarn.ipc.rpc.serializer.property";
- public static final String RPC_SERIALIZER_DEFAULT = "protocolbuffers";
- public static final String RPC_CLIENT_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.client.factory.class";
- public static final String RPC_SERVER_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.server.factory.class";
-
private RpcFactoryProvider() {
}
@@ -51,12 +46,12 @@ public class RpcFactoryProvider {
if (conf == null) {
conf = new Configuration();
}
- String serverFactoryClassName = conf.get(RPC_SERVER_FACTORY_CLASS_KEY);
+ String serverFactoryClassName = conf.get(YarnConfiguration.IPC_SERVER_FACTORY);
if (serverFactoryClassName == null) {
- if (conf.get(RPC_SERIALIZER_KEY, RPC_SERIALIZER_DEFAULT).equals(RPC_SERIALIZER_DEFAULT)) {
+ if (conf.get(YarnConfiguration.IPC_SERIALIZER_TYPE, YarnConfiguration.DEFAULT_IPC_SERIALIZER_TYPE).equals(YarnConfiguration.DEFAULT_IPC_SERIALIZER_TYPE)) {
return RpcServerFactoryPBImpl.get();
} else {
- throw new YarnException("Unknown serializer: [" + conf.get(RPC_SERIALIZER_KEY) + "]. Use keys: [" + RPC_CLIENT_FACTORY_CLASS_KEY + "][" + RPC_SERVER_FACTORY_CLASS_KEY + "] to specify factories");
+ throw new YarnException("Unknown serializer: [" + conf.get(YarnConfiguration.IPC_SERIALIZER_TYPE) + "]. Use keys: [" + YarnConfiguration.IPC_CLIENT_FACTORY + "][" + YarnConfiguration.IPC_SERVER_FACTORY + "] to specify factories");
}
} else {
return (RpcServerFactory) getFactoryClassInstance(serverFactoryClassName);
@@ -64,12 +59,12 @@ public class RpcFactoryProvider {
}
public static RpcClientFactory getClientFactory(Configuration conf) {
- String clientFactoryClassName = conf.get(RPC_CLIENT_FACTORY_CLASS_KEY);
+ String clientFactoryClassName = conf.get(YarnConfiguration.IPC_CLIENT_FACTORY);
if (clientFactoryClassName == null) {
- if (conf.get(RPC_SERIALIZER_KEY, RPC_SERIALIZER_DEFAULT).equals(RPC_SERIALIZER_DEFAULT)) {
+ if (conf.get(YarnConfiguration.IPC_SERIALIZER_TYPE, YarnConfiguration.DEFAULT_IPC_SERIALIZER_TYPE).equals(YarnConfiguration.DEFAULT_IPC_SERIALIZER_TYPE)) {
return RpcClientFactoryPBImpl.get();
} else {
- throw new YarnException("Unknown serializer: [" + conf.get(RPC_SERIALIZER_KEY) + "]. Use keys: [" + RPC_CLIENT_FACTORY_CLASS_KEY + "][" + RPC_SERVER_FACTORY_CLASS_KEY + "] to specify factories");
+ throw new YarnException("Unknown serializer: [" + conf.get(YarnConfiguration.IPC_SERIALIZER_TYPE) + "]. Use keys: [" + YarnConfiguration.IPC_CLIENT_FACTORY + "][" + YarnConfiguration.IPC_SERVER_FACTORY + "] to specify factories");
}
} else {
return(RpcClientFactory) getFactoryClassInstance(clientFactoryClassName);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/YarnRemoteExceptionFactoryProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/YarnRemoteExceptionFactoryProvider.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/YarnRemoteExceptionFactoryProvider.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/YarnRemoteExceptionFactoryProvider.java Tue Sep 13 22:49:27 2011
@@ -23,16 +23,12 @@ import java.lang.reflect.Method;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.YarnRemoteExceptionFactory;
import org.apache.hadoop.yarn.factories.impl.pb.YarnRemoteExceptionFactoryPBImpl;
public class YarnRemoteExceptionFactoryProvider {
- public static final String RPC_SERIALIZER_KEY = "org.apache.yarn.ipc.rpc.serializer.property";
- public static final String RPC_SERIALIZER_DEFAULT = "protocolbuffers";
-
- public static final String EXCEPTION_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.exception.factory.class";
-
private YarnRemoteExceptionFactoryProvider() {
}
@@ -40,13 +36,13 @@ public class YarnRemoteExceptionFactoryP
if (conf == null) {
conf = new Configuration();
}
- String recordFactoryClassName = conf.get(EXCEPTION_FACTORY_CLASS_KEY);
+ String recordFactoryClassName = conf.get(YarnConfiguration.IPC_EXCEPTION_FACTORY);
if (recordFactoryClassName == null) {
- String serializer = conf.get(RPC_SERIALIZER_KEY, RPC_SERIALIZER_DEFAULT);
- if (serializer.equals(RPC_SERIALIZER_DEFAULT)) {
+ String serializer = conf.get(YarnConfiguration.IPC_SERIALIZER_TYPE, YarnConfiguration.DEFAULT_IPC_SERIALIZER_TYPE);
+ if (serializer.equals(YarnConfiguration.DEFAULT_IPC_SERIALIZER_TYPE)) {
return YarnRemoteExceptionFactoryPBImpl.get();
} else {
- throw new YarnException("Unknown serializer: [" + conf.get(RPC_SERIALIZER_KEY) + "]. Use keys: [" + EXCEPTION_FACTORY_CLASS_KEY + "] to specify Exception factory");
+ throw new YarnException("Unknown serializer: [" + conf.get(YarnConfiguration.IPC_SERIALIZER_TYPE) + "]. Use keys: [" + YarnConfiguration.IPC_EXCEPTION_FACTORY + "] to specify Exception factory");
}
} else {
return (YarnRemoteExceptionFactory) getFactoryClassInstance(recordFactoryClassName);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/ProtoOverHadoopRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/ProtoOverHadoopRpcEngine.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/ProtoOverHadoopRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/ProtoOverHadoopRpcEngine.java Tue Sep 13 22:49:27 2011
@@ -73,15 +73,6 @@ public class ProtoOverHadoopRpcEngine im
addr, ticket, conf, factory, rpcTimeout)), false);
}
- @Override
- public void stopProxy(Object proxy) {
- try {
- ((Invoker) Proxy.getInvocationHandler(proxy)).close();
- } catch (IOException e) {
- LOG.warn("Error while stopping " + proxy, e);
- }
- }
-
private static class Invoker implements InvocationHandler, Closeable {
private Map<String, Message> returnTypes = new ConcurrentHashMap<String, Message>();
private boolean isClosed = false;
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java Tue Sep 13 22:49:27 2011
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
/**
* Abstraction to get the RPC implementation for Yarn.
@@ -34,13 +35,6 @@ import org.apache.hadoop.yarn.YarnExcept
public abstract class YarnRPC {
private static final Log LOG = LogFactory.getLog(YarnRPC.class);
- public static final String RPC_CLASSNAME
- = "org.apache.hadoop.yarn.ipc.YarnRPC.classname";
-
- //use the default as Hadoop RPC
- public static final String DEFAULT_RPC
- = "org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC";
-
public abstract Object getProxy(Class protocol, InetSocketAddress addr,
Configuration conf);
@@ -50,10 +44,10 @@ public abstract class YarnRPC {
int numHandlers);
public static YarnRPC create(Configuration conf) {
- LOG.info("Creating YarnRPC for " + conf.get(RPC_CLASSNAME));
- String clazzName = conf.get(RPC_CLASSNAME);
+ LOG.info("Creating YarnRPC for " + conf.get(YarnConfiguration.IPC_RPC_IMPL));
+ String clazzName = conf.get(YarnConfiguration.IPC_RPC_IMPL);
if (clazzName == null) {
- clazzName = DEFAULT_RPC;
+ clazzName = YarnConfiguration.DEFAULT_IPC_RPC_IMPL;
}
try {
return (YarnRPC) Class.forName(clazzName).newInstance();
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ContainerTokenIdentifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ContainerTokenIdentifier.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ContainerTokenIdentifier.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ContainerTokenIdentifier.java Tue Sep 13 22:49:27 2011
@@ -68,28 +68,42 @@ public class ContainerTokenIdentifier ex
@Override
public void write(DataOutput out) throws IOException {
LOG.debug("Writing ContainerTokenIdentifier to RPC layer");
- out.writeInt(this.containerId.getAppId().getId());
- out.writeInt(this.containerId.getAppAttemptId().getAttemptId());
+ ApplicationAttemptId applicationAttemptId =
+ containerId.getApplicationAttemptId();
+ ApplicationId applicationId = applicationAttemptId.getApplicationId();
+ out.writeLong(applicationId.getClusterTimestamp());
+ out.writeInt(applicationId.getId());
+ out.writeInt(applicationAttemptId.getAttemptId());
out.writeInt(this.containerId.getId());
- // TODO: Cluster time-stamp?
out.writeUTF(this.nmHostName);
- out.writeInt(this.resource.getMemory()); // TODO: more resources.
+ out.writeInt(this.resource.getMemory());
}
@Override
public void readFields(DataInput in) throws IOException {
- this.containerId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ContainerId.class);
- this.containerId.setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
- this.containerId.setAppAttemptId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationAttemptId.class));
- this.containerId.getAppId().setId(in.readInt());
- this.containerId.getAppAttemptId().setApplicationId(this.containerId.getAppId());
- this.containerId.getAppAttemptId().setAttemptId(in.readInt());
+ this.containerId =
+ RecordFactoryProvider.getRecordFactory(null).newRecordInstance(
+ ContainerId.class);
+ ApplicationAttemptId applicationAttemptId =
+ RecordFactoryProvider.getRecordFactory(null).newRecordInstance(
+ ApplicationAttemptId.class);
+ ApplicationId applicationId =
+ RecordFactoryProvider.getRecordFactory(null).newRecordInstance(
+ ApplicationId.class);
+ applicationId.setClusterTimestamp(in.readLong());
+ applicationId.setId(in.readInt());
+ applicationAttemptId.setApplicationId(applicationId);
+ applicationAttemptId.setAttemptId(in.readInt());
+ this.containerId.setApplicationAttemptId(applicationAttemptId);
this.containerId.setId(in.readInt());
this.nmHostName = in.readUTF();
- this.resource = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Resource.class);
- this.resource.setMemory(in.readInt()); // TODO: more resources.
+ this.resource =
+ RecordFactoryProvider.getRecordFactory(null).newRecordInstance(
+ Resource.class);
+ this.resource.setMemory(in.readInt());
}
+ @SuppressWarnings("static-access")
@Override
public Text getKind() {
return this.KIND;
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientRMSecurityInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientRMSecurityInfo.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientRMSecurityInfo.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientRMSecurityInfo.java Tue Sep 13 22:49:27 2011
@@ -44,7 +44,7 @@ public class ClientRMSecurityInfo extend
@Override
public String serverPrincipal() {
- return YarnConfiguration.RM_SERVER_PRINCIPAL_KEY;
+ return YarnConfiguration.RM_PRINCIPAL;
}
@Override
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java Tue Sep 13 22:49:27 2011
@@ -33,10 +33,6 @@ public class Apps {
public static final String APP = "app";
public static final String ID = "ID";
- public static String toString(ApplicationId id) {
- return _join("app", id.getClusterTimestamp(), id.getId());
- }
-
public static ApplicationId toAppID(String aid) {
Iterator<String> it = _split(aid).iterator();
return toAppID(APP, aid, it);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java Tue Sep 13 22:49:27 2011
@@ -36,6 +36,7 @@ import org.apache.hadoop.yarn.api.record
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
+import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -86,12 +87,11 @@ public class BuilderUtils {
}
}
- public static LocalResource newLocalResource(RecordFactory recordFactory,
- URI uri, LocalResourceType type, LocalResourceVisibility visibility,
- long size, long timestamp) {
+ public static LocalResource newLocalResource(URL url, LocalResourceType type,
+ LocalResourceVisibility visibility, long size, long timestamp) {
LocalResource resource =
- recordFactory.newRecordInstance(LocalResource.class);
- resource.setResource(ConverterUtils.getYarnUrlFromURI(uri));
+ recordFactory.newRecordInstance(LocalResource.class);
+ resource.setResource(url);
resource.setType(type);
resource.setVisibility(visibility);
resource.setSize(size);
@@ -99,6 +99,13 @@ public class BuilderUtils {
return resource;
}
+ public static LocalResource newLocalResource(URI uri,
+ LocalResourceType type, LocalResourceVisibility visibility, long size,
+ long timestamp) {
+ return newLocalResource(ConverterUtils.getYarnUrlFromURI(uri), type,
+ visibility, size, timestamp);
+ }
+
public static ApplicationId newApplicationId(RecordFactory recordFactory,
long clustertimestamp, CharSequence id) {
ApplicationId applicationId =
@@ -125,6 +132,15 @@ public class BuilderUtils {
return applicationId;
}
+ public static ApplicationAttemptId newApplicationAttemptId(
+ ApplicationId appId, int attemptId) {
+ ApplicationAttemptId appAttemptId =
+ recordFactory.newRecordInstance(ApplicationAttemptId.class);
+ appAttemptId.setApplicationId(appId);
+ appAttemptId.setAttemptId(attemptId);
+ return appAttemptId;
+ }
+
public static ApplicationId convert(long clustertimestamp, CharSequence id) {
ApplicationId applicationId =
recordFactory.newRecordInstance(ApplicationId.class);
@@ -133,13 +149,29 @@ public class BuilderUtils {
return applicationId;
}
+ public static ContainerId newContainerId(ApplicationAttemptId appAttemptId,
+ int containerId) {
+ ContainerId id = recordFactory.newRecordInstance(ContainerId.class);
+ id.setId(containerId);
+ id.setApplicationAttemptId(appAttemptId);
+ return id;
+ }
+
+ public static ContainerId newContainerId(int appId, int appAttemptId,
+ long timestamp, int id) {
+ ApplicationId applicationId = newApplicationId(timestamp, appId);
+ ApplicationAttemptId applicationAttemptId = newApplicationAttemptId(
+ applicationId, appAttemptId);
+ ContainerId cId = newContainerId(applicationAttemptId, id);
+ return cId;
+ }
+
public static ContainerId newContainerId(RecordFactory recordFactory,
ApplicationId appId, ApplicationAttemptId appAttemptId,
int containerId) {
ContainerId id = recordFactory.newRecordInstance(ContainerId.class);
- id.setAppId(appId);
id.setId(containerId);
- id.setAppAttemptId(appAttemptId);
+ id.setApplicationAttemptId(appAttemptId);
return id;
}
@@ -147,8 +179,7 @@ public class BuilderUtils {
ApplicationAttemptId appAttemptId,
int containerId) {
ContainerId id = recordFactory.newRecordInstance(ContainerId.class);
- id.setAppAttemptId(appAttemptId);
- id.setAppId(appAttemptId.getApplicationId());
+ id.setApplicationAttemptId(appAttemptId);
id.setId(containerId);
return id;
}
@@ -227,4 +258,20 @@ public class BuilderUtils {
report.setStartTime(startTime);
return report;
}
+
+ public static Resource newResource(int memory) {
+ Resource resource = recordFactory.newRecordInstance(Resource.class);
+ resource.setMemory(memory);
+ return resource;
+ }
+
+ public static URL newURL(String scheme, String host, int port, String file) {
+ URL url = recordFactory.newRecordInstance(URL.class);
+ url.setScheme(scheme);
+ url.setHost(host);
+ url.setPort(port);
+ url.setFile(file);
+ return url;
+ }
+
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java Tue Sep 13 22:49:27 2011
@@ -29,6 +29,7 @@ import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.URL;
@@ -130,6 +131,20 @@ public class ConverterUtils {
return appId;
}
+ private static ApplicationAttemptId toApplicationAttemptId(
+ RecordFactory recordFactory,
+ Iterator<String> it) {
+ ApplicationId appId =
+ recordFactory.newRecordInstance(ApplicationId.class);
+ appId.setClusterTimestamp(Long.parseLong(it.next()));
+ appId.setId(Integer.parseInt(it.next()));
+ ApplicationAttemptId appAttemptId =
+ recordFactory.newRecordInstance(ApplicationAttemptId.class);
+ appAttemptId.setApplicationId(appId);
+ appAttemptId.setAttemptId(Integer.parseInt(it.next()));
+ return appAttemptId;
+ }
+
public static String toString(ContainerId cId) {
return cId.toString();
}
@@ -138,10 +153,11 @@ public class ConverterUtils {
String containerIdStr) {
Iterator<String> it = _split(containerIdStr).iterator();
it.next(); // prefix. TODO: Validate container prefix
- ApplicationId appID = toApplicationId(recordFactory, it);
+ ApplicationAttemptId appAttemptID =
+ toApplicationAttemptId(recordFactory, it);
ContainerId containerId =
recordFactory.newRecordInstance(ContainerId.class);
- containerId.setAppId(appID);
+ containerId.setApplicationAttemptId(appAttemptID);
containerId.setId(Integer.parseInt(it.next()));
return containerId;
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java Tue Sep 13 22:49:27 2011
@@ -97,7 +97,7 @@ public abstract class ResourceCalculator
@InterfaceStability.Unstable
public abstract ProcResourceValues getProcResourceValues();
- public class ProcResourceValues {
+ public static class ProcResourceValues {
private final long cumulativeCpuTime;
private final long physicalMemorySize;
private final long virtualMemorySize;
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java Tue Sep 13 22:49:27 2011
@@ -67,7 +67,6 @@ public class WebApps {
boolean findPort = false;
Configuration conf;
boolean devMode = false;
- Module[] modules;
Builder(String name, Class<T> api, T application) {
this.name = name;
@@ -99,11 +98,6 @@ public class WebApps {
return this;
}
- public Builder<T> with(Module... modules) {
- this.modules = modules; // OK
- return this;
- }
-
public Builder<T> inDevMode() {
devMode = true;
return this;
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/HtmlPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/HtmlPage.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/HtmlPage.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/HtmlPage.java Tue Sep 13 22:49:27 2011
@@ -26,6 +26,11 @@ import org.apache.hadoop.yarn.webapp.Sub
import org.apache.hadoop.yarn.webapp.WebAppException;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
+/**
+ * The parent class of all HTML pages. Override
+ * {@link #render(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)}
+ * to actually render the page.
+ */
public abstract class HtmlPage extends TextView {
public static class _ implements Hamlet._ {
@@ -79,6 +84,10 @@ public abstract class HtmlPage extends T
}
}
+ /**
+ * Render the HTML page.
+ * @param html the page to render data to.
+ */
protected abstract void render(Page.HTML<_> html);
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java Tue Sep 13 22:49:27 2011
@@ -133,8 +133,12 @@ public class JQueryUI extends HtmlBlock
if (init.isEmpty()) {
init = defaultInit;
}
- list.add(join(" $('#", id, "').dataTable(", init,
+ list.add(join(id,"DataTable = $('#", id, "').dataTable(", init,
").fnSetFilteringDelay(188);"));
+ String postInit = $(postInitID(DATATABLES, id));
+ if(!postInit.isEmpty()) {
+ list.add(postInit);
+ }
}
}
String selector = $(DATATABLES_SELECTOR);
@@ -210,6 +214,10 @@ public class JQueryUI extends HtmlBlock
public static String initID(String name, String id) {
return djoin(name, id, "init");
}
+
+ public static String postInitID(String name, String id) {
+ return djoin(name, id, "postinit");
+ }
public static String initSelector(String name) {
return djoin(name, "selector.init");
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/TwoColumnLayout.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/TwoColumnLayout.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/TwoColumnLayout.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/TwoColumnLayout.java Tue Sep 13 22:49:27 2011
@@ -18,21 +18,25 @@
package org.apache.hadoop.yarn.webapp.view;
-import com.google.common.collect.Lists;
-import com.google.inject.Inject;
-import java.util.List;
+import static org.apache.hadoop.yarn.util.StringHelper.join;
-import static org.apache.hadoop.yarn.util.StringHelper.*;
-import static org.apache.hadoop.yarn.webapp.Params.*;
+import java.util.List;
import org.apache.hadoop.yarn.webapp.SubView;
+import com.google.common.collect.Lists;
+
/**
- * A simpler two column layout implementation. Works with resizable themes.
+ * A simpler two column layout implementation with a header, a navigation bar
+ * on the left, content on the right, and a footer. Works with resizable themes.
* @see TwoColumnCssLayout
*/
public class TwoColumnLayout extends HtmlPage {
+ /*
+ * (non-Javadoc)
+ * @see org.apache.hadoop.yarn.webapp.view.HtmlPage#render(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
+ */
@Override protected void render(Page.HTML<_> html) {
preHead(html);
html.
@@ -65,28 +69,55 @@ public class TwoColumnLayout extends Htm
_(content())._()._()._()._()._();
}
+ /**
+ * Do what needs to be done before the header is rendered. This usually
+ * involves setting page variables for Javascript and CSS rendering.
+ * @param html the html to use to render.
+ */
protected void preHead(Page.HTML<_> html) {
}
+ /**
+ * Do what needs to be done after the header is rendered.
+ * @param html the html to use to render.
+ */
protected void postHead(Page.HTML<_> html) {
}
+ /**
+ * @return the class that will render the header of the page.
+ */
protected Class<? extends SubView> header() {
return HeaderBlock.class;
}
+ /**
+ * @return the class that will render the content of the page.
+ */
protected Class<? extends SubView> content() {
return LipsumBlock.class;
}
+ /**
+ * @return the class that will render the navigation bar.
+ */
protected Class<? extends SubView> nav() {
return NavBlock.class;
}
+ /**
+ * @return the class that will render the footer.
+ */
protected Class<? extends SubView> footer() {
return FooterBlock.class;
}
+ /**
+ * Sets up a table to have a consistent style.
+ * @param html the HTML to use to render.
+ * @param tableId the ID of the table to set styles on.
+ * @param innerStyles any other styles to add to the table.
+ */
protected void setTableStyles(Page.HTML<_> html, String tableId,
String... innerStyles) {
List<String> styles = Lists.newArrayList();
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java Tue Sep 13 22:49:27 2011
@@ -32,12 +32,14 @@ import org.apache.hadoop.yarn.api.protoc
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainerResponse;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -70,7 +72,7 @@ public class TestRPC {
private void test(String rpcClass) throws Exception {
Configuration conf = new Configuration();
- conf.set(YarnRPC.RPC_CLASSNAME, rpcClass);
+ conf.set(YarnConfiguration.IPC_RPC_IMPL, rpcClass);
YarnRPC rpc = YarnRPC.create(conf);
String bindAddr = "localhost:0";
InetSocketAddress addr = NetUtils.createSocketAddr(bindAddr);
@@ -80,21 +82,34 @@ public class TestRPC {
ContainerManager proxy = (ContainerManager)
rpc.getProxy(ContainerManager.class,
NetUtils.createSocketAddr("localhost:" + server.getPort()), conf);
- ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class);
+ ContainerLaunchContext containerLaunchContext =
+ recordFactory.newRecordInstance(ContainerLaunchContext.class);
containerLaunchContext.setUser("dummy-user");
- containerLaunchContext.setContainerId(recordFactory.newRecordInstance(ContainerId.class));
- containerLaunchContext.getContainerId().setAppId(recordFactory.newRecordInstance(ApplicationId.class));
- containerLaunchContext.getContainerId().getAppId().setId(0);
- containerLaunchContext.getContainerId().setId(100);
- containerLaunchContext.setResource(recordFactory.newRecordInstance(Resource.class));
+ ContainerId containerId =
+ recordFactory.newRecordInstance(ContainerId.class);
+ ApplicationId applicationId =
+ recordFactory.newRecordInstance(ApplicationId.class);
+ ApplicationAttemptId applicationAttemptId =
+ recordFactory.newRecordInstance(ApplicationAttemptId.class);
+ applicationId.setClusterTimestamp(0);
+ applicationId.setId(0);
+ applicationAttemptId.setApplicationId(applicationId);
+ applicationAttemptId.setAttemptId(0);
+ containerId.setApplicationAttemptId(applicationAttemptId);
+ containerId.setId(100);
+ containerLaunchContext.setContainerId(containerId);
+ containerLaunchContext.setResource(
+ recordFactory.newRecordInstance(Resource.class));
// containerLaunchContext.env = new HashMap<CharSequence, CharSequence>();
// containerLaunchContext.command = new ArrayList<CharSequence>();
- StartContainerRequest scRequest = recordFactory.newRecordInstance(StartContainerRequest.class);
+ StartContainerRequest scRequest =
+ recordFactory.newRecordInstance(StartContainerRequest.class);
scRequest.setContainerLaunchContext(containerLaunchContext);
proxy.startContainer(scRequest);
- GetContainerStatusRequest gcsRequest = recordFactory.newRecordInstance(GetContainerStatusRequest.class);
+ GetContainerStatusRequest gcsRequest =
+ recordFactory.newRecordInstance(GetContainerStatusRequest.class);
gcsRequest.setContainerId(containerLaunchContext.getContainerId());
GetContainerStatusResponse response = proxy.getContainerStatus(gcsRequest);
ContainerStatus status = response.getStatus();
@@ -117,7 +132,7 @@ public class TestRPC {
server.close();
Assert.assertNotNull(status);
- Assert.assertEquals(ContainerState.RUNNING, status.getState().RUNNING);
+ Assert.assertEquals(ContainerState.RUNNING, status.getState());
}
public class DummyContainerManager implements ContainerManager {
@@ -125,28 +140,35 @@ public class TestRPC {
private ContainerStatus status = null;
@Override
- public GetContainerStatusResponse getContainerStatus(GetContainerStatusRequest request) throws YarnRemoteException {
- GetContainerStatusResponse response = recordFactory.newRecordInstance(GetContainerStatusResponse.class);
+ public GetContainerStatusResponse getContainerStatus(
+ GetContainerStatusRequest request)
+ throws YarnRemoteException {
+ GetContainerStatusResponse response =
+ recordFactory.newRecordInstance(GetContainerStatusResponse.class);
response.setStatus(status);
return response;
}
@Override
- public StartContainerResponse startContainer(StartContainerRequest request) throws YarnRemoteException {
+ public StartContainerResponse startContainer(StartContainerRequest request)
+ throws YarnRemoteException {
ContainerLaunchContext container = request.getContainerLaunchContext();
- StartContainerResponse response = recordFactory.newRecordInstance(StartContainerResponse.class);
+ StartContainerResponse response =
+ recordFactory.newRecordInstance(StartContainerResponse.class);
status = recordFactory.newRecordInstance(ContainerStatus.class);
status.setState(ContainerState.RUNNING);
status.setContainerId(container.getContainerId());
- status.setExitStatus(String.valueOf(0));
+ status.setExitStatus(0);
return response;
}
@Override
- public StopContainerResponse stopContainer(StopContainerRequest request) throws YarnRemoteException {
+ public StopContainerResponse stopContainer(StopContainerRequest request)
+ throws YarnRemoteException {
Exception e = new Exception(EXCEPTION_MSG,
new Exception(EXCEPTION_CAUSE));
- throw YarnRemoteExceptionFactoryProvider.getYarnRemoteExceptionFactory(null).createYarnRemoteException(e);
+ throw YarnRemoteExceptionFactoryProvider
+ .getYarnRemoteExceptionFactory(null).createYarnRemoteException(e);
}
}
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java Tue Sep 13 22:49:27 2011
@@ -21,6 +21,7 @@ package org.apache.hadoop.yarn;
import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RpcClientFactory;
import org.apache.hadoop.yarn.factories.RpcServerFactory;
import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl;
@@ -42,7 +43,7 @@ public class TestRpcFactoryProvider {
Assert.assertEquals(RpcClientFactoryPBImpl.class, clientFactory.getClass());
Assert.assertEquals(RpcServerFactoryPBImpl.class, serverFactory.getClass());
- conf.set(RpcFactoryProvider.RPC_SERIALIZER_KEY, "writable");
+ conf.set(YarnConfiguration.IPC_SERIALIZER_TYPE, "writable");
try {
clientFactory = RpcFactoryProvider.getClientFactory(conf);
Assert.fail("Expected an exception - unknown serializer");
@@ -55,8 +56,8 @@ public class TestRpcFactoryProvider {
}
conf = new Configuration();
- conf.set(RpcFactoryProvider.RPC_CLIENT_FACTORY_CLASS_KEY, "NonExistantClass");
- conf.set(RpcFactoryProvider.RPC_SERVER_FACTORY_CLASS_KEY, RpcServerFactoryPBImpl.class.getName());
+ conf.set(YarnConfiguration.IPC_CLIENT_FACTORY, "NonExistantClass");
+ conf.set(YarnConfiguration.IPC_SERVER_FACTORY, RpcServerFactoryPBImpl.class.getName());
try {
clientFactory = RpcFactoryProvider.getClientFactory(conf);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/NodeHealthCheckerService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/NodeHealthCheckerService.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/NodeHealthCheckerService.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/NodeHealthCheckerService.java Tue Sep 13 22:49:27 2011
@@ -32,6 +32,7 @@ import org.apache.hadoop.util.Shell.Exit
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.NodeHealthStatus;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.service.AbstractService;
/**
@@ -62,28 +63,9 @@ public class NodeHealthCheckerService ex
/** Pattern used for searching in the output of the node health script */
static private final String ERROR_PATTERN = "ERROR";
- /* Configuration keys */
- public static final String HEALTH_CHECK_SCRIPT_PROPERTY =
- "yarn.server.nodemanager.healthchecker.script.path";
-
- public static final String HEALTH_CHECK_INTERVAL_PROPERTY =
- "yarn.server.nodemanager.healthchecker.interval";
-
- public static final String HEALTH_CHECK_FAILURE_INTERVAL_PROPERTY =
- "yarn.server.nodemanager.healthchecker.script.timeout";
-
- public static final String HEALTH_CHECK_SCRIPT_ARGUMENTS_PROPERTY =
- "yarn.server.nodemanager.healthchecker.script.args";
-
- /* end of configuration keys */
/** Time out error message */
static final String NODE_HEALTH_SCRIPT_TIMED_OUT_MSG = "Node health script timed out";
- /** Default frequency of running node health script */
- private static final long DEFAULT_HEALTH_CHECK_INTERVAL = 10 * 60 * 1000;
- /** Default script time out period */
- private static final long DEFAULT_HEALTH_SCRIPT_FAILURE_INTERVAL = 2 * DEFAULT_HEALTH_CHECK_INTERVAL;
-
private boolean isHealthy;
private String healthReport;
@@ -224,13 +206,13 @@ public class NodeHealthCheckerService ex
public void init(Configuration conf) {
this.conf = conf;
this.nodeHealthScript =
- conf.get(HEALTH_CHECK_SCRIPT_PROPERTY);
- this.intervalTime = conf.getLong(HEALTH_CHECK_INTERVAL_PROPERTY,
- DEFAULT_HEALTH_CHECK_INTERVAL);
+ conf.get(YarnConfiguration.NM_HEALTH_CHECK_SCRIPT_PATH);
+ this.intervalTime = conf.getLong(YarnConfiguration.NM_HEALTH_CHECK_INTERVAL_MS,
+ YarnConfiguration.DEFAULT_NM_HEALTH_CHECK_INTERVAL_MS);
this.scriptTimeout = conf.getLong(
- HEALTH_CHECK_FAILURE_INTERVAL_PROPERTY,
- DEFAULT_HEALTH_SCRIPT_FAILURE_INTERVAL);
- String[] args = conf.getStrings(HEALTH_CHECK_SCRIPT_ARGUMENTS_PROPERTY,
+ YarnConfiguration.NM_HEALTH_CHECK_SCRIPT_TIMEOUT_MS,
+ YarnConfiguration.DEFAULT_NM_HEALTH_CHECK_SCRIPT_TIMEOUT_MS);
+ String[] args = conf.getStrings(YarnConfiguration.NM_HEALTH_CHECK_SCRIPT_OPTS,
new String[] {});
timer = new NodeHealthMonitorExecutor(args);
}
@@ -340,7 +322,7 @@ public class NodeHealthCheckerService ex
*/
public static boolean shouldRun(Configuration conf) {
String nodeHealthScript =
- conf.get(HEALTH_CHECK_SCRIPT_PROPERTY);
+ conf.get(YarnConfiguration.NM_HEALTH_CHECK_SCRIPT_PATH);
if (nodeHealthScript == null || nodeHealthScript.trim().isEmpty()) {
return false;
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/RMNMSecurityInfoClass.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/RMNMSecurityInfoClass.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/RMNMSecurityInfoClass.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/RMNMSecurityInfoClass.java Tue Sep 13 22:49:27 2011
@@ -43,12 +43,12 @@ public class RMNMSecurityInfoClass exten
@Override
public String serverPrincipal() {
- return YarnConfiguration.RM_SERVER_PRINCIPAL_KEY;
+ return YarnConfiguration.RM_PRINCIPAL;
}
@Override
public String clientPrincipal() {
- return YarnServerConfig.NM_SERVER_PRINCIPAL_KEY;
+ return YarnConfiguration.NM_PRINCIPAL;
}
};
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/NodeStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/NodeStatus.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/NodeStatus.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/NodeStatus.java Tue Sep 13 22:49:27 2011
@@ -18,10 +18,8 @@
package org.apache.hadoop.yarn.server.api.records;
import java.util.List;
-import java.util.Map;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeHealthStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
@@ -31,17 +29,13 @@ public interface NodeStatus {
public abstract NodeId getNodeId();
public abstract int getResponseId();
- public abstract Map<ApplicationId, List<Container>> getAllContainers();
- public abstract List<Container> getContainers(ApplicationId key);
+ public abstract List<ContainerStatus> getContainersStatuses();
+ public abstract void setContainersStatuses(
+ List<ContainerStatus> containersStatuses);
NodeHealthStatus getNodeHealthStatus();
void setNodeHealthStatus(NodeHealthStatus healthStatus);
public abstract void setNodeId(NodeId nodeId);
public abstract void setResponseId(int responseId);
-
- public abstract void addAllContainers(Map<ApplicationId, List<Container>> containers);
- public abstract void setContainers(ApplicationId key, List<Container> containers);
- public abstract void removeContainers(ApplicationId key);
- public abstract void clearContainers();
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java Tue Sep 13 22:49:27 2011
@@ -20,27 +20,19 @@ package org.apache.hadoop.yarn.server.ap
import java.util.ArrayList;
-import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeHealthStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
-import org.apache.hadoop.yarn.api.records.impl.pb.ContainerPBImpl;
+import org.apache.hadoop.yarn.api.records.impl.pb.ContainerStatusPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeHealthStatusPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl;
-import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
-import org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeHealthStatusProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
-import org.apache.hadoop.yarn.proto.YarnServerCommonProtos.ApplicationIdContainerListMapProto;
-import org.apache.hadoop.yarn.proto.YarnServerCommonProtos.ContainerListProto;
import org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProto;
import org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder;
import org.apache.hadoop.yarn.server.api.records.NodeStatus;
@@ -51,7 +43,7 @@ public class NodeStatusPBImpl extends Pr
boolean viaProto = false;
private NodeId nodeId = null;
- private Map<ApplicationIdProto, List<Container>> containers = null;
+ private List<ContainerStatus> containers = null;
private NodeHealthStatus nodeHealthStatus = null;
public NodeStatusPBImpl() {
@@ -99,7 +91,40 @@ public class NodeStatusPBImpl extends Pr
viaProto = false;
}
+ private void addContainersToProto() {
+ maybeInitBuilder();
+ builder.clearContainersStatuses();
+ if (containers == null)
+ return;
+ Iterable<ContainerStatusProto> iterable = new Iterable<ContainerStatusProto>() {
+ @Override
+ public Iterator<ContainerStatusProto> iterator() {
+ return new Iterator<ContainerStatusProto>() {
+
+ Iterator<ContainerStatus> iter = containers.iterator();
+
+ @Override
+ public boolean hasNext() {
+ return iter.hasNext();
+ }
+
+ @Override
+ public ContainerStatusProto next() {
+ return convertToProtoFormat(iter.next());
+ }
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+
+ }
+ };
+ }
+ };
+ builder.addAllContainersStatuses(iterable);
+ }
+
@Override
public int getResponseId() {
NodeStatusProtoOrBuilder p = viaProto ? proto : builder;
@@ -133,24 +158,17 @@ public class NodeStatusPBImpl extends Pr
}
@Override
- public Map<ApplicationId, List<Container>> getAllContainers() {
+ public List<ContainerStatus> getContainersStatuses() {
initContainers();
- HashMap<ApplicationId, List<Container>> returnMap = new HashMap<ApplicationId, List<Container>>(
- this.containers.size());
- for (Entry<ApplicationIdProto, List<Container>> entry : this.containers.entrySet()) {
- returnMap.put(convertFromProtoFormat(entry.getKey()), entry.getValue());
- }
- return returnMap;
+ return this.containers;
}
@Override
- public List<Container> getContainers(ApplicationId applicationId) {
- initContainers();
- ApplicationIdProto applicationIdProto = convertToProtoFormat(applicationId);
- if (this.containers.get(applicationIdProto) == null) {
- this.containers.put(applicationIdProto, new ArrayList<Container>());
+ public void setContainersStatuses(List<ContainerStatus> containers) {
+ if (containers == null) {
+ builder.clearContainersStatuses();
}
- return this.containers.get(applicationIdProto);
+ this.containers = containers;
}
private void initContainers() {
@@ -158,60 +176,16 @@ public class NodeStatusPBImpl extends Pr
return;
}
NodeStatusProtoOrBuilder p = viaProto ? proto : builder;
- List<ApplicationIdContainerListMapProto> list = p.getContainersList();
- this.containers = new HashMap<ApplicationIdProto, List<Container>>();
+ List<ContainerStatusProto> list = p.getContainersStatusesList();
+ this.containers = new ArrayList<ContainerStatus>();
- for (ApplicationIdContainerListMapProto c : list) {
- this.containers.put(c.getApplicationId(), convertFromProtoFormat(c.getValue()));
+ for (ContainerStatusProto c : list) {
+ this.containers.add(convertFromProtoFormat(c));
}
}
@Override
- public void addAllContainers(final Map<ApplicationId, List<Container>> containers) {
- if (containers == null)
- return;
- initContainers();
- for (Entry<ApplicationId, List<Container>> entry : containers.entrySet()) {
- this.containers.put(convertToProtoFormat(entry.getKey()), entry.getValue());
- }
- }
-
- private void addContainersToProto() {
- maybeInitBuilder();
- builder.clearContainers();
- viaProto = false;
- Iterable<ApplicationIdContainerListMapProto> iterable = new Iterable<ApplicationIdContainerListMapProto>() {
-
- @Override
- public Iterator<ApplicationIdContainerListMapProto> iterator() {
- return new Iterator<ApplicationIdContainerListMapProto>() {
-
- Iterator<ApplicationIdProto> keyIter = containers.keySet().iterator();
- @Override
- public boolean hasNext() {
- return keyIter.hasNext();
- }
-
- @Override
- public ApplicationIdContainerListMapProto next() {
- ApplicationIdProto applicationIdProto = keyIter.next();
- return ApplicationIdContainerListMapProto.newBuilder().setApplicationId(applicationIdProto).setValue(convertToProtoFormat(containers.get(applicationIdProto))).build();
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException();
- }
-
- };
- }
-
- };
- builder.addAllContainers(iterable);
- }
-
- @Override
public NodeHealthStatus getNodeHealthStatus() {
NodeStatusProtoOrBuilder p = viaProto ? proto : builder;
if (nodeHealthStatus != null) {
@@ -233,66 +207,6 @@ public class NodeStatusPBImpl extends Pr
this.nodeHealthStatus = healthStatus;
}
- /*
- *
- * @Override
- public String getApplicationName() {
- ApplicationSubmissionContextProtoOrBuilder p = viaProto ? proto : builder;
- if (!p.hasApplicationName()) {
- return null;
- }
- return (p.getApplicationName());
- }
-
- @Override
- public void setApplicationName(String applicationName) {
- maybeInitBuilder();
- if (applicationName == null) {
- builder.clearApplicationName();
- return;
- }
- builder.setApplicationName((applicationName));
- }
- */
-
- private ContainerListProto convertToProtoFormat(List<Container> src) {
- ContainerListProto.Builder ret = ContainerListProto.newBuilder();
- for (Container c : src) {
- ret.addContainer(((ContainerPBImpl)c).getProto());
- }
- return ret.build();
- }
-
- private List<Container> convertFromProtoFormat(ContainerListProto src) {
- List<Container> ret = new ArrayList<Container>();
- for (ContainerProto c : src.getContainerList()) {
- ret.add(convertFromProtoFormat(c));
- }
- return ret;
- }
-
- private Container convertFromProtoFormat(ContainerProto src) {
- return new ContainerPBImpl(src);
- }
-
- @Override
- public void setContainers(ApplicationId applicationId, List<Container> containers) {
- initContainers();
- this.containers.put(convertToProtoFormat(applicationId), containers);
- }
-
- @Override
- public void removeContainers(ApplicationId applicationId) {
- initContainers();
- this.containers.remove(convertToProtoFormat(applicationId));
- }
-
- @Override
- public void clearContainers() {
- initContainers();
- this.containers.clear();
- }
-
private NodeIdProto convertToProtoFormat(NodeId nodeId) {
return ((NodeIdPBImpl)nodeId).getProto();
}
@@ -301,14 +215,6 @@ public class NodeStatusPBImpl extends Pr
return new NodeIdPBImpl(proto);
}
- private ApplicationIdProto convertToProtoFormat(ApplicationId applicationId) {
- return ((ApplicationIdPBImpl)applicationId).getProto();
- }
-
- private ApplicationId convertFromProtoFormat(ApplicationIdProto proto) {
- return new ApplicationIdPBImpl(proto);
- }
-
private NodeHealthStatusProto convertToProtoFormat(
NodeHealthStatus healthStatus) {
return ((NodeHealthStatusPBImpl) healthStatus).getProto();
@@ -317,4 +223,12 @@ public class NodeStatusPBImpl extends Pr
private NodeHealthStatus convertFromProtoFormat(NodeHealthStatusProto proto) {
return new NodeHealthStatusPBImpl(proto);
}
+
+ private ContainerStatusPBImpl convertFromProtoFormat(ContainerStatusProto c) {
+ return new ContainerStatusPBImpl(c);
+ }
+
+ private ContainerStatusProto convertToProtoFormat(ContainerStatus c) {
+ return ((ContainerStatusPBImpl)c).getProto();
+ }
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/proto/yarn_server_common_protos.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/proto/yarn_server_common_protos.proto?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/proto/yarn_server_common_protos.proto (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/proto/yarn_server_common_protos.proto Tue Sep 13 22:49:27 2011
@@ -26,7 +26,7 @@ import "yarn_protos.proto";
message NodeStatusProto {
optional NodeIdProto node_id = 1;
optional int32 response_id = 2;
- repeated ApplicationIdContainerListMapProto containers = 3;
+ repeated ContainerStatusProto containersStatuses = 3;
optional NodeHealthStatusProto nodeHealthStatus = 4;
}
@@ -41,12 +41,3 @@ message HeartbeatResponseProto {
repeated ApplicationIdProto applications_to_cleanup = 4;
}
-message ContainerListProto {
- repeated ContainerProto container = 1;
-}
-
-message ApplicationIdContainerListMapProto {
- optional ApplicationIdProto application_id = 1;
- optional ContainerListProto value = 2;
-}
-
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml Tue Sep 13 22:49:27 2011
@@ -1,212 +1,367 @@
<?xml version="1.0"?>
<configuration>
+
+ <!-- IPC Configs -->
+ <property>
+ <description>Factory to create client IPC classes.</description>
+ <name>yarn.ipc.client.factory.class</name>
+ </property>
+
+ <property>
+ <description>Type of serialization to use.</description>
+ <name>yarn.ipc.serializer.type</name>
+ <value>protocolbuffers</value>
+ </property>
+
+ <property>
+ <description>Factory to create server IPC classes.</description>
+ <name>yarn.ipc.server.factory.class</name>
+ </property>
+
+ <property>
+ <description>Factory to create IPC exceptions.</description>
+ <name>yarn.ipc.exception.factory.class</name>
+ </property>
+
+ <property>
+ <description>Factory to create serializable records.</description>
+ <name>yarn.ipc.record.factory.class</name>
+ </property>
+
+ <property>
+ <description>RPC class implementation</description>
+ <name>yarn.ipc.rpc.class</name>
+ <value>org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC</value>
+ </property>
+
+ <!-- Resource Manager Configs -->
+ <property>
+ <description>The address of the applications manager interface in the RM.</description>
+ <name>yarn.resourcemanager.address</name>
+ <value>0.0.0.0:8040</value>
+ </property>
+
+ <property>
+ <description>The number of threads used to handle applications manager requests.</description>
+ <name>yarn.resourcemanager.client.thread-count</name>
+ <value>10</value>
+ </property>
+
+ <property>
+ <description>The expiry interval for application master reporting.</description>
+ <name>yarn.resourcemanager.am.liveness-monitor.expiry-interval-ms</name>
+ <value>600000</value>
+ </property>
- <property>
+ <property>
+ <description>The Kerberos principal for the resource manager.</description>
<name>yarn.resourcemanager.principal</name>
<value>rm/sightbusy-lx@LOCALHOST</value>
</property>
- <property>
- <name>yarn.nodemanager.principal</name>
- <value>nm/sightbusy-lx@LOCALHOST</value>
+ <property>
+ <description>The address of the scheduler interface.</description>
+ <name>yarn.resourcemanager.scheduler.address</name>
+ <value>0.0.0.0:8030</value>
</property>
-
-<!-- All resourcemanager related configuration properties -->
+ <property>
+ <description>Number of threads to handle scheduler interface.</description>
+ <name>yarn.resourcemanager.scheduler.client.thread-count</name>
+ <value>10</value>
+ </property>
<property>
- <name>yarn.server.resourcemanager.address</name>
- <value>0.0.0.0:8020</value>
+ <description>The address of the RM web application.</description>
+ <name>yarn.resourcemanager.webapp.address</name>
+ <value>0.0.0.0:8088</value>
</property>
- <property>
- <name>yarn.server.resourcemanager.resourcetracker.address</name>
+ <property>
+ <name>yarn.resourcemanager.resource-tracker.address</name>
<value>0.0.0.0:8025</value>
</property>
- <property>
- <name>yarn.server.resourcemanager.scheduler.address</name>
- <value>0.0.0.0:8030</value>
+ <property>
+ <description>Are RM acls enabled.</description>
+ <name>yarn.resourcemanager.acl.enable</name>
+ <value>false</value>
+ </property>
+
+ <property>
+ <description>ACL of who can be admin of RM.</description>
+ <name>yarn.resourcemanager.admin.acl</name>
+ <value>*</value>
</property>
- <property>
- <name>yarn.server.resourcemanager.admin.address</name>
+ <property>
+ <description>The address of the RM admin interface.</description>
+ <name>yarn.resourcemanager.admin.address</name>
<value>0.0.0.0:8141</value>
</property>
<property>
- <name>yarn.server.resourcemanager.application.max.retries</name>
+ <description>Number of threads used to handle RM admin interface.</description>
+ <name>yarn.resourcemanager.admin.client.thread-count</name>
+ <value>1</value>
+ </property>
+
+ <property>
+ <description>How often should the RM check that the AM is still alive.</description>
+ <name>yarn.resourcemanager.amliveliness-monitor.interval-ms</name>
+ <value>1000</value>
+ </property>
+
+ <property>
+ <description>The maximum number of application master retries.</description>
+ <name>yarn.resourcemanager.am.max-retries</name>
<value>1</value>
- <description>The number of times an application will be retried in case
- of AM failure.</description>
</property>
+
+ <property>
+ <description>How often to check that containers are still alive. </description>
+ <name>yarn.resourcemanager.container.liveness-monitor.interval-ms</name>
+ <value>600000</value>
+ </property>
+
<property>
- <name>yarn.server.resourcemanager.keytab</name>
+ <description>The keytab for the resource manager.</description>
+ <name>yarn.resourcemanager.keytab</name>
<value>/etc/krb5.keytab</value>
</property>
<property>
- <name>yarn.server.resourcemanager.expire.applications.completed.max</name>
- <value>10000</value>
- <description>the maximum number of completed applications the RM
- keeps in memory
- </description>
+ <description>How long to wait until a node manager is considered dead.</description>
+ <name>yarn.resourcemanager.nm.liveness-monitor.expiry-interval-ms</name>
+ <value>600000</value>
</property>
-<!-- All nodemanager related configuration properties -->
+ <property>
+ <description>How often to check that node managers are still alive.</description>
+ <name>yarn.resourcemanager.nm.liveness-monitor.interval-ms</name>
+ <value>1000</value>
+ </property>
<property>
- <name>yarn.server.nodemanager.local-dir</name>
- <value>/tmp/nm-local-dir</value>
+ <description>Path to file with nodes to include.</description>
+ <name>yarn.resourcemanager.nodes.include-path</name>
+ <value></value>
</property>
<property>
- <name>yarn.server.nodemanager.log.dir</name>
- <value>/tmp/logs</value>
+ <description>Path to file with nodes to exclude.</description>
+ <name>yarn.resourcemanager.nodes.exclude-path</name>
+ <value></value>
</property>
<property>
- <name>yarn.apps.stagingDir</name>
- <value>/tmp/hadoop-yarn/${user.name}/staging</value>
- </property>
+ <description>Number of threads to handle resource tracker calls.</description>
+ <name>yarn.resourcemanager.resource-tracker.client.thread-count</name>
+ <value>10</value>
+ </property>
<property>
- <name>yarn.apps.history.stagingDir</name>
- <value>/tmp/hadoop-yarn/${user.name}/staging</value>
- </property>
+ <description>The class to use as the resource scheduler.</description>
+ <name>yarn.resourcemanager.scheduler.class</name>
+ </property>
<property>
- <name>yarn.server.nodemanager.keytab</name>
- <value>/etc/krb5.keytab</value>
+ <description>The class to use as the persistent store.</description>
+ <name>yarn.resourcemanager.store.class</name>
</property>
<property>
- <name>yarn.server.nodemanager.container-executor.class</name>
- <value>org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor</value>
- <!--<value>org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor</value>-->
+ <description>The address of the zookeeper instance to use with ZK store.</description>
+ <name>yarn.resourcemanager.zookeeper-store.address</name>
+ </property>
+
+ <property>
+ <description>The zookeeper session timeout for the zookeeper store.</description>
+ <name>yarn.resourcemanager.zookeeper-store.session.timeout-ms</name>
+ <value>60000</value>
</property>
- <property><name>NM_HOSTS</name><value>0.0.0.0:45454</value></property>
+ <property>
+ <description>The maximum number of completed applications RM keeps. </description>
+ <name>yarn.resourcemanager.max-completed-applications</name>
+ <value>10000</value>
+ </property>
+ <!-- Node Manager Configs -->
<property>
- <name>yarn.server.nodemanager.address</name>
+ <description>Address of the node manager IPC.</description>
+ <name>yarn.nodemanager.address</name>
<value>0.0.0.0:45454</value>
</property>
- <!-- HealthChecker's properties -->
<property>
- <name>yarn.server.nodemanager.healthchecker.script.path</name>
- <value></value>
- <description>Location of the node's health-check script on the local
- file-system.
- </description>
+ <description>Who will execute (launch) the containers.</description>
+ <name>yarn.nodemanager.container-executor.class</name>
+ <value>org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor</value>
+<!--<value>org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor</value>-->
</property>
<property>
- <name>yarn.server.nodemanager.healthchecker.interval</name>
- <value>600000</value>
- <description>Frequency of the health-check run by the NodeManager
- </description>
+ <description>Number of threads container manager uses.</description>
+ <name>yarn.nodemanager.container-manager.thread-count</name>
+ <value>5</value>
</property>
<property>
- <name>yarn.server.nodemanager.healthchecker.script.timeout</name>
- <value>1200000</value>
- <description>Timeout for the health-check run by the NodeManager
- </description>
+ <description>Number of threads used in cleanup.</description>
+ <name>yarn.nodemanager.delete.thread-count</name>
+ <value>4</value>
</property>
<property>
- <name>yarn.server.nodemanager.healthchecker.script.args</name>
- <value></value>
- <description>Arguments to be passed to the health-check script run
- by the NodeManager</description>
+ <description>Heartbeat interval to RM</description>
+ <name>yarn.nodemanager.heartbeat.interval-ms</name>
+ <value>1000</value>
</property>
<property>
- <name>yarn.server.nodemanager.healthchecker.script.path</name>
- <value></value>
- <description>Location of the node's health-check script on the local
- file-system.
- </description>
+ <description>Keytab for NM.</description>
+ <name>yarn.nodemanager.keytab</name>
+ <value>/etc/krb5.keytab</value>
+ </property>
+
+ <property>
+ <description>List of directories to store localized files in.</description>
+ <name>yarn.nodemanager.local-dirs</name>
+ <value>/tmp/nm-local-dir</value>
+ </property>
+
+ <property>
+ <description>Address where the localizer IPC is.</description>
+ <name>yarn.nodemanager.localizer.address</name>
+ <value>0.0.0.0:4344</value>
</property>
<property>
- <name>yarn.server.nodemanager.healthchecker.interval</name>
+ <description>Interval in between cache cleanups.</description>
+ <name>yarn.nodemanager.localizer.cache.cleanup.interval-ms</name>
<value>600000</value>
- <description>Frequency of the health-check run by the NodeManager
- </description>
</property>
<property>
- <name>yarn.server.nodemanager.healthchecker.script.timeout</name>
- <value>1200000</value>
- <description>Timeout for the health-check run by the NodeManager
- </description>
+ <description>Target size of localizer cache in MB, per local directory.</description>
+ <name>yarn.nodemanager.localizer.cache.target-size-mb</name>
+ <value>10240</value>
</property>
<property>
- <name>yarn.server.nodemanager.healthchecker.script.args</name>
- <value></value>
- <description>Arguments to be passed to the health-check script run
- by the NodeManager</description>
+ <description>Number of threads to handle localization requests.</description>
+ <name>yarn.nodemanager.localizer.client.thread-count</name>
+ <value>5</value>
+ </property>
+
+ <property>
+ <description>Number of threads to use for localization fetching.</description>
+ <name>yarn.nodemanager.localizer.fetch.thread-count</name>
+ <value>4</value>
</property>
- <!-- End of HealthChecker's properties -->
- <!-- ContainerMonitor related properties -->
+ <property>
+ <description>Where to store container logs.</description>
+ <name>yarn.nodemanager.log-dirs</name>
+ <value>/tmp/logs</value>
+ </property>
<property>
- <name>yarn.server.nodemanager.containers-monitor.monitoring-interval</name>
- <value>3000</value>
+ <description>Where to aggregate logs to.</description>
+ <name>yarn.nodemanager.remote-app-log-dir</name>
+ <value>/tmp/logs</value>
</property>
<property>
- <name>yarn.server.nodemanager.containers-monitor.resourcecalculatorplugin</name>
- <value></value>
+ <description>Amount of memory in GB that can be allocated for containers.</description>
+ <name>yarn.nodemanager.resource.memory-gb</name>
+ <value>8</value>
</property>
- <property>
- <name>yarn.server.nodemanager.reserved-physical-memory.mb</name>
- <value>-1</value>
- </property>
+ <property>
+ <description>NM Webapp address.</description>
+ <name>yarn.nodemanager.webapp.address</name>
+ <value>0.0.0.0:9999</value>
+ </property>
+
+ <property>
+ <description>How often to monitor containers.</description>
+ <name>yarn.nodemanager.container-monitor.interval-ms</name>
+ <value>3000</value>
+ </property>
- <!-- End of ContainerMonitor related properties -->
+ <property>
+ <description>Class that calculates the containers' current resource utilization.</description>
+ <name>yarn.nodemanager.container-monitor.resource-calculator.class</name>
+ </property>
-<!-- All MRAppMaster related configuration properties -->
+ <property>
+ <description>Amount of physical RAM to reserve for other applications, -1 disables.</description>
+ <name>yarn.nodemanager.reserved.memory-mb</name>
+ <value>-1</value>
+ </property>
<property>
- <name>yarn.server.mapreduce-appmanager.attempt-listener.bindAddress</name>
- <value>0.0.0.0</value>
+ <description>Frequency of running node health script.</description>
+ <name>yarn.nodemanager.health-checker.interval-ms</name>
+ <value>600000</value>
</property>
<property>
- <name>yarn.server.mapreduce-appmanager.client-service.bindAddress</name>
- <value>0.0.0.0</value>
+ <description>Script time out period.</description>
+ <name>yarn.nodemanager.health-checker.script.timeout-ms</name>
+ <value>1200000</value>
</property>
+ <property>
+ <description>The health check script to run.</description>
+ <name>yarn.nodemanager.health-checker.script.path</name>
+ <value></value>
+ </property>
<property>
- <name>mapreduce.job.jar</name>
+ <description>The arguments to pass to the health check script.</description>
+ <name>yarn.nodemanager.health-checker.script.opts</name>
<value></value>
- <!--<value>~/Workspace/eclipse-workspace/yarn/yarn-mapreduce/yarn-mapreduce-app/target/yarn-mapreduce-app-0.24.0-SNAPSHOT.jar</value>-->
</property>
<property>
- <name>mapreduce.job.hdfs-servers</name>
- <value>${fs.default.name}</value>
- </property>
+ <description>The path to the Linux container executor.</description>
+ <name>yarn.nodemanager.linux-container-executor.path</name>
+ </property>
+
+ <property>
+ <description>T-file compression types used to compress aggregated logs.</description>
+ <name>yarn.nodemanager.log-aggregation.compression-type</name>
+ <value>none</value>
+ </property>
+
+ <property>
+ <description>The Kerberos principal for the node manager.</description>
+ <name>yarn.nodemanager.principal</name>
+ <value>nm/sightbusy-lx@LOCALHOST</value>
+ </property>
+
+ <property>
+ <name>yarn.nodemanager.aux-services</name>
+ <value></value>
+ <!-- <value>mapreduce.shuffle</value> -->
+ </property>
+ <!--Map Reduce configuration-->
<property>
- <name>nodemanager.auxiluary.services</name>
- <value></value>
- <!-- <value>mapreduce.shuffle</value> -->
+ <name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
+ <value>org.apache.hadoop.mapred.ShuffleHandler</value>
</property>
-<!--
<property>
- <name>nodemanager.aux.service.mapreduce.shuffle.class</name>
- <value>org.apache.hadoop.mapred.ShuffleHandler</value>
+ <name>mapreduce.job.jar</name>
+ <value/>
</property>
--->
+ <property>
+ <name>mapreduce.job.hdfs-servers</name>
+ <value>${fs.default.name}</value>
+ </property>
</configuration>
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/TestNodeHealthService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/TestNodeHealthService.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/TestNodeHealthService.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/TestNodeHealthService.java Tue Sep 13 22:49:27 2011
@@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.NodeHealthStatus;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.junit.After;
@@ -66,11 +67,11 @@ public class TestNodeHealthService {
private Configuration getConfForNodeHealthScript() {
Configuration conf = new Configuration();
- conf.set(NodeHealthCheckerService.HEALTH_CHECK_SCRIPT_PROPERTY,
+ conf.set(YarnConfiguration.NM_HEALTH_CHECK_SCRIPT_PATH,
nodeHealthscriptFile.getAbsolutePath());
- conf.setLong(NodeHealthCheckerService.HEALTH_CHECK_INTERVAL_PROPERTY, 500);
+ conf.setLong(YarnConfiguration.NM_HEALTH_CHECK_INTERVAL_MS, 500);
conf.setLong(
- NodeHealthCheckerService.HEALTH_CHECK_FAILURE_INTERVAL_PROPERTY, 1000);
+ YarnConfiguration.NM_HEALTH_CHECK_SCRIPT_TIMEOUT_MS, 1000);
return conf;
}
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/c/container-executor/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Tue Sep 13 22:49:27 2011
@@ -0,0 +1,11 @@
+configure
+Makefile.in
+config.log
+config.status
+depcomp
+compile
+missing
+Makefile
+aclocal.m4
+container-executor
+install-sh
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/c/container-executor/impl/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Tue Sep 13 22:49:27 2011
@@ -0,0 +1,2 @@
+.dirstamp
+.deps
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java Tue Sep 13 22:49:27 2011
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.permission.F
import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerDiagnosticsUpdateEvent;
@@ -96,9 +97,11 @@ public class DefaultContainerExecutor ex
// create container dirs on all disks
String containerIdStr = ConverterUtils.toString(containerId);
String appIdStr =
- ConverterUtils.toString(container.getContainerID().getAppId());
+ ConverterUtils.toString(
+ container.getContainerID().getApplicationAttemptId().
+ getApplicationId());
String[] sLocalDirs =
- getConf().getStrings(NMConfig.NM_LOCAL_DIR, NMConfig.DEFAULT_NM_LOCAL_DIR);
+ getConf().getStrings(YarnConfiguration.NM_LOCAL_DIRS, YarnConfiguration.DEFAULT_NM_LOCAL_DIRS);
for (String sLocalDir : sLocalDirs) {
Path usersdir = new Path(sLocalDir, ContainerLocalizer.USERCACHE);
Path userdir = new Path(usersdir, userName);
@@ -358,7 +361,7 @@ public class DefaultContainerExecutor ex
throws IOException {
String[] rootLogDirs =
getConf()
- .getStrings(NMConfig.NM_LOG_DIR, NMConfig.DEFAULT_NM_LOG_DIR);
+ .getStrings(YarnConfiguration.NM_LOG_DIRS, YarnConfiguration.DEFAULT_NM_LOG_DIRS);
boolean appLogDirStatus = false;
FsPermission appLogDirPerms = new FsPermission(LOGDIR_PERM);
@@ -386,7 +389,7 @@ public class DefaultContainerExecutor ex
throws IOException {
String[] rootLogDirs =
getConf()
- .getStrings(NMConfig.NM_LOG_DIR, NMConfig.DEFAULT_NM_LOG_DIR);
+ .getStrings(YarnConfiguration.NM_LOG_DIRS, YarnConfiguration.DEFAULT_NM_LOG_DIRS);
boolean containerLogDirStatus = false;
FsPermission containerLogDirPerms = new FsPermission(LOGDIR_PERM);