You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by vi...@apache.org on 2011/05/12 13:26:27 UTC

svn commit: r1102243 - in /hadoop/mapreduce/branches/MR-279: ./ mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/ yarn/yarn-api...

Author: vinodkv
Date: Thu May 12 11:26:26 2011
New Revision: 1102243

URL: http://svn.apache.org/viewvc?rev=1102243&view=rev
Log:
Fix a race in MR task that was causing MR containers to overwrite each other's job.xml. Also fix leaking attempt-dirs in app-local-dir. Contributed by Vinod Kumar Vavilapalli.

Modified:
    hadoop/mapreduce/branches/MR-279/CHANGES.txt
    hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java
    hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java
    hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java
    hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java
    hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnOutputFiles.java
    hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerLaunchContext.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
    hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java

Modified: hadoop/mapreduce/branches/MR-279/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/CHANGES.txt?rev=1102243&r1=1102242&r2=1102243&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/CHANGES.txt (original)
+++ hadoop/mapreduce/branches/MR-279/CHANGES.txt Thu May 12 11:26:26 2011
@@ -3,7 +3,10 @@ Hadoop MapReduce Change Log
 Trunk (unreleased changes)
 
   MAPREDUCE-279
-    Minor fix for install instructions.
+    Fix a race in MR task that was causing MR containers to overwrite each
+    other's job.xml. Also fix leaking attempt-dirs in app-local-dir. (vinodkv)
+
+    Minor fix for install instructions. (mahadev)
     
     Add license header and minor cleanup in history server. (Siddharth Seth 
     via sharad)

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java?rev=1102243&r1=1102242&r2=1102243&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java Thu May 12 11:26:26 2011
@@ -254,8 +254,8 @@ public class MapReduceChildJVM {
     for (CharSequence str : vargs) {
       mergedCommand.append(str).append(" ");
     }
-    Vector<String> vargsFinal = new Vector<String>(8);
-    vargsFinal.add("mkdir work;" + mergedCommand.toString());
+    Vector<String> vargsFinal = new Vector<String>(1);
+    vargsFinal.add(mergedCommand.toString());
     return vargsFinal;
   }
 }

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java?rev=1102243&r1=1102242&r2=1102243&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java Thu May 12 11:26:26 2011
@@ -55,7 +55,7 @@ public class MapTaskAttemptImpl extends 
   public Task createRemoteTask() {
     //job file name is set in TaskAttempt, setting it null here
     MapTask mapTask =
-      new MapTask(null, TypeConverter.fromYarn(getID()), partition,
+      new MapTask("", TypeConverter.fromYarn(getID()), partition,
           splitInfo.getSplitIndex(), 1); // YARN doesn't have the concept of slots per task, set it as 1.
     mapTask.setUser(conf.get(MRJobConfig.USER_NAME));
     mapTask.setConf(conf);

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java?rev=1102243&r1=1102242&r2=1102243&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java Thu May 12 11:26:26 2011
@@ -54,7 +54,7 @@ public class ReduceTaskAttemptImpl exten
   public Task createRemoteTask() {
   //job file name is set in TaskAttempt, setting it null here
     ReduceTask reduceTask =
-      new ReduceTask(null, TypeConverter.fromYarn(getID()), partition,
+      new ReduceTask("", TypeConverter.fromYarn(getID()), partition,
           numMapTasks, 1); // YARN doesn't have the concept of slots per task, set it as 1.
   reduceTask.setUser(conf.get(MRJobConfig.USER_NAME));
   reduceTask.setConf(conf);

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java?rev=1102243&r1=1102242&r2=1102243&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java Thu May 12 11:26:26 2011
@@ -57,7 +57,7 @@ import org.apache.log4j.LogManager;
  */
 class YarnChild {
 
-  public static final Log LOG = LogFactory.getLog(YarnChild.class);
+  private static final Log LOG = LogFactory.getLog(YarnChild.class);
 
   static volatile TaskAttemptID taskid = null;
 
@@ -194,7 +194,7 @@ class YarnChild {
     }
   }
 
-  static Token<JobTokenIdentifier> loadCredentials(JobConf conf,
+  private static Token<JobTokenIdentifier> loadCredentials(JobConf conf,
       InetSocketAddress address) throws IOException {
     //load token cache storage
     String jobTokenFile =
@@ -216,14 +216,18 @@ class YarnChild {
     return jt;
   }
 
-  static void configureLocalDirs(Task task, JobConf job) {
+  /**
+   * Configure mapred-local dirs. This config is used by the task for finding
+   * out an output directory.
+   */
+  private static void configureLocalDirs(Task task, JobConf job) {
     String[] localSysDirs = StringUtils.getTrimmedStrings(
         System.getenv(YARNApplicationConstants.LOCAL_DIR_ENV));
     job.setStrings(MRConfig.LOCAL_DIR, localSysDirs);
     LOG.info(MRConfig.LOCAL_DIR + " for child: " + job.get(MRConfig.LOCAL_DIR));
   }
 
-  static JobConf configureTask(Task task, Credentials credentials,
+  private static JobConf configureTask(Task task, Credentials credentials,
       Token<JobTokenIdentifier> jt) throws IOException {
     final JobConf job = new JobConf(YARNApplicationConstants.JOB_CONF_FILE);
     job.setCredentials(credentials);
@@ -241,10 +245,9 @@ class YarnChild {
     // setup the child's attempt directories
     // Do the task-type specific localization
     task.localizeConfiguration(job);
-    //write the localized task jobconf
-    LocalDirAllocator lDirAlloc = new LocalDirAllocator(MRConfig.LOCAL_DIR);
-    Path localTaskFile =
-      lDirAlloc.getLocalPathForWrite(Constants.JOBFILE, job);
+    // Overwrite the localized task jobconf which is linked to in the current
+    // work-dir.
+    Path localTaskFile = new Path(Constants.JOBFILE);
     writeLocalJobFile(localTaskFile, job);
     task.setJobFile(localTaskFile.toString());
     task.setConf(job);
@@ -258,7 +261,7 @@ class YarnChild {
    * Write the task specific job-configuration file.
    * @throws IOException
    */
-  public static void writeLocalJobFile(Path jobFile, JobConf conf)
+  private static void writeLocalJobFile(Path jobFile, JobConf conf)
       throws IOException {
     FileSystem localFs = FileSystem.getLocal(conf);
     localFs.delete(jobFile);

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnOutputFiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnOutputFiles.java?rev=1102243&r1=1102242&r2=1102243&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnOutputFiles.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnOutputFiles.java Thu May 12 11:26:26 2011
@@ -20,16 +20,14 @@ package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.MRConfig;
 
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
 /**
  * Manipulate the working area for the transient store for maps and reduces.
  *
@@ -43,11 +41,10 @@ public class YarnOutputFiles extends Map
 
   private JobConf conf;
 
-  //static final String MAP_OUTPUT_FILENAME_STRING = "file.out";
-  //static final String MAP_OUTPUT_INDEX_SUFFIX_STRING = ".index";
-  //static final String REDUCE_INPUT_FILE_FORMAT_STRING = "%s/map_%d.out";
-  static final String JOB_OUTPUT_DIR = "output";
-  static final String TMP_DIR = "%s/tmp";
+  private static final String JOB_OUTPUT_DIR = "output";
+  private static final String SPILL_FILE_PATTERN = "%s_spill_%d.out";
+  private static final String SPILL_INDEX_FILE_PATTERN =
+      "%s_spill_%d.out.index";
 
   public YarnOutputFiles() {
   }
@@ -89,9 +86,7 @@ public class YarnOutputFiles extends Map
    * Create a local map output file name on the same volume.
    */
   public Path getOutputFileForWriteInVolume(Path existing) {
-    // TODO
-    Path outputDir = new Path(existing.getParent().getParent().getParent(),
-        JOB_OUTPUT_DIR);
+    Path outputDir = new Path(existing.getParent(), JOB_OUTPUT_DIR);
     Path attemptOutputDir = new Path(outputDir,
         conf.get(JobContext.TASK_ATTEMPT_ID));
     return new Path(attemptOutputDir, MAP_OUTPUT_FILENAME_STRING);
@@ -129,9 +124,7 @@ public class YarnOutputFiles extends Map
    * Create a local map output index file name on the same volume.
    */
   public Path getOutputIndexFileForWriteInVolume(Path existing) {
-    // TODO
-    Path outputDir = new Path(existing.getParent().getParent().getParent(),
-        JOB_OUTPUT_DIR);
+    Path outputDir = new Path(existing.getParent(), JOB_OUTPUT_DIR);
     Path attemptOutputDir = new Path(outputDir,
         conf.get(JobContext.TASK_ATTEMPT_ID));
     return new Path(attemptOutputDir, MAP_OUTPUT_FILENAME_STRING +
@@ -147,8 +140,8 @@ public class YarnOutputFiles extends Map
    */
   public Path getSpillFile(int spillNumber) throws IOException {
     return lDirAlloc.getLocalPathToRead(
-        String.format(TMP_DIR, conf.get(JobContext.TASK_ATTEMPT_ID)) +
-          "/spill" + spillNumber + ".out", conf);
+        String.format(SPILL_FILE_PATTERN,
+            conf.get(JobContext.TASK_ATTEMPT_ID), spillNumber), conf);
   }
 
   /**
@@ -162,8 +155,8 @@ public class YarnOutputFiles extends Map
   public Path getSpillFileForWrite(int spillNumber, long size)
       throws IOException {
     return lDirAlloc.getLocalPathForWrite(
-        String.format(TMP_DIR, conf.get(JobContext.TASK_ATTEMPT_ID)) +
-          "/spill" + spillNumber + ".out", size, conf);
+        String.format(String.format(SPILL_FILE_PATTERN,
+            conf.get(JobContext.TASK_ATTEMPT_ID), spillNumber)), size, conf);
   }
 
   /**
@@ -175,8 +168,8 @@ public class YarnOutputFiles extends Map
    */
   public Path getSpillIndexFile(int spillNumber) throws IOException {
     return lDirAlloc.getLocalPathToRead(
-        String.format(TMP_DIR, conf.get(JobContext.TASK_ATTEMPT_ID)) +
-          "/spill" + spillNumber + ".out.index", conf);
+        String.format(SPILL_INDEX_FILE_PATTERN,
+            conf.get(JobContext.TASK_ATTEMPT_ID), spillNumber), conf);
   }
 
   /**
@@ -190,8 +183,8 @@ public class YarnOutputFiles extends Map
   public Path getSpillIndexFileForWrite(int spillNumber, long size)
       throws IOException {
     return lDirAlloc.getLocalPathForWrite(
-        String.format(TMP_DIR, conf.get(JobContext.TASK_ATTEMPT_ID)) +
-          "/spill" + spillNumber + ".out.index", size, conf);
+        String.format(SPILL_INDEX_FILE_PATTERN,
+            conf.get(JobContext.TASK_ATTEMPT_ID), spillNumber), size, conf);
   }
 
   /**

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java?rev=1102243&r1=1102242&r2=1102243&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java Thu May 12 11:26:26 2011
@@ -430,11 +430,16 @@ public abstract class TaskAttemptImpl im
     return memory;
   }
 
-  private static LocalResource getLocalResource(FileContext fc, Path file, 
-      LocalResourceType type, LocalResourceVisibility visibility) 
-  throws IOException {
+  /**
+   * Create a {@link LocalResource} record with all the given parameters.
+   * TODO: This should pave way for Builder pattern.
+   */
+  private static LocalResource createLocalResource(FileContext fc,
+      RecordFactory recordFactory, Path file, LocalResourceType type,
+      LocalResourceVisibility visibility) throws IOException {
     FileStatus fstat = fc.getFileStatus(file);
-    LocalResource resource = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(LocalResource.class);
+    LocalResource resource =
+        recordFactory.newRecordInstance(LocalResource.class);
     resource.setResource(ConverterUtils.getYarnUrlFromPath(fstat.getPath()));
     resource.setType(type);
     resource.setVisibility(visibility);
@@ -442,45 +447,43 @@ public abstract class TaskAttemptImpl im
     resource.setTimestamp(fstat.getModificationTime());
     return resource;
   }
-  
-  private ContainerLaunchContext getContainer() {
 
-    ContainerLaunchContext container = recordFactory.newRecordInstance(ContainerLaunchContext.class);
+  /**
+   * Create the {@link ContainerLaunchContext} for this attempt.
+   */
+  private ContainerLaunchContext createContainerLaunchContext() {
+
+    ContainerLaunchContext container =
+        recordFactory.newRecordInstance(ContainerLaunchContext.class);
 
     try {
       FileContext remoteFS = FileContext.getFileContext(conf);
-      
-      Path localizedJobConf = new Path(YARNApplicationConstants.JOB_CONF_FILE);
-      remoteTask.setJobFile(localizedJobConf.toString()); // Screwed!!!!!!
-      URL jobConfFileOnRemoteFS = ConverterUtils.getYarnUrlFromPath(localizedJobConf);
-      LOG.info("The job-conf file on the remote FS is " + jobConfFileOnRemoteFS);
-      
-      Path jobJar = remoteFS.makeQualified(new Path(remoteTask.getConf().get(MRJobConfig.JAR)));
-      URL jobJarFileOnRemoteFS = ConverterUtils.getYarnUrlFromPath(jobJar);
-      container.setLocalResource(YARNApplicationConstants.JOB_JAR, getLocalResource(remoteFS, jobJar, 
-          LocalResourceType.FILE, LocalResourceVisibility.APPLICATION));
-      LOG.info("The job-jar file on the remote FS is " + jobJarFileOnRemoteFS);
 
-      Path jobSubmitDir =
+      // //////////// Set up JobJar to be localized properly on the remote NM.
+      Path remoteJobJar =
+          remoteFS.makeQualified(new Path(remoteTask.getConf().get(
+              MRJobConfig.JAR)));
+      container.setLocalResource(
+          YARNApplicationConstants.JOB_JAR,
+          createLocalResource(remoteFS, recordFactory, remoteJobJar,
+              LocalResourceType.FILE, LocalResourceVisibility.APPLICATION));
+      LOG.info("The job-jar file on the remote FS is "
+          + remoteJobJar.toUri().toASCIIString());
+      // //////////// End of JobJar setup
+
+      // //////////// Set up JobConf to be localized properly on the remote NM.
+      Path remoteJobSubmitDir =
           new Path(conf.get(YARNApplicationConstants.APPS_STAGING_DIR_KEY),
               oldJobId.toString());
-      Path jobTokenFile =
-          remoteFS.makeQualified(new Path(jobSubmitDir,
-              YarnConfiguration.APPLICATION_TOKENS_FILE));
-      URL applicationTokenFileOnRemoteFS =
-          ConverterUtils.getYarnUrlFromPath(jobTokenFile);
-      // TODO: Looks like this is not needed. Revisit during localization
-      // cleanup.
-      //container.resources_todo.put(YarnConfiguration.APPLICATION_TOKENS_FILE,
-      //    getLocalResource(remoteFS, jobTokenFile, 
-      //        LocalResourceType.FILE, LocalResourceVisibility.APPLICATION));
-      
-      container.setLocalResource(YARNApplicationConstants.JOB_CONF_FILE,
-          getLocalResource(remoteFS,
-            new Path(jobSubmitDir, YARNApplicationConstants.JOB_CONF_FILE),
-            LocalResourceType.FILE, LocalResourceVisibility.APPLICATION));
-      LOG.info("The application token file on the remote FS is "
-          + applicationTokenFileOnRemoteFS);
+      Path remoteJobConfPath =
+          new Path(remoteJobSubmitDir, YARNApplicationConstants.JOB_CONF_FILE);
+      container.setLocalResource(
+          YARNApplicationConstants.JOB_CONF_FILE,
+          createLocalResource(remoteFS, recordFactory, remoteJobConfPath,
+              LocalResourceType.FILE, LocalResourceVisibility.APPLICATION));
+      LOG.info("The job-conf file on the remote FS is "
+          + remoteJobConfPath.toUri().toASCIIString());
+      // //////////// End of JobConf setup
 
       // Setup DistributedCache
       setupDistributedCache(conf, container);
@@ -869,7 +872,7 @@ public abstract class TaskAttemptImpl im
               taskAttempt.containerMgrAddress, taskAttempt.containerToken) {
         @Override
         public ContainerLaunchContext getContainer() {
-          return taskAttempt.getContainer();
+          return taskAttempt.createContainerLaunchContext();
         }
         @Override
         public Task getRemoteTask() {  // classic mapred Task, not YARN version

Modified: hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerLaunchContext.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerLaunchContext.java?rev=1102243&r1=1102242&r2=1102243&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerLaunchContext.java (original)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerLaunchContext.java Thu May 12 11:26:26 2011
@@ -1,3 +1,21 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
 package org.apache.hadoop.yarn.api.records;
 
 import java.nio.ByteBuffer;
@@ -5,49 +23,49 @@ import java.util.List;
 import java.util.Map;
 
 public interface ContainerLaunchContext {
-  public abstract ContainerId getContainerId();
-  public abstract String getUser();
-  public abstract Resource getResource();
+  ContainerId getContainerId();
+  String getUser();
+  Resource getResource();
   
-  public abstract Map<String, LocalResource> getAllLocalResources();
-  public abstract LocalResource getLocalResource(String key);
+  Map<String, LocalResource> getAllLocalResources();
+  LocalResource getLocalResource(String key);
   
   
-  public abstract ByteBuffer getContainerTokens();
+  ByteBuffer getContainerTokens();
   
-  public abstract Map<String, ByteBuffer> getAllServiceData();
-  public abstract ByteBuffer getServiceData(String key);
+  Map<String, ByteBuffer> getAllServiceData();
+  ByteBuffer getServiceData(String key);
   
-  public abstract Map<String, String> getAllEnv();
-  public abstract String getEnv(String key);
+  Map<String, String> getAllEnv();
+  String getEnv(String key);
   
-  public abstract List<String> getCommandList();
-  public abstract String getCommand(int index); // TODO: Remove
-  public abstract int getCommandCount(); // TODO: Remove
+  List<String> getCommandList();
+  String getCommand(int index);
+  int getCommandCount();
   
-  public abstract void setContainerId(ContainerId containerId);
-  public abstract void setUser(String user);
-  public abstract void setResource(Resource resource);
+  void setContainerId(ContainerId containerId);
+  void setUser(String user);
+  void setResource(Resource resource);
   
-  public abstract void addAllLocalResources(Map<String, LocalResource> localResources);
-  public abstract void setLocalResource(String key, LocalResource value);
-  public abstract void removeLocalResource(String key);
-  public abstract void clearLocalResources();
+  void addAllLocalResources(Map<String, LocalResource> localResources);
+  void setLocalResource(String key, LocalResource value);
+  void removeLocalResource(String key);
+  void clearLocalResources();
   
-  public abstract void setContainerTokens(ByteBuffer containerToken);
+  void setContainerTokens(ByteBuffer containerToken);
   
-  public abstract void addAllServiceData(Map<String, ByteBuffer> serviceData);
-  public abstract void setServiceData(String key, ByteBuffer value);
-  public abstract void removeServiceData(String key);
-  public abstract void clearServiceData();
+  void addAllServiceData(Map<String, ByteBuffer> serviceData);
+  void setServiceData(String key, ByteBuffer value);
+  void removeServiceData(String key);
+  void clearServiceData();
   
-  public abstract void addAllEnv(Map<String, String> env);
-  public abstract void setEnv(String key, String value);
-  public abstract void removeEnv(String key);
-  public abstract void clearEnv();
+  void addAllEnv(Map<String, String> env);
+  void setEnv(String key, String value);
+  void removeEnv(String key);
+  void clearEnv();
   
-  public abstract void addAllCommands(List<String> commands);
-  public abstract void addCommand(String command);
-  public abstract void removeCommand(int index); // TODO: Remove
-  public abstract void clearCommands();
+  void addAllCommands(List<String> commands);
+  void addCommand(String command);
+  void removeCommand(int index);
+  void clearCommands();
 }

Modified: hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java?rev=1102243&r1=1102242&r2=1102243&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java (original)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java Thu May 12 11:26:26 2011
@@ -21,6 +21,7 @@ package org.apache.hadoop.yarn.server.no
 import java.io.File;
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -124,6 +125,7 @@ public class DefaultContainerExecutor ex
           ContainerExecutor.TASK_LAUNCH_SCRIPT_PERMISSION);
       String[] command = 
           new String[] { "bash", "-c", launchDst.toUri().getPath().toString() };
+      LOG.info("launchContainer: " + Arrays.toString(command));
       shExec = new ShellCommandExecutor(command,
           new File(containerWorkDir.toUri().getPath()));
       launchCommandObjs.put(container.getLaunchContext().getContainerId(), shExec);

Modified: hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java?rev=1102243&r1=1102242&r2=1102243&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java (original)
+++ hadoop/mapreduce/branches/MR-279/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java Thu May 12 11:26:26 2011
@@ -147,13 +147,13 @@ public class LinuxContainerExecutor exte
   public int launchContainer(Container container,
       Path nmPrivateCotainerScriptPath, Path nmPrivateTokensPath,
       String user, String appId, Path containerWorkDir) throws IOException {
-
+    String containerIdStr = ConverterUtils.toString(container.getContainerID());
     List<String> command = new ArrayList<String>(
       Arrays.asList(containerExecutorExe, 
                     user, 
                     Integer.toString(Commands.LAUNCH_CONTAINER.getValue()),
                     appId,
-                    container.toString(),
+                    containerIdStr,
                     containerWorkDir.toString(),
                     nmPrivateCotainerScriptPath.toUri().getPath().toString(),
                     nmPrivateTokensPath.toUri().getPath().toString()));