Posted to mapreduce-commits@hadoop.apache.org by sz...@apache.org on 2012/11/18 23:31:46 UTC
svn commit: r1411007 [2/2] - in
/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project: ./ conf/
dev-support/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/
hadoop-mapreduce-client/hadoop-mapreduce-clien...
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java Sun Nov 18 22:31:28 2012
@@ -20,9 +20,10 @@ package org.apache.hadoop.mapreduce.util
import java.io.BufferedReader;
import java.io.File;
+import java.io.FileInputStream;
import java.io.FileNotFoundException;
-import java.io.FileReader;
import java.io.IOException;
+import java.io.InputStreamReader;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
@@ -39,6 +40,8 @@ import org.apache.hadoop.classification.
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
import org.apache.hadoop.util.StringUtils;
+import com.google.common.base.Charsets;
+
/**
* A Proc file-system based ProcessTree. Works only on Linux.
*/
@@ -350,7 +353,7 @@ public class ProcfsBasedProcessTree exte
}
private static final String PROCESSTREE_DUMP_FORMAT =
- "\t|- %s %s %d %d %s %d %d %d %d %s\n";
+ "\t|- %s %s %d %d %s %d %d %d %d %s%n";
/**
* Get a dump of the process-tree.
@@ -363,7 +366,7 @@ public class ProcfsBasedProcessTree exte
// The header.
ret.append(String.format("\t|- PID PPID PGRPID SESSID CMD_NAME "
+ "USER_MODE_TIME(MILLIS) SYSTEM_TIME(MILLIS) VMEM_USAGE(BYTES) "
- + "RSSMEM_USAGE(PAGES) FULL_CMD_LINE\n"));
+ + "RSSMEM_USAGE(PAGES) FULL_CMD_LINE%n"));
for (ProcessInfo p : processTree.values()) {
if (p != null) {
ret.append(String.format(PROCESSTREE_DUMP_FORMAT, p.getPid(), p
@@ -505,10 +508,11 @@ public class ProcfsBasedProcessTree exte
ProcessInfo ret = null;
// Read "procfsDir/<pid>/stat" file - typically /proc/<pid>/stat
BufferedReader in = null;
- FileReader fReader = null;
+ InputStreamReader fReader = null;
try {
File pidDir = new File(procfsDir, pinfo.getPid());
- fReader = new FileReader(new File(pidDir, PROCFS_STAT_FILE));
+ fReader = new InputStreamReader(new FileInputStream(
+ new File(pidDir, PROCFS_STAT_FILE)), Charsets.UTF_8);
in = new BufferedReader(fReader);
} catch (FileNotFoundException f) {
// The process vanished in the interim!
@@ -695,11 +699,11 @@ public class ProcfsBasedProcessTree exte
return ret;
}
BufferedReader in = null;
- FileReader fReader = null;
+ InputStreamReader fReader = null;
try {
- fReader =
- new FileReader(new File(new File(procfsDir, pid),
- PROCFS_CMDLINE_FILE));
+ fReader = new InputStreamReader(new FileInputStream(
+ new File(new File(procfsDir, pid), PROCFS_CMDLINE_FILE)),
+ Charsets.UTF_8);
} catch (FileNotFoundException f) {
// The process vanished in the interim!
return ret;
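
The hunks above apply one pattern twice over: FileReader always decodes with the platform default charset, so wrapping an explicit FileInputStream in an InputStreamReader with Charsets.UTF_8 makes reads of the /proc files locale-independent; and in String.format, "%n" emits the platform line separator where a literal "\n" is always LF. A minimal standalone sketch of both, assuming a Linux-style /proc path (not code from the commit):

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;

import com.google.common.base.Charsets;

public class CharsetSafeRead {
  public static void main(String[] args) throws IOException {
    // explicit charset: FileReader would silently use the platform default
    File stat = new File("/proc/self/stat"); // hypothetical path for the demo
    BufferedReader in = new BufferedReader(
        new InputStreamReader(new FileInputStream(stat), Charsets.UTF_8));
    try {
      // %n expands to the platform line separator; a literal \n is always LF
      System.out.print(String.format("stat: %s%n", in.readLine()));
    } finally {
      in.close();
    }
  }
}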
Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:r1408927-1410997
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java Sun Nov 18 22:31:28 2012
@@ -535,8 +535,9 @@ public class HistoryFileManager extends
if (serialPart == null) {
LOG.warn("Could not find serial portion from path: "
+ serialDirPath.toString() + ". Continuing with next");
+ } else {
+ serialNumberIndex.add(serialPart, timestampPart);
}
- serialNumberIndex.add(serialPart, timestampPart);
}
private void addDirectoryToJobListCache(Path path) throws IOException {
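
The fix here is a plain guard: previously the method logged the warning and then called serialNumberIndex.add(serialPart, timestampPart) anyway, passing the null serialPart it had just complained about. A minimal sketch of the corrected control flow, with a HashMap as a hypothetical stand-in for the real index:

import java.util.HashMap;
import java.util.Map;

public class SerialIndexGuard {
  // hypothetical stand-in for the real serialNumberIndex
  private final Map<String, String> serialNumberIndex =
      new HashMap<String, String>();

  void addIfParsed(String serialPart, String timestampPart, String path) {
    if (serialPart == null) {
      // before the fix, add() still ran after this warning, with a null key
      System.err.println("Could not find serial portion from path: " + path
          + ". Continuing with next");
    } else {
      serialNumberIndex.put(serialPart, timestampPart);
    }
  }
}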
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java Sun Nov 18 22:31:28 2012
@@ -21,6 +21,7 @@ package org.apache.hadoop.mapreduce.v2.h
import java.text.SimpleDateFormat;
import java.util.Date;
+import org.apache.commons.lang.StringEscapeUtils;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
@@ -37,7 +38,7 @@ import com.google.inject.Inject;
*/
public class HsJobsBlock extends HtmlBlock {
final AppContext appContext;
- static final SimpleDateFormat dateFormat =
+ final SimpleDateFormat dateFormat =
new SimpleDateFormat("yyyy.MM.dd HH:mm:ss z");
@Inject HsJobsBlock(AppContext appCtx) {
@@ -67,22 +68,36 @@ public class HsJobsBlock extends HtmlBlo
th("Reduces Completed")._()._().
tbody();
LOG.info("Getting list of all Jobs.");
+ // Write all the data into a JavaScript array of arrays for JQuery
+ // DataTables to display
+ StringBuilder jobsTableData = new StringBuilder("[\n");
for (Job j : appContext.getAllJobs().values()) {
JobInfo job = new JobInfo(j);
- tbody.
- tr().
- td(dateFormat.format(new Date(job.getStartTime()))).
- td(dateFormat.format(new Date(job.getFinishTime()))).
- td().a(url("job", job.getId()), job.getId())._().
- td(job.getName()).
- td(job.getUserName()).
- td(job.getQueueName()).
- td(job.getState()).
- td(String.valueOf(job.getMapsTotal())).
- td(String.valueOf(job.getMapsCompleted())).
- td(String.valueOf(job.getReducesTotal())).
- td(String.valueOf(job.getReducesCompleted()))._();
+ jobsTableData.append("[\"")
+ .append(dateFormat.format(new Date(job.getStartTime()))).append("\",\"")
+ .append(dateFormat.format(new Date(job.getFinishTime()))).append("\",\"")
+ .append("<a href='").append(url("job", job.getId())).append("'>")
+ .append(job.getId()).append("</a>\",\"")
+ .append(StringEscapeUtils.escapeHtml(job.getName()))
+ .append("\",\"")
+ .append(StringEscapeUtils.escapeHtml(job.getUserName()))
+ .append("\",\"")
+ .append(StringEscapeUtils.escapeHtml(job.getQueueName()))
+ .append("\",\"")
+ .append(job.getState()).append("\",\"")
+ .append(String.valueOf(job.getMapsTotal())).append("\",\"")
+ .append(String.valueOf(job.getMapsCompleted())).append("\",\"")
+ .append(String.valueOf(job.getReducesTotal())).append("\",\"")
+ .append(String.valueOf(job.getReducesCompleted())).append("\"],\n");
}
+
+ //Remove the last comma and close off the array of arrays
+ if(jobsTableData.charAt(jobsTableData.length() - 2) == ',') {
+ jobsTableData.delete(jobsTableData.length()-2, jobsTableData.length()-1);
+ }
+ jobsTableData.append("]");
+ html.script().$type("text/javascript").
+ _("var jobsTableData=" + jobsTableData)._();
tbody._().
tfoot().
tr().
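
Two changes land together in this block: row rendering moves out of server-side Hamlet calls into a JavaScript array of arrays handed to DataTables, and the user-controllable fields (job name, user name, queue name) pass through StringEscapeUtils.escapeHtml on the way in so they cannot inject markup into the generated script. A minimal sketch of the escaping step alone, with a hypothetical job name:

import org.apache.commons.lang.StringEscapeUtils;

public class JsRowEscape {
  public static void main(String[] args) {
    // hypothetical job name under user control
    String jobName = "<script>alert('x')</script>";
    StringBuilder row = new StringBuilder("[\"");
    row.append(StringEscapeUtils.escapeHtml(jobName)).append("\"]");
    // prints ["&lt;script&gt;alert('x')&lt;/script&gt;"]
    System.out.println(row);
  }
}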
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java Sun Nov 18 22:31:28 2012
@@ -70,7 +70,6 @@ public class HsNavBlock extends HtmlBloc
li().a("/conf", "Configuration")._().
li().a("/logs", "Local logs")._().
li().a("/stacks", "Server stacks")._().
- li().a("/metrics", "Server metrics")._()._()._().
- div("#themeswitcher")._();
+ li().a("/metrics", "Server metrics")._()._()._();
}
}
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java Sun Nov 18 22:31:28 2012
@@ -29,7 +29,6 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
@@ -100,6 +99,10 @@ public class HsTasksBlock extends HtmlBl
theadRow.th("Elapsed Time"); //Attempt
TBODY<TABLE<Hamlet>> tbody = theadRow._()._().tbody();
+
+ // Write all the data into a JavaScript array of arrays for JQuery
+ // DataTables to display
+ StringBuilder tasksTableData = new StringBuilder("[\n");
for (Task task : app.getJob().getTasks().values()) {
if (type != null && task.getType() != type) {
continue;
@@ -137,55 +140,36 @@ public class HsTasksBlock extends HtmlBl
attemptFinishTime = ta.getFinishTime();
attemptElapsed = ta.getElapsedTime();
}
+ tasksTableData.append("[\"")
+ .append("<a href='" + url("task", tid)).append("'>")
+ .append(tid).append("</a>\",\"")
+ .append(info.getState()).append("\",\"")
+ .append(startTime).append("\",\"")
+ .append(finishTime).append("\",\"")
+ .append(elapsed).append("\",\"")
+ .append(attemptStartTime).append("\",\"");
- TR<TBODY<TABLE<Hamlet>>> row = tbody.tr();
- row.
- td().
- br().$title(String.valueOf(info.getTaskNum()))._(). // sorting
- a(url("task", tid), tid)._().
- td(info.getState()).
- td().
- br().$title(String.valueOf(startTime))._().
- _(Times.format(startTime))._().
- td().
- br().$title(String.valueOf(finishTime))._().
- _(Times.format(finishTime))._().
- td().
- br().$title(String.valueOf(elapsed))._().
- _(formatTime(elapsed))._().
- td().
- br().$title(String.valueOf(attemptStartTime))._().
- _(Times.format(attemptStartTime))._();
if(type == TaskType.REDUCE) {
- row.td().
- br().$title(String.valueOf(shuffleFinishTime))._().
- _(Times.format(shuffleFinishTime))._();
- row.td().
- br().$title(String.valueOf(sortFinishTime))._().
- _(Times.format(sortFinishTime))._();
+ tasksTableData.append(shuffleFinishTime).append("\",\"")
+ .append(sortFinishTime).append("\",\"");
}
- row.
- td().
- br().$title(String.valueOf(attemptFinishTime))._().
- _(Times.format(attemptFinishTime))._();
-
+ tasksTableData.append(attemptFinishTime).append("\",\"");
if(type == TaskType.REDUCE) {
- row.td().
- br().$title(String.valueOf(elapsedShuffleTime))._().
- _(formatTime(elapsedShuffleTime))._();
- row.td().
- br().$title(String.valueOf(elapsedSortTime))._().
- _(formatTime(elapsedSortTime))._();
- row.td().
- br().$title(String.valueOf(elapsedReduceTime))._().
- _(formatTime(elapsedReduceTime))._();
+ tasksTableData.append(elapsedShuffleTime).append("\",\"")
+ .append(elapsedSortTime).append("\",\"")
+ .append(elapsedReduceTime).append("\",\"");
}
-
- row.td().
- br().$title(String.valueOf(attemptElapsed))._().
- _(formatTime(attemptElapsed))._();
- row._();
+ tasksTableData.append(attemptElapsed).append("\"],\n");
}
+ //Remove the last comma and close off the array of arrays
+ if(tasksTableData.charAt(tasksTableData.length() - 2) == ',') {
+ tasksTableData.delete(
+ tasksTableData.length()-2, tasksTableData.length()-1);
+ }
+ tasksTableData.append("]");
+ html.script().$type("text/javascript").
+ _("var tasksTableData=" + tasksTableData)._();
+
TR<TFOOT<TABLE<Hamlet>>> footRow = tbody._().tfoot().tr();
footRow.th().input("search_init").$type(InputType.text).$name("task")
.$value("ID")._()._().th().input("search_init").$type(InputType.text)
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java Sun Nov 18 22:31:28 2012
@@ -67,18 +67,32 @@ public class HsTasksPage extends HsView
type = MRApps.taskType(symbol);
}
StringBuilder b = tableInit().
- append(",aoColumnDefs:[");
- b.append("{'sType':'title-numeric', 'aTargets': [ 0, 4");
+ append(", 'aaData': tasksTableData");
+ b.append(", bDeferRender: true");
+ b.append(", bProcessing: true");
+
+ b.append("\n, aoColumnDefs: [\n");
+ b.append("{'sType':'numeric', 'aTargets': [ 0 ]");
+ b.append(", 'mRender': parseHadoopID }");
+
+ b.append(", {'sType':'numeric', 'aTargets': [ 4");
if(type == TaskType.REDUCE) {
b.append(", 9, 10, 11, 12");
} else { //MAP
b.append(", 7");
}
- b.append(" ] }]");
+ b.append(" ], 'mRender': renderHadoopElapsedTime }");
- // Sort by id upon page load
- b.append(", aaSorting: [[0, 'asc']]");
+ b.append("\n, {'sType':'numeric', 'aTargets': [ 2, 3, 5");
+ if(type == TaskType.REDUCE) {
+ b.append(", 6, 7, 8");
+ } else { //MAP
+ b.append(", 6");
+ }
+ b.append(" ], 'mRender': renderHadoopDate }]");
+ // Sort by id upon page load
+ b.append("\n, aaSorting: [[0, 'asc']]");
b.append("}");
return b.toString();
}
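
Expanded, the init string this method now builds hands DataTables the raw tasksTableData array via 'aaData' and sorts on the raw numeric values, while the per-column 'mRender' callbacks (parseHadoopID, renderHadoopElapsedTime, renderHadoopDate) format them for display. For a map-task page the assembled options, after the tableInit() prefix (elided here), read roughly:

{ ..., 'aaData': tasksTableData, bDeferRender: true, bProcessing: true
, aoColumnDefs: [
{'sType':'numeric', 'aTargets': [ 0 ], 'mRender': parseHadoopID }
, {'sType':'numeric', 'aTargets': [ 4, 7 ], 'mRender': renderHadoopElapsedTime }
, {'sType':'numeric', 'aTargets': [ 2, 3, 5, 6 ], 'mRender': renderHadoopDate }]
, aaSorting: [[0, 'asc']]}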
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsView.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsView.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsView.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsView.java Sun Nov 18 22:31:28 2012
@@ -22,7 +22,6 @@ import static org.apache.hadoop.yarn.web
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.THEMESWITCHER_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.postInitID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
@@ -54,7 +53,6 @@ public class HsView extends TwoColumnLay
protected void commonPreHead(Page.HTML<_> html) {
set(ACCORDION_ID, "nav");
set(initID(ACCORDION, "nav"), "{autoHeight:false, active:0}");
- set(THEMESWITCHER_ID, "themeswitcher");
}
/*
@@ -84,17 +82,15 @@ public class HsView extends TwoColumnLay
*/
private String jobsTableInit() {
return tableInit().
+ append(", 'aaData': jobsTableData").
+ append(", bDeferRender: true").
+ append(", bProcessing: true").
+
// Sort by id upon page load
append(", aaSorting: [[2, 'desc']]").
append(", aoColumnDefs:[").
- // Maps Total
- append("{'sType':'numeric', 'bSearchable': false, 'aTargets': [ 7 ] }").
- // Maps Completed
- append(",{'sType':'numeric', 'bSearchable': false, 'aTargets': [ 8 ] }").
- // Reduces Total
- append(",{'sType':'numeric', 'bSearchable': false, 'aTargets': [ 9 ] }").
- // Reduces Completed
- append(",{'sType':'numeric', 'bSearchable': false, 'aTargets': [ 10 ] }").
+ // Maps Total, Maps Completed, Reduces Total and Reduces Completed
+ append("{'sType':'numeric', 'bSearchable': false, 'aTargets': [ 7, 8, 9, 10 ] }").
append("]}").
toString();
}
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java Sun Nov 18 22:31:28 2012
@@ -37,6 +37,7 @@ import java.io.RandomAccessFile;
import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.ByteBuffer;
+import java.nio.channels.ClosedChannelException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@@ -45,6 +46,7 @@ import java.util.concurrent.ConcurrentHa
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
+import java.util.regex.Pattern;
import javax.crypto.SecretKey;
@@ -106,6 +108,7 @@ import org.jboss.netty.handler.ssl.SslHa
import org.jboss.netty.handler.stream.ChunkedWriteHandler;
import org.jboss.netty.util.CharsetUtil;
+import com.google.common.base.Charsets;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
public class ShuffleHandler extends AbstractService
@@ -119,10 +122,16 @@ public class ShuffleHandler extends Abst
public static final String SHUFFLE_READAHEAD_BYTES = "mapreduce.shuffle.readahead.bytes";
public static final int DEFAULT_SHUFFLE_READAHEAD_BYTES = 4 * 1024 * 1024;
+ // pattern to identify errors related to the client closing the socket early
+ // idea borrowed from Netty SslHandler
+ private static final Pattern IGNORABLE_ERROR_MESSAGE = Pattern.compile(
+ "^.*(?:connection.*reset|connection.*closed|broken.*pipe).*$",
+ Pattern.CASE_INSENSITIVE);
+
private int port;
private ChannelFactory selector;
private final ChannelGroup accepted = new DefaultChannelGroup();
- private HttpPipelineFactory pipelineFact;
+ protected HttpPipelineFactory pipelineFact;
private int sslFileBufferSize;
/**
@@ -318,13 +327,17 @@ public class ShuffleHandler extends Abst
}
}
+ protected Shuffle getShuffle(Configuration conf) {
+ return new Shuffle(conf);
+ }
+
class HttpPipelineFactory implements ChannelPipelineFactory {
final Shuffle SHUFFLE;
private SSLFactory sslFactory;
public HttpPipelineFactory(Configuration conf) throws Exception {
- SHUFFLE = new Shuffle(conf);
+ SHUFFLE = getShuffle(conf);
if (conf.getBoolean(MRConfig.SHUFFLE_SSL_ENABLED_KEY,
MRConfig.SHUFFLE_SSL_ENABLED_DEFAULT)) {
sslFactory = new SSLFactory(SSLFactory.Mode.SERVER, conf);
@@ -464,7 +477,7 @@ public class ShuffleHandler extends Abst
lastMap.addListener(ChannelFutureListener.CLOSE);
}
- private void verifyRequest(String appid, ChannelHandlerContext ctx,
+ protected void verifyRequest(String appid, ChannelHandlerContext ctx,
HttpRequest request, HttpResponse response, URL requestUri)
throws IOException {
SecretKey tokenSecret = secretManager.retrieveTokenSecret(appid);
@@ -490,7 +503,8 @@ public class ShuffleHandler extends Abst
SecureShuffleUtils.verifyReply(urlHashStr, enc_str, tokenSecret);
// verification passed - encode the reply
String reply =
- SecureShuffleUtils.generateHash(urlHashStr.getBytes(), tokenSecret);
+ SecureShuffleUtils.generateHash(urlHashStr.getBytes(Charsets.UTF_8),
+ tokenSecret);
response.setHeader(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, reply);
if (LOG.isDebugEnabled()) {
int len = reply.length();
@@ -564,12 +578,12 @@ public class ShuffleHandler extends Abst
return writeFuture;
}
- private void sendError(ChannelHandlerContext ctx,
+ protected void sendError(ChannelHandlerContext ctx,
HttpResponseStatus status) {
sendError(ctx, "", status);
}
- private void sendError(ChannelHandlerContext ctx, String message,
+ protected void sendError(ChannelHandlerContext ctx, String message,
HttpResponseStatus status) {
HttpResponse response = new DefaultHttpResponse(HTTP_1_1, status);
response.setHeader(CONTENT_TYPE, "text/plain; charset=UTF-8");
@@ -588,6 +602,16 @@ public class ShuffleHandler extends Abst
if (cause instanceof TooLongFrameException) {
sendError(ctx, BAD_REQUEST);
return;
+ } else if (cause instanceof IOException) {
+ if (cause instanceof ClosedChannelException) {
+ LOG.debug("Ignoring closed channel error", cause);
+ return;
+ }
+ String message = String.valueOf(cause.getMessage());
+ if (IGNORABLE_ERROR_MESSAGE.matcher(message).matches()) {
+ LOG.debug("Ignoring client socket close", cause);
+ return;
+ }
}
LOG.error("Shuffle error: ", cause);
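
The exceptionCaught change borrows Netty's SslHandler trick: when a reducer disconnects mid-transfer, the server sees either a ClosedChannelException or an IOException whose message mentions a reset/closed connection or a broken pipe, and neither deserves an ERROR-level stack trace. A minimal standalone check of the same regex (the sample messages are hypothetical):

import java.util.regex.Pattern;

public class IgnorableErrorCheck {
  // same pattern as the ShuffleHandler change above
  private static final Pattern IGNORABLE_ERROR_MESSAGE = Pattern.compile(
      "^.*(?:connection.*reset|connection.*closed|broken.*pipe).*$",
      Pattern.CASE_INSENSITIVE);

  public static void main(String[] args) {
    String[] messages = { "Connection reset by peer", "Broken pipe",
        "An existing connection was forcibly closed", "No route to host" };
    for (String m : messages) {
      // only "No route to host" should come out non-ignorable
      System.out.println(m + " -> ignorable="
          + IGNORABLE_ERROR_MESSAGE.matcher(m).matches());
    }
  }
}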
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java Sun Nov 18 22:31:28 2012
@@ -17,17 +17,35 @@
*/
package org.apache.hadoop.mapred;
+import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
+import static org.apache.hadoop.test.MetricsAsserts.assertGauge;
+import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.apache.hadoop.test.MockitoMaker.make;
+import static org.apache.hadoop.test.MockitoMaker.stub;
+import static org.jboss.netty.buffer.ChannelBuffers.wrappedBuffer;
+import static org.junit.Assert.assertEquals;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.ArrayList;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsSource;
-import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
import org.apache.hadoop.metrics2.MetricsSystem;
-import static org.apache.hadoop.test.MetricsAsserts.*;
-
+import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
+import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
-
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.handler.codec.http.HttpRequest;
+import org.jboss.netty.handler.codec.http.HttpResponse;
+import org.jboss.netty.handler.codec.http.HttpResponseStatus;
+import org.junit.Assert;
import org.junit.Test;
-import static org.junit.Assert.*;
-import static org.apache.hadoop.test.MockitoMaker.*;
public class TestShuffleHandler {
static final long MiB = 1024 * 1024;
@@ -69,4 +87,76 @@ public class TestShuffleHandler {
assertCounter("ShuffleOutputsOK", succeeded, rb);
assertGauge("ShuffleConnections", connections, rb);
}
+
+ @Test
+ public void testClientClosesConnection() throws Exception {
+ final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
+ Configuration conf = new Configuration();
+ conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
+ ShuffleHandler shuffleHandler = new ShuffleHandler() {
+ @Override
+ protected Shuffle getShuffle(Configuration conf) {
+ // replace the shuffle handler with one stubbed for testing
+ return new Shuffle(conf) {
+ @Override
+ protected void verifyRequest(String appid, ChannelHandlerContext ctx,
+ HttpRequest request, HttpResponse response, URL requestUri)
+ throws IOException {
+ }
+ @Override
+ protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx,
+ Channel ch, String user, String jobId, String mapId, int reduce)
+ throws IOException {
+ // send a shuffle header and a lot of data down the channel
+ // to trigger a broken pipe
+ ShuffleHeader header =
+ new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
+ DataOutputBuffer dob = new DataOutputBuffer();
+ header.write(dob);
+ ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
+ dob = new DataOutputBuffer();
+ for (int i=0; i<100000; ++i) {
+ header.write(dob);
+ }
+ return ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
+ }
+ @Override
+ protected void sendError(ChannelHandlerContext ctx,
+ HttpResponseStatus status) {
+ if (failures.size() == 0) {
+ failures.add(new Error());
+ ctx.getChannel().close();
+ }
+ }
+ @Override
+ protected void sendError(ChannelHandlerContext ctx, String message,
+ HttpResponseStatus status) {
+ if (failures.size() == 0) {
+ failures.add(new Error());
+ ctx.getChannel().close();
+ }
+ }
+ };
+ }
+ };
+ shuffleHandler.init(conf);
+ shuffleHandler.start();
+
+ // simulate a reducer that closes early by reading a single shuffle header
+ // then closing the connection
+ URL url = new URL("http://127.0.0.1:"
+ + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
+ + "/mapOutput?job=job_12345_1&reduce=1&map=attempt_12345_1_m_1_0");
+ HttpURLConnection conn = (HttpURLConnection)url.openConnection();
+ conn.connect();
+ DataInputStream input = new DataInputStream(conn.getInputStream());
+ Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+ ShuffleHeader header = new ShuffleHeader();
+ header.readFields(input);
+ input.close();
+
+ shuffleHandler.stop();
+ Assert.assertTrue("sendError called when client closed connection",
+ failures.size() == 0);
+ }
}
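
The test above is enabled by the visibility changes in ShuffleHandler: getShuffle is a protected factory, and verifyRequest/sendMapOutput/sendError are protected, so a subclass can stub them and observe whether sendError fires when the client disconnects. A minimal sketch of that factory-method seam in isolation, with hypothetical class names:

public class FactorySeam {
  // production code routes construction through a protected factory method
  static class Server {
    protected Handler getHandler() { return new Handler(); }
    void serve() { getHandler().handle(); }
  }

  static class Handler {
    void handle() { System.out.println("real handler"); }
  }

  public static void main(String[] args) {
    // a test subclasses the server and swaps in a stubbed handler
    Server testServer = new Server() {
      @Override
      protected Handler getHandler() {
        return new Handler() {
          @Override
          void handle() { System.out.println("stub handler"); }
        };
      }
    };
    testServer.serve(); // prints "stub handler"
  }
}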
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml Sun Nov 18 22:31:28 2012
@@ -60,4 +60,10 @@
<Bug pattern="ICAST_QUESTIONABLE_UNSIGNED_RIGHT_SHIFT" />
</Match>
+ <Match>
+ <Class name="org.apache.hadoop.examples.terasort.TeraInputFormat" />
+ <Method name="getSplits" />
+ <Bug pattern="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD" />
+ </Match>
+
</FindBugsFilter>
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml Sun Nov 18 22:31:28 2012
@@ -103,6 +103,11 @@
<artifactId>hsqldb</artifactId>
<scope>provided</scope>
</dependency>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <scope>provided</scope>
+ </dependency>
</dependencies>
<build>
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java Sun Nov 18 22:31:28 2012
@@ -22,7 +22,9 @@ import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.OutputStream;
+import java.io.OutputStreamWriter;
import java.io.PrintStream;
+import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -50,6 +52,8 @@ import org.apache.hadoop.mapreduce.lib.o
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import com.google.common.base.Charsets;
+
/**
* A map/reduce program that uses Bailey-Borwein-Plouffe to compute exact
* digits of Pi.
@@ -151,7 +155,8 @@ public class BaileyBorweinPlouffe extend
LOG.info("Writing text output to " + outfile);
final OutputStream outputstream = fs.create(outfile);
try {
- final PrintStream out = new PrintStream(outputstream, true);
+ final PrintWriter out = new PrintWriter(
+ new OutputStreamWriter(outputstream, Charsets.UTF_8), true);
// write hex text
print(out, hex.iterator(), "Pi = 0x3.", "%02X", 5, 5);
out.println("Total number of hexadecimal digits is "
@@ -184,7 +189,7 @@ public class BaileyBorweinPlouffe extend
}
/** Print out elements in a nice format. */
- private static <T> void print(PrintStream out, Iterator<T> iterator,
+ private static <T> void print(PrintWriter out, Iterator<T> iterator,
String prefix, String format, int elementsPerGroup, int groupsPerLine) {
final StringBuilder sb = new StringBuilder("\n");
for (int i = 0; i < prefix.length(); i++)
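
The PrintStream-to-PrintWriter swap above keeps the println-style API but pins the encoding: a PrintStream built without a charset writes in the platform default, while OutputStreamWriter takes an explicit Charset, and the second PrintWriter constructor argument preserves the autoflush behavior. A minimal sketch, assuming a hypothetical in-memory sink in place of fs.create(outfile):

import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;

import com.google.common.base.Charsets;

public class CharsetSafeWrite {
  public static void main(String[] args) {
    // hypothetical sink; fs.create(outfile) plays this role in the commit
    OutputStream outputstream = new ByteArrayOutputStream();
    PrintWriter out = new PrintWriter(
        new OutputStreamWriter(outputstream, Charsets.UTF_8), true); // autoflush
    try {
      out.println("Pi = 0x3.243F6A88...");
    } finally {
      out.close();
    }
  }
}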
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java Sun Nov 18 22:31:28 2012
@@ -37,6 +37,8 @@ import org.apache.hadoop.mapreduce.lib.o
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import com.google.common.base.Charsets;
+
public class WordMean extends Configured implements Tool {
private double mean = 0;
@@ -125,7 +127,7 @@ public class WordMean extends Configured
// average = total sum / number of elements;
try {
- br = new BufferedReader(new InputStreamReader(fs.open(file)));
+ br = new BufferedReader(new InputStreamReader(fs.open(file), Charsets.UTF_8));
long count = 0;
long length = 0;
@@ -151,7 +153,9 @@ public class WordMean extends Configured
System.out.println("The mean is: " + theMean);
return theMean;
} finally {
- br.close();
+ if (br != null) {
+ br.close();
+ }
}
}
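
The finally-block change guards against a second failure mode: if fs.open(file) itself throws, br is still null when the finally runs, and the unguarded br.close() would mask the original exception with a NullPointerException. A minimal sketch of the pattern in pre-Java-7 style, matching this codebase (the path argument is hypothetical):

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;

import com.google.common.base.Charsets;

public class NullSafeClose {
  static String firstLine(String path) throws IOException {
    BufferedReader br = null;
    try {
      // if this constructor throws, br is still null in the finally block
      br = new BufferedReader(new InputStreamReader(
          new FileInputStream(path), Charsets.UTF_8));
      return br.readLine();
    } finally {
      if (br != null) { // an unguarded br.close() would throw NPE here
        br.close();
      }
    }
  }
}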
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java Sun Nov 18 22:31:28 2012
@@ -38,6 +38,8 @@ import org.apache.hadoop.mapreduce.lib.o
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import com.google.common.base.Charsets;
+
public class WordMedian extends Configured implements Tool {
private double median = 0;
@@ -127,7 +129,7 @@ public class WordMedian extends Configur
BufferedReader br = null;
try {
- br = new BufferedReader(new InputStreamReader(fs.open(file)));
+ br = new BufferedReader(new InputStreamReader(fs.open(file), Charsets.UTF_8));
int num = 0;
String line;
@@ -157,7 +159,9 @@ public class WordMedian extends Configur
}
}
} finally {
- br.close();
+ if (br != null) {
+ br.close();
+ }
}
// error, no median found
return -1;
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java Sun Nov 18 22:31:28 2012
@@ -37,6 +37,8 @@ import org.apache.hadoop.mapreduce.lib.o
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import com.google.common.base.Charsets;
+
public class WordStandardDeviation extends Configured implements Tool {
private double stddev = 0;
@@ -135,7 +137,7 @@ public class WordStandardDeviation exten
double stddev = 0;
BufferedReader br = null;
try {
- br = new BufferedReader(new InputStreamReader(fs.open(file)));
+ br = new BufferedReader(new InputStreamReader(fs.open(file), Charsets.UTF_8));
long count = 0;
long length = 0;
long square = 0;
@@ -166,7 +168,9 @@ public class WordStandardDeviation exten
stddev = Math.sqrt((term - mean));
System.out.println("The standard deviation is: " + stddev);
} finally {
- br.close();
+ if (br != null) {
+ br.close();
+ }
}
return stddev;
}
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java Sun Nov 18 22:31:28 2012
@@ -33,6 +33,8 @@ import org.apache.hadoop.mapreduce.lib.i
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.*;
+import com.google.common.base.Charsets;
+
/**
* Launch a distributed pentomino solver.
* It generates a complete list of prefixes of length N with each unique prefix
@@ -137,9 +139,9 @@ public class DistributedPentomino extend
fs.mkdirs(dir);
List<int[]> splits = pent.getSplits(depth);
Path input = new Path(dir, "part1");
- PrintStream file =
- new PrintStream(new BufferedOutputStream
- (fs.create(input), 64*1024));
+ PrintWriter file =
+ new PrintWriter(new OutputStreamWriter(new BufferedOutputStream
+ (fs.create(input), 64*1024), Charsets.UTF_8));
for(int[] prefix: splits) {
for(int i=0; i < prefix.length; ++i) {
if (i != 0) {
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java Sun Nov 18 22:31:28 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.examples.danci
import java.io.*;
import java.util.*;
+import com.google.common.base.Charsets;
+
/**
* This class uses the dancing links algorithm from Knuth to solve sudoku
* puzzles. It has solved 42x42 puzzles in 1.02 seconds.
@@ -133,7 +135,8 @@ public class Sudoku {
* @param stream The input stream to read the data from
*/
public Sudoku(InputStream stream) throws IOException {
- BufferedReader file = new BufferedReader(new InputStreamReader(stream));
+ BufferedReader file = new BufferedReader(
+ new InputStreamReader(stream, Charsets.UTF_8));
String line = file.readLine();
List<int[]> result = new ArrayList<int[]>();
while (line != null) {
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java Sun Nov 18 22:31:28 2012
@@ -19,9 +19,11 @@ package org.apache.hadoop.examples.pi;
import java.io.BufferedReader;
import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
@@ -32,6 +34,8 @@ import java.util.TreeMap;
import org.apache.hadoop.examples.pi.math.Bellard;
import org.apache.hadoop.examples.pi.math.Bellard.Parameter;
+import com.google.common.base.Charsets;
+
/** A class for parsing outputs */
public final class Parser {
static final String VERBOSE_PROPERTY = "pi.parser.verbose";
@@ -71,7 +75,8 @@ public final class Parser {
for(Parameter p : Parameter.values())
m.put(p, new ArrayList<TaskResult>());
- final BufferedReader in = new BufferedReader(new FileReader(f));
+ final BufferedReader in = new BufferedReader(
+ new InputStreamReader(new FileInputStream(f), Charsets.UTF_8));
try {
for(String line; (line = in.readLine()) != null; )
try {
@@ -127,7 +132,8 @@ public final class Parser {
Collections.sort(results);
final PrintWriter out = new PrintWriter(
- new FileWriter(new File(outputdir, p + ".txt")), true);
+ new OutputStreamWriter(new FileOutputStream(
+ new File(outputdir, p + ".txt")), Charsets.UTF_8), true);
try {
for(int i = 0; i < results.size(); i++)
out.println(DistSum.taskResult2string(p + "." + i, results.get(i)));
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java Sun Nov 18 22:31:28 2012
@@ -19,9 +19,10 @@ package org.apache.hadoop.examples.pi;
import java.io.BufferedReader;
import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
@@ -46,6 +47,8 @@ import org.apache.hadoop.fs.permission.F
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.ToolRunner;
+import com.google.common.base.Charsets;
+
/** Utility methods */
public class Util {
/** Output stream */
@@ -81,7 +84,7 @@ public class Util {
final long t = System.currentTimeMillis();
final long delta = t - (isAccumulative? start: previous);
if (s != null) {
- out.format("%15dms (=%-15s: %s\n", delta, millis2String(delta) + ")", s);
+ out.format("%15dms (=%-15s: %s%n", delta, millis2String(delta) + ")", s);
out.flush();
}
previous = t;
@@ -203,16 +206,16 @@ public class Util {
throw new IllegalArgumentException("dir (=" + dir + ") is not a directory.");
}
- private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("-yyyyMMdd-HHmmssSSS");
/** Create a writer of a local file. */
public static PrintWriter createWriter(File dir, String prefix) throws IOException {
checkDirectory(dir);
-
+
+ SimpleDateFormat dateFormat = new SimpleDateFormat("-yyyyMMdd-HHmmssSSS");
for(;;) {
final File f = new File(dir,
- prefix + DATE_FORMAT.format(new Date(System.currentTimeMillis())) + ".txt");
+ prefix + dateFormat.format(new Date(System.currentTimeMillis())) + ".txt");
if (!f.exists())
- return new PrintWriter(new FileWriter(f));
+ return new PrintWriter(new OutputStreamWriter(new FileOutputStream(f), Charsets.UTF_8));
try {Thread.sleep(10);} catch (InterruptedException e) {}
}
@@ -286,7 +289,8 @@ public class Util {
final List<TaskResult> results = new ArrayList<TaskResult>();
for(FileStatus status : fs.listStatus(outdir)) {
if (status.getPath().getName().startsWith("part-")) {
- final BufferedReader in = new BufferedReader(new InputStreamReader(fs.open(status.getPath())));
+ final BufferedReader in = new BufferedReader(
+ new InputStreamReader(fs.open(status.getPath()), Charsets.UTF_8));
try {
for(String line; (line = in.readLine()) != null; )
results.add(TaskResult.valueOf(line));
@@ -305,7 +309,7 @@ public class Util {
static void writeResults(String name, List<TaskResult> results, FileSystem fs, String dir) throws IOException {
final Path outfile = new Path(dir, name + ".txt");
Util.out.println(name + "> writing results to " + outfile);
- final PrintStream out = new PrintStream(fs.create(outfile), true);
+ final PrintWriter out = new PrintWriter(new OutputStreamWriter(fs.create(outfile), Charsets.UTF_8), true);
try {
for(TaskResult r : results)
out.println(r);
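
The createWriter change above and the HsJobsBlock change earlier in this commit address the same hazard: SimpleDateFormat is not thread-safe, and a shared static instance can silently corrupt output under concurrent callers. A minimal sketch of the per-call alternative (pattern string copied from createWriter; class name hypothetical):

import java.text.SimpleDateFormat;
import java.util.Date;

public class PerCallDateFormat {
  static String timestampSuffix() {
    // a fresh instance per call costs a little, but stays correct concurrently
    SimpleDateFormat dateFormat = new SimpleDateFormat("-yyyyMMdd-HHmmssSSS");
    return dateFormat.format(new Date(System.currentTimeMillis()));
  }

  public static void main(String[] args) {
    System.out.println("prefix" + timestampSuffix() + ".txt");
  }
}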
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java?rev=1411007&r1=1411006&r2=1411007&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java Sun Nov 18 22:31:28 2012
@@ -29,6 +29,8 @@ import org.apache.hadoop.mapreduce.Input
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
+import com.google.common.base.Charsets;
+
class TeraScheduler {
static String USE = "mapreduce.terasort.use.terascheduler";
private static final Log LOG = LogFactory.getLog(TeraScheduler.class);
@@ -73,7 +75,8 @@ class TeraScheduler {
List<String> readFile(String filename) throws IOException {
List<String> result = new ArrayList<String>(10000);
- BufferedReader in = new BufferedReader(new FileReader(filename));
+ BufferedReader in = new BufferedReader(
+ new InputStreamReader(new FileInputStream(filename), Charsets.UTF_8));
String line = in.readLine();
while (line != null) {
result.add(line);