You are viewing a plain-text version of this content; the canonical (HTML) link for it was not preserved in this rendering.
Posted to mapreduce-commits@hadoop.apache.org by ar...@apache.org on 2013/08/27 19:05:14 UTC
svn commit: r1517887 - in
/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project: ./
hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/
hadoop-mapreduce-client/hadoop-mapreduce-client-hs/...
Author: arp
Date: Tue Aug 27 17:05:13 2013
New Revision: 1517887
URL: http://svn.apache.org/r1517887
Log:
Merge all changes from trunk to branch HDFS-2832.
Modified:
hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/ (props changed)
hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt (contents, props changed)
hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java
hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
Propchange: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project:r1517029-1517886
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt?rev=1517887&r1=1517886&r2=1517887&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt Tue Aug 27 17:05:13 2013
@@ -181,6 +181,9 @@ Release 2.1.1-beta - UNRELEASED
IMPROVEMENTS
+ MAPREDUCE-5478. TeraInputFormat unnecessarily defines its own FileSplit
+ subclass (Sandy Ryza)
+
OPTIMIZATIONS
MAPREDUCE-5446. TestJobHistoryEvents and TestJobHistoryParsing have race
Propchange: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:r1517029-1517886
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java?rev=1517887&r1=1517886&r2=1517887&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java Tue Aug 27 17:05:13 2013
@@ -129,6 +129,15 @@ public class JHAdminConfig {
public static final int DEFAULT_MR_HISTORY_WEBAPP_PORT = 19888;
public static final String DEFAULT_MR_HISTORY_WEBAPP_ADDRESS =
"0.0.0.0:" + DEFAULT_MR_HISTORY_WEBAPP_PORT;
+
+ /**The kerberos principal to be used for spnego filter for history server*/
+ public static final String MR_WEBAPP_SPNEGO_USER_NAME_KEY =
+ MR_HISTORY_PREFIX + "webapp.spnego-principal";
+
+ /** The kerberos keytab to be used for spnego filter for history server*/
+ public static final String MR_WEBAPP_SPNEGO_KEYTAB_FILE_KEY =
+ MR_HISTORY_PREFIX + "webapp.spnego-keytab-file";
+
/*
* HS Service Authorization
*/
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java?rev=1517887&r1=1517886&r2=1517887&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java Tue Aug 27 17:05:13 2013
@@ -80,6 +80,7 @@ import org.apache.hadoop.security.UserGr
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
@@ -148,8 +149,14 @@ public class HistoryClientService extend
JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS,
JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT);
// NOTE: there should be a .at(InetSocketAddress)
- WebApps.$for("jobhistory", HistoryClientService.class, this, "ws")
- .with(conf).at(NetUtils.getHostPortString(bindAddress)).start(webApp);
+ WebApps
+ .$for("jobhistory", HistoryClientService.class, this, "ws")
+ .with(conf)
+ .withHttpSpnegoKeytabKey(
+ JHAdminConfig.MR_WEBAPP_SPNEGO_KEYTAB_FILE_KEY)
+ .withHttpSpnegoPrincipalKey(
+ JHAdminConfig.MR_WEBAPP_SPNEGO_USER_NAME_KEY)
+ .at(NetUtils.getHostPortString(bindAddress)).start(webApp);
conf.updateConnectAddr(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
webApp.getListenerAddress());
}
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java?rev=1517887&r1=1517886&r2=1517887&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java Tue Aug 27 17:05:13 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.mapred;
import java.io.IOException;
import java.net.InetSocketAddress;
+import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -49,6 +50,7 @@ import org.apache.hadoop.yarn.api.record
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.NodeState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.client.api.YarnClientApplication;
@@ -118,8 +120,10 @@ public class ResourceMgrDelegate extends
try {
Set<String> appTypes = new HashSet<String>(1);
appTypes.add(MRJobConfig.MR_APPLICATION_TYPE);
+ EnumSet<YarnApplicationState> appStates =
+ EnumSet.noneOf(YarnApplicationState.class);
return TypeConverter.fromYarnApps(
- client.getApplications(appTypes), this.conf);
+ client.getApplications(appTypes, appStates), this.conf);
} catch (YarnException e) {
throw new IOException(e);
}
@@ -299,12 +303,28 @@ public class ResourceMgrDelegate extends
}
@Override
- public List<ApplicationReport> getApplications(
- Set<String> applicationTypes) throws YarnException, IOException {
+ public List<ApplicationReport> getApplications(Set<String> applicationTypes)
+ throws YarnException,
+ IOException {
return client.getApplications(applicationTypes);
}
@Override
+ public List<ApplicationReport> getApplications(
+ EnumSet<YarnApplicationState> applicationStates) throws YarnException,
+ IOException {
+ return client.getApplications(applicationStates);
+ }
+
+ @Override
+ public List<ApplicationReport> getApplications(
+ Set<String> applicationTypes,
+ EnumSet<YarnApplicationState> applicationStates)
+ throws YarnException, IOException {
+ return client.getApplications(applicationTypes, applicationStates);
+ }
+
+ @Override
public YarnClusterMetrics getYarnClusterMetrics() throws YarnException,
IOException {
return client.getYarnClusterMetrics();
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java?rev=1517887&r1=1517886&r2=1517887&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java Tue Aug 27 17:05:13 2013
@@ -60,48 +60,6 @@ public class TeraInputFormat extends Fil
private static MRJobConfig lastContext = null;
private static List<InputSplit> lastResult = null;
- static class TeraFileSplit extends FileSplit {
- static private String[] ZERO_LOCATIONS = new String[0];
-
- private String[] locations;
-
- public TeraFileSplit() {
- locations = ZERO_LOCATIONS;
- }
- public TeraFileSplit(Path file, long start, long length, String[] hosts) {
- super(file, start, length, hosts);
- try {
- locations = super.getLocations();
- } catch (IOException e) {
- locations = ZERO_LOCATIONS;
- }
- }
-
- // XXXXXX should this also be null-protected?
- protected void setLocations(String[] hosts) {
- locations = hosts;
- }
-
- @Override
- public String[] getLocations() {
- return locations;
- }
-
- public String toString() {
- StringBuffer result = new StringBuffer();
- result.append(getPath());
- result.append(" from ");
- result.append(getStart());
- result.append(" length ");
- result.append(getLength());
- for(String host: getLocations()) {
- result.append(" ");
- result.append(host);
- }
- return result.toString();
- }
- }
-
static class TextSampler implements IndexedSortable {
private ArrayList<Text> records = new ArrayList<Text>();
@@ -325,11 +283,6 @@ public class TeraInputFormat extends Fil
return new TeraRecordReader();
}
- protected FileSplit makeSplit(Path file, long start, long length,
- String[] hosts) {
- return new TeraFileSplit(file, start, length, hosts);
- }
-
@Override
public List<InputSplit> getSplits(JobContext job) throws IOException {
if (job == lastContext) {
@@ -343,7 +296,7 @@ public class TeraInputFormat extends Fil
System.out.println("Spent " + (t2 - t1) + "ms computing base-splits.");
if (job.getConfiguration().getBoolean(TeraScheduler.USE, true)) {
TeraScheduler scheduler = new TeraScheduler(
- lastResult.toArray(new TeraFileSplit[0]), job.getConfiguration());
+ lastResult.toArray(new FileSplit[0]), job.getConfiguration());
lastResult = scheduler.getNewFileSplits();
t3 = System.currentTimeMillis();
System.out.println("Spent " + (t3 - t2) + "ms computing TeraScheduler splits.");
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java?rev=1517887&r1=1517886&r2=1517887&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java Tue Aug 27 17:05:13 2013
@@ -24,7 +24,6 @@ import java.util.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.examples.terasort.TeraInputFormat.TeraFileSplit;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
@@ -214,8 +213,9 @@ class TeraScheduler {
for(int i=0; i < splits.length; ++i) {
if (splits[i].isAssigned) {
// copy the split and fix up the locations
- ((TeraFileSplit) realSplits[i]).setLocations
- (new String[]{splits[i].locations.get(0).hostname});
+ String[] newLocations = {splits[i].locations.get(0).hostname};
+ realSplits[i] = new FileSplit(realSplits[i].getPath(),
+ realSplits[i].getStart(), realSplits[i].getLength(), newLocations);
result[left++] = realSplits[i];
} else {
result[right--] = realSplits[i];