You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by sz...@apache.org on 2012/04/12 00:51:18 UTC
svn commit: r1325052 [3/3] - in
/hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project: ./ conf/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/
hadoop-mapreduce-client/hadoop-mapreduce-clie...
Modified: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java?rev=1325052&r1=1325051&r2=1325052&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java Wed Apr 11 22:51:10 2012
@@ -22,124 +22,149 @@ import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
-import junit.framework.TestCase;
-
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
import org.apache.hadoop.net.StandardSocketFactory;
-import org.junit.Ignore;
+import org.junit.Assert;
+import org.junit.Test;
/**
* This class checks that RPCs can use specialized socket factories.
*/
-@Ignore
-public class TestSocketFactory extends TestCase {
+public class TestSocketFactory {
/**
- * Check that we can reach a NameNode or a JobTracker using a specific
+ * Check that we can reach a NameNode or Resource Manager using a specific
* socket factory
*/
+ @Test
public void testSocketFactory() throws IOException {
// Create a standard mini-cluster
Configuration sconf = new Configuration();
- MiniDFSCluster cluster = new MiniDFSCluster(sconf, 1, true, null);
+ MiniDFSCluster cluster = new MiniDFSCluster.Builder(sconf).numDataNodes(1)
+ .build();
final int nameNodePort = cluster.getNameNodePort();
// Get a reference to its DFS directly
FileSystem fs = cluster.getFileSystem();
- assertTrue(fs instanceof DistributedFileSystem);
+ Assert.assertTrue(fs instanceof DistributedFileSystem);
DistributedFileSystem directDfs = (DistributedFileSystem) fs;
- // Get another reference via network using a specific socket factory
- Configuration cconf = new Configuration();
- FileSystem.setDefaultUri(cconf, String.format("hdfs://localhost:%s/",
- nameNodePort + 10));
- cconf.set("hadoop.rpc.socket.factory.class.default",
- "org.apache.hadoop.ipc.DummySocketFactory");
- cconf.set("hadoop.rpc.socket.factory.class.ClientProtocol",
- "org.apache.hadoop.ipc.DummySocketFactory");
- cconf.set("hadoop.rpc.socket.factory.class.JobSubmissionProtocol",
- "org.apache.hadoop.ipc.DummySocketFactory");
+ Configuration cconf = getCustomSocketConfigs(nameNodePort);
fs = FileSystem.get(cconf);
- assertTrue(fs instanceof DistributedFileSystem);
+ Assert.assertTrue(fs instanceof DistributedFileSystem);
DistributedFileSystem dfs = (DistributedFileSystem) fs;
JobClient client = null;
- MiniMRCluster mr = null;
+ MiniMRYarnCluster miniMRYarnCluster = null;
try {
// This will test RPC to the NameNode only.
// could we test Client-DataNode connections?
Path filePath = new Path("/dir");
- assertFalse(directDfs.exists(filePath));
- assertFalse(dfs.exists(filePath));
+ Assert.assertFalse(directDfs.exists(filePath));
+ Assert.assertFalse(dfs.exists(filePath));
directDfs.mkdirs(filePath);
- assertTrue(directDfs.exists(filePath));
- assertTrue(dfs.exists(filePath));
+ Assert.assertTrue(directDfs.exists(filePath));
+ Assert.assertTrue(dfs.exists(filePath));
- // This will test TPC to a JobTracker
+ // This will test RPC to a Resource Manager
fs = FileSystem.get(sconf);
- mr = new MiniMRCluster(1, fs.getUri().toString(), 1);
- final int jobTrackerPort = mr.getJobTrackerPort();
-
+ JobConf jobConf = new JobConf();
+ FileSystem.setDefaultUri(jobConf, fs.getUri().toString());
+ miniMRYarnCluster = initAndStartMiniMRYarnCluster(jobConf);
JobConf jconf = new JobConf(cconf);
- jconf.set("mapred.job.tracker", String.format("localhost:%d",
- jobTrackerPort + 10));
- jconf.set(MRConfig.FRAMEWORK_NAME, MRConfig.CLASSIC_FRAMEWORK_NAME);
+ jconf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
+ String rmAddress = jconf.get("yarn.resourcemanager.address");
+ String[] split = rmAddress.split(":");
+ jconf.set("yarn.resourcemanager.address", split[0] + ':'
+ + (Integer.parseInt(split[1]) + 10));
client = new JobClient(jconf);
JobStatus[] jobs = client.jobsToComplete();
- assertTrue(jobs.length == 0);
+ Assert.assertTrue(jobs.length == 0);
} finally {
- try {
- if (client != null)
- client.close();
- } catch (Exception ignored) {
- // nothing we can do
- ignored.printStackTrace();
- }
- try {
- if (dfs != null)
- dfs.close();
-
- } catch (Exception ignored) {
- // nothing we can do
- ignored.printStackTrace();
- }
- try {
- if (directDfs != null)
- directDfs.close();
-
- } catch (Exception ignored) {
- // nothing we can do
- ignored.printStackTrace();
- }
- try {
- if (cluster != null)
- cluster.shutdown();
-
- } catch (Exception ignored) {
- // nothing we can do
- ignored.printStackTrace();
- }
- if (mr != null) {
- try {
- mr.shutdown();
- } catch (Exception ignored) {
- ignored.printStackTrace();
- }
- }
+ closeClient(client);
+ closeDfs(dfs);
+ closeDfs(directDfs);
+ stopMiniMRYarnCluster(miniMRYarnCluster);
+ shutdownDFSCluster(cluster);
+ }
+ }
+
+ private MiniMRYarnCluster initAndStartMiniMRYarnCluster(JobConf jobConf) {
+ MiniMRYarnCluster miniMRYarnCluster;
+ miniMRYarnCluster = new MiniMRYarnCluster(this.getClass().getName(), 1);
+ miniMRYarnCluster.init(jobConf);
+ miniMRYarnCluster.start();
+ return miniMRYarnCluster;
+ }
+
+ private Configuration getCustomSocketConfigs(final int nameNodePort) {
+ // Get another reference via network using a specific socket factory
+ Configuration cconf = new Configuration();
+ FileSystem.setDefaultUri(cconf, String.format("hdfs://localhost:%s/",
+ nameNodePort + 10));
+ cconf.set("hadoop.rpc.socket.factory.class.default",
+ "org.apache.hadoop.ipc.DummySocketFactory");
+ cconf.set("hadoop.rpc.socket.factory.class.ClientProtocol",
+ "org.apache.hadoop.ipc.DummySocketFactory");
+ cconf.set("hadoop.rpc.socket.factory.class.JobSubmissionProtocol",
+ "org.apache.hadoop.ipc.DummySocketFactory");
+ return cconf;
+ }
+
+ private void shutdownDFSCluster(MiniDFSCluster cluster) {
+ try {
+ if (cluster != null)
+ cluster.shutdown();
+
+ } catch (Exception ignored) {
+ // nothing we can do
+ ignored.printStackTrace();
+ }
+ }
+
+ private void stopMiniMRYarnCluster(MiniMRYarnCluster miniMRYarnCluster) {
+ try {
+ if (miniMRYarnCluster != null)
+ miniMRYarnCluster.stop();
+
+ } catch (Exception ignored) {
+ // nothing we can do
+ ignored.printStackTrace();
+ }
+ }
+
+ private void closeDfs(DistributedFileSystem dfs) {
+ try {
+ if (dfs != null)
+ dfs.close();
+
+ } catch (Exception ignored) {
+ // nothing we can do
+ ignored.printStackTrace();
+ }
+ }
+
+ private void closeClient(JobClient client) {
+ try {
+ if (client != null)
+ client.close();
+ } catch (Exception ignored) {
+ // nothing we can do
+ ignored.printStackTrace();
}
}
}
@@ -155,32 +180,27 @@ class DummySocketFactory extends Standar
public DummySocketFactory() {
}
- /* @inheritDoc */
@Override
public Socket createSocket() throws IOException {
return new Socket() {
@Override
- public void connect(SocketAddress addr, int timeout)
- throws IOException {
+ public void connect(SocketAddress addr, int timeout) throws IOException {
assert (addr instanceof InetSocketAddress);
InetSocketAddress iaddr = (InetSocketAddress) addr;
SocketAddress newAddr = null;
if (iaddr.isUnresolved())
- newAddr =
- new InetSocketAddress(iaddr.getHostName(),
- iaddr.getPort() - 10);
+ newAddr = new InetSocketAddress(iaddr.getHostName(),
+ iaddr.getPort() - 10);
else
- newAddr =
- new InetSocketAddress(iaddr.getAddress(), iaddr.getPort() - 10);
- System.out.printf("Test socket: rerouting %s to %s\n", iaddr,
- newAddr);
+ newAddr = new InetSocketAddress(iaddr.getAddress(),
+ iaddr.getPort() - 10);
+ System.out.printf("Test socket: rerouting %s to %s\n", iaddr, newAddr);
super.connect(newAddr, timeout);
}
};
}
- /* @inheritDoc */
@Override
public boolean equals(Object obj) {
if (this == obj)
@@ -191,11 +211,4 @@ class DummySocketFactory extends Standar
return false;
return true;
}
-
- /* @inheritDoc */
- @Override
- public int hashCode() {
- // Dummy hash code (to make find bugs happy)
- return 53;
- }
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java?rev=1325052&r1=1325051&r2=1325052&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java Wed Apr 11 22:51:10 2012
@@ -18,34 +18,63 @@
package org.apache.hadoop.util;
+import java.io.BufferedInputStream;
import java.io.File;
-import org.apache.hadoop.fs.Path;
-import org.junit.Ignore;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.jar.JarOutputStream;
+import java.util.zip.ZipEntry;
-import junit.framework.TestCase;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Test;
/**
 * A test to test the RunJar class.
*/
-@Ignore
-public class TestRunJar extends TestCase {
-
+public class TestRunJar {
+
private static String TEST_ROOT_DIR = new Path(System.getProperty(
"test.build.data", "/tmp")).toString();
-
+
+ private static final String TEST_JAR_NAME = "testjar.jar";
+ private static final String CLASS_NAME = "Hello.class";
+
+ @Test
public void testRunjar() throws Throwable {
-
- File outFile = new File(TEST_ROOT_DIR, "out");
- // delete if output file already exists.
+ File outFile = new File(TEST_ROOT_DIR, "out");
+ // delete if output file already exists.
if (outFile.exists()) {
outFile.delete();
}
-
+ File makeTestJar = makeTestJar();
+
String[] args = new String[3];
- args[0] = "build/test/mapred/testjar/testjob.jar";
- args[1] = "testjar.Hello";
+ args[0] = makeTestJar.getAbsolutePath();
+ args[1] = "org.apache.hadoop.util.Hello";
args[2] = outFile.toString();
RunJar.main(args);
- assertTrue("RunJar failed", outFile.exists());
+ Assert.assertTrue("RunJar failed", outFile.exists());
+ }
+
+ private File makeTestJar() throws IOException {
+ File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
+ JarOutputStream jstream = new JarOutputStream(new FileOutputStream(jarFile));
+ InputStream entryInputStream = this.getClass().getResourceAsStream(
+ CLASS_NAME);
+ ZipEntry entry = new ZipEntry("org/apache/hadoop/util/" + CLASS_NAME);
+ jstream.putNextEntry(entry);
+ BufferedInputStream bufInputStream = new BufferedInputStream(
+ entryInputStream, 2048);
+ int count;
+ byte[] data = new byte[2048];
+ while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
+ jstream.write(data, 0, count);
+ }
+ jstream.closeEntry();
+ jstream.close();
+
+ return jarFile;
}
-}
+}
\ No newline at end of file
Modified: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/bin/yarn
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/bin/yarn?rev=1325052&r1=1325051&r2=1325052&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/bin/yarn (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/bin/yarn Wed Apr 11 22:51:10 2012
@@ -225,6 +225,7 @@ YARN_OPTS="$YARN_OPTS -Dyarn.log.dir=$YA
YARN_OPTS="$YARN_OPTS -Dhadoop.log.file=$YARN_LOGFILE"
YARN_OPTS="$YARN_OPTS -Dyarn.log.file=$YARN_LOGFILE"
YARN_OPTS="$YARN_OPTS -Dyarn.home.dir=$YARN_HOME"
+YARN_OPTS="$YARN_OPTS -Dhadoop.home.dir=$YARN_HOME"
YARN_OPTS="$YARN_OPTS -Dhadoop.root.logger=${YARN_ROOT_LOGGER:-INFO,console}"
YARN_OPTS="$YARN_OPTS -Dyarn.root.logger=${YARN_ROOT_LOGGER:-INFO,console}"
if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
Modified: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java?rev=1325052&r1=1325051&r2=1325052&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java Wed Apr 11 22:51:10 2012
@@ -78,7 +78,6 @@ import org.apache.hadoop.yarn.api.record
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.ipc.YarnRPC;
-import org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
@@ -506,22 +505,20 @@ public class Client {
// It should be provided out of the box.
// For now setting all required classpaths including
// the classpath to "." for the application jar
- String classPathEnv = "${CLASSPATH}"
- + ":./*"
- + ":$HADOOP_CONF_DIR"
- + ":$HADOOP_COMMON_HOME/share/hadoop/common/*"
- + ":$HADOOP_COMMON_HOME/share/hadoop/common/lib/*"
- + ":$HADOOP_HDFS_HOME/share/hadoop/hdfs/*"
- + ":$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*"
- + ":$YARN_HOME/modules/*"
- + ":$YARN_HOME/lib/*"
- + ":./log4j.properties:";
+ StringBuilder classPathEnv = new StringBuilder("${CLASSPATH}:./*");
+ for (String c : conf.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH)
+ .split(",")) {
+ classPathEnv.append(':');
+ classPathEnv.append(c.trim());
+ }
+ classPathEnv.append(":./log4j.properties");
- // add the runtime classpath needed for tests to work
+ // add the runtime classpath needed for tests to work
String testRuntimeClassPath = Client.getTestRuntimeClasspath();
- classPathEnv += ":" + testRuntimeClassPath;
+ classPathEnv.append(':');
+ classPathEnv.append(testRuntimeClassPath);
- env.put("CLASSPATH", classPathEnv);
+ env.put("CLASSPATH", classPathEnv.toString());
amContainer.setEnvironment(env);
Modified: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/RackResolver.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/RackResolver.java?rev=1325052&r1=1325051&r2=1325052&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/RackResolver.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/RackResolver.java Wed Apr 11 22:51:10 2012
@@ -31,6 +31,9 @@ import org.apache.hadoop.net.DNSToSwitch
import org.apache.hadoop.net.Node;
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.net.ScriptBasedMapping;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import com.google.common.annotations.VisibleForTesting;
public class RackResolver {
private static DNSToSwitchMapping dnsToSwitchMapping;
@@ -49,10 +52,8 @@ public class RackResolver {
ScriptBasedMapping.class,
DNSToSwitchMapping.class);
try {
- Constructor<? extends DNSToSwitchMapping> dnsToSwitchMappingConstructor
- = dnsToSwitchMappingClass.getConstructor();
- DNSToSwitchMapping newInstance =
- dnsToSwitchMappingConstructor.newInstance();
+ DNSToSwitchMapping newInstance = ReflectionUtils.newInstance(
+ dnsToSwitchMappingClass, conf);
// Wrap around the configured class with the Cached implementation so as
// to save on repetitive lookups.
// Check if the impl is already caching, to avoid double caching.
@@ -99,4 +100,12 @@ public class RackResolver {
LOG.info("Resolved " + hostName + " to " + rName);
return new NodeBase(hostName, rName);
}
+
+ /**
+ * Only used by tests
+ */
+ @VisibleForTesting
+ static DNSToSwitchMapping getDnsToSwitchMapping(){
+ return dnsToSwitchMapping;
+ }
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestRackResolver.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestRackResolver.java?rev=1325052&r1=1325051&r2=1325052&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestRackResolver.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestRackResolver.java Wed Apr 11 22:51:10 2012
@@ -52,7 +52,7 @@ public class TestRackResolver {
Assert.assertTrue(numHost1 <= 1);
return returnList;
}
-
+
}
@Test
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/c++/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/c++:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/block_forensics/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/block_forensics:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/build-contrib.xml
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build-contrib.xml:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/build.xml
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build.xml:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/data_join/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/data_join:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/eclipse-plugin/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/eclipse-plugin:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/index/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/index:r1311518-1325051
Modified: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java?rev=1325052&r1=1325051&r2=1325052&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java Wed Apr 11 22:51:10 2012
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.ChecksumExce
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
+import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.SocketOutputStream;
import org.apache.hadoop.util.DataChecksum;
@@ -441,9 +442,9 @@ public class RaidBlockSender implements
private static class BlockInputStreamFactory implements InputStreamFactory {
private final ExtendedBlock block;
- private final FSDatasetInterface data;
+ private final FsDatasetSpi<?> data;
- private BlockInputStreamFactory(ExtendedBlock block, FSDatasetInterface data) {
+ private BlockInputStreamFactory(ExtendedBlock block, FsDatasetSpi<?> data) {
this.block = block;
this.data = data;
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java?rev=1325052&r1=1325051&r2=1325052&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java Wed Apr 11 22:51:10 2012
@@ -22,6 +22,7 @@ import java.io.*;
import org.apache.hadoop.classification.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.hdfs.protocol.*;
+import org.apache.hadoop.ipc.StandbyException;
import org.apache.hadoop.security.AccessControlException;
/** Utilities used by RAID for accessing NameNode. */
@@ -35,10 +36,11 @@ public class NameNodeRaidUtil {
return dir.getFileInfo(src, resolveLink);
}
- /** Accessing FSNamesystem.getFileInfo(..) */
+ /** Accessing FSNamesystem.getFileInfo(..)
+ * @throws StandbyException */
public static HdfsFileStatus getFileInfo(final FSNamesystem namesystem,
final String src, final boolean resolveLink
- ) throws AccessControlException, UnresolvedLinkException {
+ ) throws AccessControlException, UnresolvedLinkException, StandbyException {
return namesystem.getFileInfo(src, resolveLink);
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java?rev=1325052&r1=1325051&r2=1325052&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java Wed Apr 11 22:51:10 2012
@@ -622,8 +622,8 @@ public abstract class BlockFixer extends
int idx = rand.nextInt(live.length);
chosen = live[idx];
for (DatanodeInfo avoid: locationsToAvoid) {
- if (chosen.name.equals(avoid.name)) {
- LOG.info("Avoiding " + avoid.name);
+ if (chosen.getName().equals(avoid.getName())) {
+ LOG.info("Avoiding " + avoid.getName());
chosen = null;
break;
}
@@ -632,7 +632,7 @@ public abstract class BlockFixer extends
if (chosen == null) {
throw new IOException("Could not choose datanode");
}
- LOG.info("Choosing datanode " + chosen.name);
+ LOG.info("Choosing datanode " + chosen.getName());
return chosen;
}
@@ -736,7 +736,7 @@ public abstract class BlockFixer extends
DataInputStream metadataIn,
LocatedBlock block, long blockSize)
throws IOException {
- InetSocketAddress target = NetUtils.createSocketAddr(datanode.name);
+ InetSocketAddress target = NetUtils.createSocketAddr(datanode.getName());
Socket sock = SocketChannel.open().socket();
int readTimeout =
@@ -785,7 +785,7 @@ public abstract class BlockFixer extends
1, 0L, blockSize, 0L, DataChecksum.newDataChecksum(metadataIn));
blockSender.sendBlock(out, baseStream);
- LOG.info("Sent block " + block.getBlock() + " to " + datanode.name);
+ LOG.info("Sent block " + block.getBlock() + " to " + datanode.getName());
} finally {
out.close();
}
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/contrib/vaidya/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/vaidya:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/examples/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/examples:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/java:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/test/mapred/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc:r1311518-1325051
Propchange: hadoop/common/branches/HDFS-3092/hadoop-mapreduce-project/src/webapps/job/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/webapps/job:r1311518-1325051