You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2013/08/12 23:26:02 UTC
svn commit: r1513258 [7/9] - in
/hadoop/common/branches/YARN-321/hadoop-common-project: ./
hadoop-annotations/ hadoop-auth-examples/
hadoop-auth-examples/src/main/webapp/
hadoop-auth-examples/src/main/webapp/annonymous/
hadoop-auth-examples/src/main/we...
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto Mon Aug 12 21:25:49 2013
@@ -62,7 +62,11 @@ message RpcRequestHeaderProto { // the h
optional RpcKindProto rpcKind = 1;
optional OperationProto rpcOp = 2;
- required uint32 callId = 3; // each rpc has a callId that is also used in response
+ required uint32 callId = 3; // a sequence number that is sent back in response
+ required bytes clientId = 4; // Globally unique client ID
+ // clientId + callId uniquely identifies a request
+ // retry count, 1 means this is the first retry
+ optional sint32 retryCount = 5 [default = -1];
}
@@ -126,6 +130,8 @@ message RpcResponseHeaderProto {
optional string exceptionClassName = 4; // if request fails
optional string errorMsg = 5; // if request fails, often contains stack trace
optional RpcErrorCodeProto errorDetail = 6; // in case of error
+ optional bytes clientId = 7; // Globally unique client ID
+ optional sint32 retryCount = 8 [default = -1];
}
message RpcSaslProto {
@@ -135,6 +141,7 @@ message RpcSaslProto {
INITIATE = 2;
CHALLENGE = 3;
RESPONSE = 4;
+ WRAP = 5;
}
message SaslAuth {
@@ -149,4 +156,4 @@ message RpcSaslProto {
required SaslState state = 2;
optional bytes token = 3;
repeated SaslAuth auths = 4;
-}
\ No newline at end of file
+}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Mon Aug 12 21:25:49 2013
@@ -1208,4 +1208,15 @@
</description>
</property>
+<property>
+ <name>fs.client.resolve.remote.symlinks</name>
+ <value>true</value>
+ <description>
+ Whether to resolve symlinks when accessing a remote Hadoop filesystem.
+ Setting this to false causes an exception to be thrown upon encountering
+ a symlink. This setting does not apply to local filesystems, which
+ automatically resolve local symlinks.
+ </description>
+</property>
+
</configuration>
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/webapps/static/hadoop.css
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/webapps/static/hadoop.css?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/webapps/static/hadoop.css (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/webapps/static/hadoop.css Mon Aug 12 21:25:49 2013
@@ -45,8 +45,7 @@ div#dfsnodetable a#title {
font-weight : bolder;
}
-div#dfsnodetable td, th {
- border-bottom-style : none;
+div#dfsnodetable td, th {
padding-bottom : 4px;
padding-top : 4px;
}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h Mon Aug 12 21:25:49 2013
@@ -153,4 +153,6 @@ DWORD ChangeFileModeByMask(__in LPCWSTR
DWORD GetLocalGroupsForUser(__in LPCWSTR user,
__out LPLOCALGROUP_USERS_INFO_0 *groups, __out LPDWORD entries);
-BOOL EnablePrivilege(__in LPCWSTR privilegeName);
\ No newline at end of file
+BOOL EnablePrivilege(__in LPCWSTR privilegeName);
+
+void GetLibraryName(__in LPCVOID lpAddress, __out LPWSTR *filename);
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c Mon Aug 12 21:25:49 2013
@@ -1709,3 +1709,51 @@ void ReportErrorCode(LPCWSTR func, DWORD
}
if (msg != NULL) LocalFree(msg);
}
+
+//----------------------------------------------------------------------------
+// Function: GetLibraryName
+//
+// Description:
+// Given an address, get the file name of the library from which it was loaded.
+//
+// Returns:
+// None
+//
+// Notes:
+// - The function allocates heap memory and points the filename out parameter to
+// the newly allocated memory, which will contain the name of the file.
+//
+// - If there is any failure, then the function frees the heap memory it
+// allocated and sets the filename out parameter to NULL.
+//
+void GetLibraryName(LPCVOID lpAddress, LPWSTR *filename)
+{
+ SIZE_T ret = 0;
+ DWORD size = MAX_PATH;
+ HMODULE mod = NULL;
+ DWORD err = ERROR_SUCCESS;
+
+ MEMORY_BASIC_INFORMATION mbi;
+ ret = VirtualQuery(lpAddress, &mbi, sizeof(mbi));
+ if (ret == 0) goto cleanup;
+ mod = mbi.AllocationBase;
+
+ do {
+ *filename = (LPWSTR) realloc(*filename, size * sizeof(WCHAR));
+ if (*filename == NULL) goto cleanup;
+ GetModuleFileName(mod, *filename, size);
+ size <<= 1;
+ err = GetLastError();
+ } while (err == ERROR_INSUFFICIENT_BUFFER);
+
+ if (err != ERROR_SUCCESS) goto cleanup;
+
+ return;
+
+cleanup:
+ if (*filename != NULL)
+ {
+ free(*filename);
+ *filename = NULL;
+ }
+}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm Mon Aug 12 21:25:49 2013
@@ -381,6 +381,7 @@ Administration Commands
*-----------------+-----------------------------------------------------------+
| -metasave filename | Save Namenode's primary data structures to <filename> in
| the directory specified by hadoop.log.dir property.
+ | <filename> is overwritten if it exists.
| <filename> will contain one line for each of the following\
| 1. Datanodes heart beating with Namenode\
| 2. Blocks waiting to be replicated\
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/Compatibility.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/Compatibility.apt.vm?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/Compatibility.apt.vm (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/Compatibility.apt.vm Mon Aug 12 21:25:49 2013
@@ -517,7 +517,7 @@ hand-in-hand to address this.
* Annotations for interfaces as per interface classification
schedule -
{{{https://issues.apache.org/jira/browse/HADOOP-7391}HADOOP-7391}}
- {{{InterfaceClassification.html}Hadoop Interface Classification}}
+ {{{./InterfaceClassification.html}Hadoop Interface Classification}}
* Compatibility for Hadoop 1.x releases -
{{{https://issues.apache.org/jira/browse/HADOOP-5071}HADOOP-5071}}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm Mon Aug 12 21:25:49 2013
@@ -45,6 +45,27 @@ bin/hadoop fs <args>
Differences are described with each of the commands. Error information is
sent to stderr and the output is sent to stdout.
+appendToFile
+
+ Usage: <<<hdfs dfs -appendToFile <localsrc> ... <dst> >>>
+
+ Append single src, or multiple srcs from local file system to the
+ destination file system. Also reads input from stdin and appends to
+ destination file system.
+
+ * <<<hdfs dfs -appendToFile localfile /user/hadoop/hadoopfile>>>
+
+ * <<<hdfs dfs -appendToFile localfile1 localfile2 /user/hadoop/hadoopfile>>>
+
+ * <<<hdfs dfs -appendToFile localfile hdfs://nn.example.com/hadoop/hadoopfile>>>
+
+ * <<<hdfs dfs -appendToFile - hdfs://nn.example.com/hadoop/hadoopfile>>>
+ Reads the input from stdin.
+
+ Exit Code:
+
+ Returns 0 on success and 1 on error.
+
cat
Usage: <<<hdfs dfs -cat URI [URI ...]>>>
@@ -76,7 +97,7 @@ chmod
Change the permissions of files. With -R, make the change recursively
through the directory structure. The user must be the owner of the file, or
- else a super-user. Additional information is in the
+ else a super-user. Additional information is in the
{{{betterurl}Permissions Guide}}.
chown
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm Mon Aug 12 21:25:49 2013
@@ -221,7 +221,7 @@ Single Node Setup
* Fully-Distributed Operation
For information on setting up fully-distributed, non-trivial clusters
- see {{{Cluster Setup}}}.
+ see {{{./ClusterSetup.html}Cluster Setup}}.
Java and JNI are trademarks or registered trademarks of Sun
Microsystems, Inc. in the United States and other countries.
Propchange: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
Merged /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core:r1503799-1513205
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1505610,1507165,1507259,1509070
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java Mon Aug 12 21:25:49 2013
@@ -21,9 +21,11 @@ import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
+import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
+import java.io.OutputStreamWriter;
import java.io.StringWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
@@ -44,6 +46,7 @@ import static org.junit.Assert.assertArr
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration.IntegerRanges;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetUtils;
import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
import org.codehaus.jackson.map.ObjectMapper;
@@ -53,6 +56,10 @@ public class TestConfiguration extends T
private Configuration conf;
final static String CONFIG = new File("./test-config-TestConfiguration.xml").getAbsolutePath();
final static String CONFIG2 = new File("./test-config2-TestConfiguration.xml").getAbsolutePath();
+ private static final String CONFIG_MULTI_BYTE = new File(
+ "./test-config-multi-byte-TestConfiguration.xml").getAbsolutePath();
+ private static final String CONFIG_MULTI_BYTE_SAVED = new File(
+ "./test-config-multi-byte-saved-TestConfiguration.xml").getAbsolutePath();
final static Random RAN = new Random();
final static String XMLHEADER =
IBM_JAVA?"<?xml version=\"1.0\" encoding=\"UTF-8\"?><configuration>":
@@ -69,6 +76,8 @@ public class TestConfiguration extends T
super.tearDown();
new File(CONFIG).delete();
new File(CONFIG2).delete();
+ new File(CONFIG_MULTI_BYTE).delete();
+ new File(CONFIG_MULTI_BYTE_SAVED).delete();
}
private void startConfig() throws IOException{
@@ -101,6 +110,41 @@ public class TestConfiguration extends T
assertEquals("A", conf.get("prop"));
}
+ /**
+ * Tests use of multi-byte characters in property names and values. This test
+ * round-trips multi-byte string literals through saving and loading of config
+ * and asserts that the same values were read.
+ */
+ public void testMultiByteCharacters() throws IOException {
+ String priorDefaultEncoding = System.getProperty("file.encoding");
+ try {
+ System.setProperty("file.encoding", "US-ASCII");
+ String name = "multi_byte_\u611b_name";
+ String value = "multi_byte_\u0641_value";
+ out = new BufferedWriter(new OutputStreamWriter(
+ new FileOutputStream(CONFIG_MULTI_BYTE), "UTF-8"));
+ startConfig();
+ declareProperty(name, value, value);
+ endConfig();
+
+ Configuration conf = new Configuration(false);
+ conf.addResource(new Path(CONFIG_MULTI_BYTE));
+ assertEquals(value, conf.get(name));
+ FileOutputStream fos = new FileOutputStream(CONFIG_MULTI_BYTE_SAVED);
+ try {
+ conf.writeXml(fos);
+ } finally {
+ IOUtils.closeStream(fos);
+ }
+
+ conf = new Configuration(false);
+ conf.addResource(new Path(CONFIG_MULTI_BYTE_SAVED));
+ assertEquals(value, conf.get(name));
+ } finally {
+ System.setProperty("file.encoding", priorDefaultEncoding);
+ }
+ }
+
public void testVariableSubstitution() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSTestWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSTestWrapper.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSTestWrapper.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSTestWrapper.java Mon Aug 12 21:25:49 2013
@@ -71,7 +71,8 @@ public abstract class FSTestWrapper impl
public String getAbsoluteTestRootDir() throws IOException {
if (absTestRootDir == null) {
- if (testRootDir.startsWith("/")) {
+ Path testRootPath = new Path(testRootDir);
+ if (testRootPath.isAbsolute()) {
absTestRootDir = testRootDir;
} else {
absTestRootDir = getWorkingDirectory().toString() + "/"
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSWrapper.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSWrapper.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSWrapper.java Mon Aug 12 21:25:49 2013
@@ -109,4 +109,7 @@ public interface FSWrapper {
abstract public FileStatus[] listStatus(final Path f)
throws AccessControlException, FileNotFoundException,
UnsupportedFileSystemException, IOException;
+
+ abstract public FileStatus[] globStatus(Path pathPattern, PathFilter filter)
+ throws IOException;
}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java Mon Aug 12 21:25:49 2013
@@ -332,4 +332,10 @@ public final class FileContextTestWrappe
FileNotFoundException, UnsupportedFileSystemException, IOException {
return fc.util().listStatus(f);
}
+
+ @Override
+ public FileStatus[] globStatus(Path pathPattern, PathFilter filter)
+ throws IOException {
+ return fc.util().globStatus(pathPattern, filter);
+ }
}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java Mon Aug 12 21:25:49 2013
@@ -397,4 +397,10 @@ public final class FileSystemTestWrapper
FileNotFoundException, UnsupportedFileSystemException, IOException {
return fs.listStatus(f);
}
+
+ @Override
+ public FileStatus[] globStatus(Path pathPattern, PathFilter filter)
+ throws IOException {
+ return fs.globStatus(pathPattern, filter);
+ }
}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/SymlinkBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/SymlinkBaseTest.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/SymlinkBaseTest.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/SymlinkBaseTest.java Mon Aug 12 21:25:49 2013
@@ -20,13 +20,10 @@ package org.apache.hadoop.fs;
import java.io.*;
import java.net.URI;
import java.util.EnumSet;
-import org.apache.hadoop.fs.FileContext;
+
import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.fs.CreateFlag;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.test.GenericTestUtils;
import static org.junit.Assert.*;
@@ -51,6 +48,13 @@ public abstract class SymlinkBaseTest {
abstract protected String testBaseDir2() throws IOException;
abstract protected URI testURI();
+ // Returns true if the filesystem is emulating symlink support. Certain
+ // checks will be bypassed if that is the case.
+ //
+ protected boolean emulatingSymlinksOnWindows() {
+ return false;
+ }
+
protected IOException unwrapException(IOException e) {
return e;
}
@@ -156,8 +160,11 @@ public abstract class SymlinkBaseTest {
@Test(timeout=10000)
/** Try to create a directory given a path that refers to a symlink */
public void testMkdirExistingLink() throws IOException {
+ Path file = new Path(testBaseDir1() + "/targetFile");
+ createAndWriteFile(file);
+
Path dir = new Path(testBaseDir1()+"/link");
- wrapper.createSymlink(new Path("/doesNotExist"), dir, false);
+ wrapper.createSymlink(file, dir, false);
try {
wrapper.mkdir(dir, FileContext.DEFAULT_PERM, false);
fail("Created a dir where a symlink exists");
@@ -224,6 +231,7 @@ public abstract class SymlinkBaseTest {
@Test(timeout=10000)
/** Stat a link to a file */
public void testStatLinkToFile() throws IOException {
+ assumeTrue(!emulatingSymlinksOnWindows());
Path file = new Path(testBaseDir1()+"/file");
Path linkToFile = new Path(testBaseDir1()+"/linkToFile");
createAndWriteFile(file);
@@ -232,8 +240,7 @@ public abstract class SymlinkBaseTest {
assertTrue(wrapper.isSymlink(linkToFile));
assertTrue(wrapper.isFile(linkToFile));
assertFalse(wrapper.isDir(linkToFile));
- assertEquals(file.toUri().getPath(),
- wrapper.getLinkTarget(linkToFile).toString());
+ assertEquals(file, wrapper.getLinkTarget(linkToFile));
// The local file system does not fully resolve the link
// when obtaining the file status
if (!"file".equals(getScheme())) {
@@ -277,8 +284,7 @@ public abstract class SymlinkBaseTest {
assertFalse(wrapper.isFile(linkToDir));
assertTrue(wrapper.isDir(linkToDir));
- assertEquals(dir.toUri().getPath(),
- wrapper.getLinkTarget(linkToDir).toString());
+ assertEquals(dir, wrapper.getLinkTarget(linkToDir));
}
@Test(timeout=10000)
@@ -351,6 +357,12 @@ public abstract class SymlinkBaseTest {
/* Assert that the given link to a file behaves as expected. */
private void checkLink(Path linkAbs, Path expectedTarget, Path targetQual)
throws IOException {
+
+ // If we are emulating symlinks then many of these checks will fail
+ // so we skip them.
+ //
+ assumeTrue(!emulatingSymlinksOnWindows());
+
Path dir = new Path(testBaseDir1());
// isFile/Directory
assertTrue(wrapper.isFile(linkAbs));
@@ -400,7 +412,7 @@ public abstract class SymlinkBaseTest {
failureExpected = false;
}
try {
- readFile(new Path(getScheme()+"://"+testBaseDir1()+"/linkToFile"));
+ readFile(new Path(getScheme()+":///"+testBaseDir1()+"/linkToFile"));
assertFalse(failureExpected);
} catch (Exception e) {
if (!failureExpected) {
@@ -646,6 +658,7 @@ public abstract class SymlinkBaseTest {
@Test(timeout=10000)
/** Create symlink through a symlink */
public void testCreateLinkViaLink() throws IOException {
+ assumeTrue(!emulatingSymlinksOnWindows());
Path dir1 = new Path(testBaseDir1());
Path file = new Path(testBaseDir1(), "file");
Path linkToDir = new Path(testBaseDir2(), "linkToDir");
@@ -688,6 +701,7 @@ public abstract class SymlinkBaseTest {
@Test(timeout=10000)
/** Test create symlink using the same path */
public void testCreateLinkTwice() throws IOException {
+ assumeTrue(!emulatingSymlinksOnWindows());
Path file = new Path(testBaseDir1(), "file");
Path link = new Path(testBaseDir1(), "linkToFile");
createAndWriteFile(file);
@@ -783,7 +797,7 @@ public abstract class SymlinkBaseTest {
Path linkToDir = new Path(testBaseDir2(), "linkToDir");
Path fileViaLink = new Path(linkToDir, "test/file");
// Symlink to .. is not a problem since the .. is squashed early
- assertEquals(testBaseDir1(), dotDot.toString());
+ assertEquals(new Path(testBaseDir1()), dotDot);
createAndWriteFile(file);
wrapper.createSymlink(dotDot, linkToDir, false);
readFile(fileViaLink);
@@ -876,7 +890,8 @@ public abstract class SymlinkBaseTest {
assertFalse(wrapper.exists(linkViaLink));
// Check that we didn't rename the link target
assertTrue(wrapper.exists(file));
- assertTrue(wrapper.getFileLinkStatus(linkNewViaLink).isSymlink());
+ assertTrue(wrapper.getFileLinkStatus(linkNewViaLink).isSymlink() ||
+ emulatingSymlinksOnWindows());
readFile(linkNewViaLink);
}
@@ -1014,7 +1029,8 @@ public abstract class SymlinkBaseTest {
createAndWriteFile(file);
wrapper.createSymlink(file, link1, false);
wrapper.rename(link1, link2);
- assertTrue(wrapper.getFileLinkStatus(link2).isSymlink());
+ assertTrue(wrapper.getFileLinkStatus(link2).isSymlink() ||
+ emulatingSymlinksOnWindows());
readFile(link2);
readFile(file);
assertFalse(wrapper.exists(link1));
@@ -1038,8 +1054,11 @@ public abstract class SymlinkBaseTest {
}
wrapper.rename(link, file1, Rename.OVERWRITE);
assertFalse(wrapper.exists(link));
- assertTrue(wrapper.getFileLinkStatus(file1).isSymlink());
- assertEquals(file2, wrapper.getLinkTarget(file1));
+
+ if (!emulatingSymlinksOnWindows()) {
+ assertTrue(wrapper.getFileLinkStatus(file1).isSymlink());
+ assertEquals(file2, wrapper.getLinkTarget(file1));
+ }
}
@Test(timeout=10000)
@@ -1078,16 +1097,21 @@ public abstract class SymlinkBaseTest {
@Test(timeout=10000)
/** Rename a symlink to itself */
public void testRenameSymlinkToItself() throws IOException {
+ Path file = new Path(testBaseDir1(), "file");
+ createAndWriteFile(file);
+
Path link = new Path(testBaseDir1(), "linkToFile1");
- wrapper.createSymlink(new Path("/doestNotExist"), link, false);
+ wrapper.createSymlink(file, link, false);
try {
wrapper.rename(link, link);
+ fail("Failed to get expected IOException");
} catch (IOException e) {
assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
}
// Fails with overwrite as well
try {
wrapper.rename(link, link, Rename.OVERWRITE);
+ fail("Failed to get expected IOException");
} catch (IOException e) {
assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
}
@@ -1096,6 +1120,7 @@ public abstract class SymlinkBaseTest {
@Test(timeout=10000)
/** Rename a symlink */
public void testRenameSymlink() throws IOException {
+ assumeTrue(!emulatingSymlinksOnWindows());
Path file = new Path(testBaseDir1(), "file");
Path link1 = new Path(testBaseDir1(), "linkToFile1");
Path link2 = new Path(testBaseDir1(), "linkToFile2");
@@ -1193,6 +1218,7 @@ public abstract class SymlinkBaseTest {
@Test(timeout=10000)
/** Test rename the symlink's target */
public void testRenameLinkTarget() throws IOException {
+ assumeTrue(!emulatingSymlinksOnWindows());
Path file = new Path(testBaseDir1(), "file");
Path fileNew = new Path(testBaseDir1(), "fileNew");
Path link = new Path(testBaseDir1(), "linkToFile");
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java Mon Aug 12 21:25:49 2013
@@ -793,6 +793,8 @@ public class TestFileUtil {
}
}
List<String> actualClassPaths = Arrays.asList(classPathAttr.split(" "));
+ Collections.sort(expectedClassPaths);
+ Collections.sort(actualClassPaths);
Assert.assertEquals(expectedClassPaths, actualClassPaths);
} finally {
if (jarFile != null) {
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java Mon Aug 12 21:25:49 2013
@@ -20,10 +20,12 @@ package org.apache.hadoop.fs;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
@@ -32,8 +34,11 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.shell.FsCommand;
+import org.apache.hadoop.fs.shell.PathData;
import org.apache.hadoop.io.IOUtils;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
+import org.apache.hadoop.util.Shell;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -377,6 +382,65 @@ public class TestFsShellReturnCode {
}
+ /**
+ * Tests combinations of valid and invalid user and group arguments to chown.
+ */
+ @Test
+ public void testChownUserAndGroupValidity() {
+ // This test only covers argument parsing, so override to skip processing.
+ FsCommand chown = new FsShellPermissions.Chown() {
+ @Override
+ protected void processArgument(PathData item) {
+ }
+ };
+ chown.setConf(new Configuration());
+
+ // The following are valid (no exception expected).
+ chown.run("user", "/path");
+ chown.run("user:group", "/path");
+ chown.run(":group", "/path");
+
+ // The following are valid only on Windows.
+ assertValidArgumentsOnWindows(chown, "User With Spaces", "/path");
+ assertValidArgumentsOnWindows(chown, "User With Spaces:group", "/path");
+ assertValidArgumentsOnWindows(chown, "User With Spaces:Group With Spaces",
+ "/path");
+ assertValidArgumentsOnWindows(chown, "user:Group With Spaces", "/path");
+ assertValidArgumentsOnWindows(chown, ":Group With Spaces", "/path");
+
+ // The following are invalid (exception expected).
+ assertIllegalArguments(chown, "us!er", "/path");
+ assertIllegalArguments(chown, "us^er", "/path");
+ assertIllegalArguments(chown, "user:gr#oup", "/path");
+ assertIllegalArguments(chown, "user:gr%oup", "/path");
+ assertIllegalArguments(chown, ":gr#oup", "/path");
+ assertIllegalArguments(chown, ":gr%oup", "/path");
+ }
+
+ /**
+ * Tests valid and invalid group arguments to chgrp.
+ */
+ @Test
+ public void testChgrpGroupValidity() {
+ // This test only covers argument parsing, so override to skip processing.
+ FsCommand chgrp = new FsShellPermissions.Chgrp() {
+ @Override
+ protected void processArgument(PathData item) {
+ }
+ };
+ chgrp.setConf(new Configuration());
+
+ // The following are valid (no exception expected).
+ chgrp.run("group", "/path");
+
+ // The following are valid only on Windows.
+ assertValidArgumentsOnWindows(chgrp, "Group With Spaces", "/path");
+
+ // The following are invalid (exception expected).
+ assertIllegalArguments(chgrp, ":gr#oup", "/path");
+ assertIllegalArguments(chgrp, ":gr%oup", "/path");
+ }
+
static class LocalFileSystemExtn extends LocalFileSystem {
public LocalFileSystemExtn() {
super(new RawLocalFileSystemExtn());
@@ -425,4 +489,37 @@ public class TestFsShellReturnCode {
return stat;
}
}
+
+ /**
+ * Asserts that for the given command, the given arguments are considered
+ * invalid. The expectation is that the command will throw
+ * IllegalArgumentException.
+ *
+ * @param cmd FsCommand to check
+ * @param args String... arguments to check
+ */
+ private static void assertIllegalArguments(FsCommand cmd, String... args) {
+ try {
+ cmd.run(args);
+ fail("Expected IllegalArgumentException from args: " +
+ Arrays.toString(args));
+ } catch (IllegalArgumentException e) {
+ }
+ }
+
+ /**
+ * Asserts that for the given command, the given arguments are considered valid
+ * on Windows, but invalid elsewhere.
+ *
+ * @param cmd FsCommand to check
+ * @param args String... arguments to check
+ */
+ private static void assertValidArgumentsOnWindows(FsCommand cmd,
+ String... args) {
+ if (Shell.WINDOWS) {
+ cmd.run(args);
+ } else {
+ assertIllegalArguments(cmd, args);
+ }
+ }
}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java Mon Aug 12 21:25:49 2013
@@ -82,7 +82,7 @@ public class TestHarFileSystemBasics {
localFileSystem.createNewFile(masterIndexPath);
assertTrue(localFileSystem.exists(masterIndexPath));
- writeVersionToMasterIndexImpl(HarFileSystem.VERSION);
+ writeVersionToMasterIndexImpl(HarFileSystem.VERSION, masterIndexPath);
final HarFileSystem harFileSystem = new HarFileSystem(localFileSystem);
final URI uri = new URI("har://" + harPath.toString());
@@ -90,8 +90,25 @@ public class TestHarFileSystemBasics {
return harFileSystem;
}
- private void writeVersionToMasterIndexImpl(int version) throws IOException {
- final Path masterIndexPath = new Path(harPath, "_masterindex");
+ private HarFileSystem createHarFileSystem(final Configuration conf, Path aHarPath)
+ throws Exception {
+ localFileSystem.mkdirs(aHarPath);
+ final Path indexPath = new Path(aHarPath, "_index");
+ final Path masterIndexPath = new Path(aHarPath, "_masterindex");
+ localFileSystem.createNewFile(indexPath);
+ assertTrue(localFileSystem.exists(indexPath));
+ localFileSystem.createNewFile(masterIndexPath);
+ assertTrue(localFileSystem.exists(masterIndexPath));
+
+ writeVersionToMasterIndexImpl(HarFileSystem.VERSION, masterIndexPath);
+
+ final HarFileSystem harFileSystem = new HarFileSystem(localFileSystem);
+ final URI uri = new URI("har://" + aHarPath.toString());
+ harFileSystem.initialize(uri, conf);
+ return harFileSystem;
+ }
+
+ private void writeVersionToMasterIndexImpl(int version, Path masterIndexPath) throws IOException {
// write Har version into the master index:
final FSDataOutputStream fsdos = localFileSystem.create(masterIndexPath);
try {
@@ -173,6 +190,29 @@ public class TestHarFileSystemBasics {
}
@Test
+ public void testPositiveLruMetadataCacheFs() throws Exception {
+ // Init 2nd har file system on the same underlying FS, so the
+ // metadata gets reused:
+ HarFileSystem hfs = new HarFileSystem(localFileSystem);
+ URI uri = new URI("har://" + harPath.toString());
+ hfs.initialize(uri, new Configuration());
+ // the metadata should be reused from cache:
+ assertTrue(hfs.getMetadata() == harFileSystem.getMetadata());
+
+ // Create one har more than the cache can hold; the last creation should evict the first entry from the cache
+ for (int i = 0; i <= hfs.METADATA_CACHE_ENTRIES_DEFAULT; i++) {
+ Path p = new Path(rootPath, "path1/path2/my" + i +".har");
+ createHarFileSystem(conf, p);
+ }
+
+ // The first entry should not be in the cache anymore:
+ hfs = new HarFileSystem(localFileSystem);
+ uri = new URI("har://" + harPath.toString());
+ hfs.initialize(uri, new Configuration());
+ assertTrue(hfs.getMetadata() != harFileSystem.getMetadata());
+ }
+
+ @Test
public void testPositiveInitWithoutUnderlyingFS() throws Exception {
// Init HarFS with no constructor arg, so that the underlying FS object
// is created on demand or got from cache in #initialize() method.
@@ -218,7 +258,7 @@ public class TestHarFileSystemBasics {
// time with 1 second accuracy:
Thread.sleep(1000);
// write an unsupported version:
- writeVersionToMasterIndexImpl(7777);
+ writeVersionToMasterIndexImpl(7777, new Path(harPath, "_masterindex"));
// init the Har:
final HarFileSystem hfs = new HarFileSystem(localFileSystem);
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java Mon Aug 12 21:25:49 2013
@@ -417,6 +417,88 @@ public class TestLocalFileSystem {
stm.close();
}
}
+
+ /**
+ * Tests a simple rename of a directory.
+ */
+ @Test
+ public void testRenameDirectory() throws IOException {
+ Path src = new Path(TEST_ROOT_DIR, "dir1");
+ Path dst = new Path(TEST_ROOT_DIR, "dir2");
+ fileSys.delete(src, true);
+ fileSys.delete(dst, true);
+ assertTrue(fileSys.mkdirs(src));
+ assertTrue(fileSys.rename(src, dst));
+ assertTrue(fileSys.exists(dst));
+ assertFalse(fileSys.exists(src));
+ }
+
+ /**
+ * Tests that renaming a directory replaces the destination if the destination
+ * is an existing empty directory.
+ *
+ * Before:
+ * /dir1
+ * /file1
+ * /file2
+ * /dir2
+ *
+ * After rename("/dir1", "/dir2"):
+ * /dir2
+ * /file1
+ * /file2
+ */
+ @Test
+ public void testRenameReplaceExistingEmptyDirectory() throws IOException {
+ Path src = new Path(TEST_ROOT_DIR, "dir1");
+ Path dst = new Path(TEST_ROOT_DIR, "dir2");
+ fileSys.delete(src, true);
+ fileSys.delete(dst, true);
+ assertTrue(fileSys.mkdirs(src));
+ writeFile(fileSys, new Path(src, "file1"), 1);
+ writeFile(fileSys, new Path(src, "file2"), 1);
+ assertTrue(fileSys.mkdirs(dst));
+ assertTrue(fileSys.rename(src, dst));
+ assertTrue(fileSys.exists(dst));
+ assertTrue(fileSys.exists(new Path(dst, "file1")));
+ assertTrue(fileSys.exists(new Path(dst, "file2")));
+ assertFalse(fileSys.exists(src));
+ }
+
+ /**
+ * Tests that renaming a directory to an existing directory that is not empty
+ * results in a full copy of source to destination.
+ *
+ * Before:
+ * /dir1
+ * /dir2
+ * /dir3
+ * /file1
+ * /file2
+ *
+ * After rename("/dir1/dir2/dir3", "/dir1"):
+ * /dir1
+ * /dir3
+ * /file1
+ * /file2
+ */
+ @Test
+ public void testRenameMoveToExistingNonEmptyDirectory() throws IOException {
+ Path src = new Path(TEST_ROOT_DIR, "dir1/dir2/dir3");
+ Path dst = new Path(TEST_ROOT_DIR, "dir1");
+ fileSys.delete(src, true);
+ fileSys.delete(dst, true);
+ assertTrue(fileSys.mkdirs(src));
+ writeFile(fileSys, new Path(src, "file1"), 1);
+ writeFile(fileSys, new Path(src, "file2"), 1);
+ assertTrue(fileSys.exists(dst));
+ assertTrue(fileSys.rename(src, dst));
+ assertTrue(fileSys.exists(dst));
+ assertTrue(fileSys.exists(new Path(dst, "dir3")));
+ assertTrue(fileSys.exists(new Path(dst, "dir3/file1")));
+ assertTrue(fileSys.exists(new Path(dst, "dir3/file2")));
+ assertFalse(fileSys.exists(src));
+ }
private void verifyRead(FSDataInputStream stm, byte[] fileContents,
int seekOff, int toRead) throws IOException {
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java Mon Aug 12 21:25:49 2013
@@ -28,11 +28,38 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.io.AvroTestUtil;
import org.apache.hadoop.util.Shell;
-import junit.framework.TestCase;
+import com.google.common.base.Joiner;
-import static org.junit.Assert.fail;
+import junit.framework.TestCase;
public class TestPath extends TestCase {
+ /**
+ * Merge a bunch of Path objects into a sorted semicolon-separated
+ * path string.
+ */
+ public static String mergeStatuses(Path paths[]) {
+ String pathStrings[] = new String[paths.length];
+ int i = 0;
+ for (Path path : paths) {
+ pathStrings[i++] = path.toUri().getPath();
+ }
+ Arrays.sort(pathStrings);
+ return Joiner.on(";").join(pathStrings);
+ }
+
+ /**
+ * Merge a bunch of FileStatus objects into a sorted semicolon-separated
+ * path string.
+ */
+ public static String mergeStatuses(FileStatus statuses[]) {
+ Path paths[] = new Path[statuses.length];
+ int i = 0;
+ for (FileStatus status : statuses) {
+ paths[i++] = status.getPath();
+ }
+ return mergeStatuses(paths);
+ }
+
@Test (timeout = 30000)
public void testToString() {
toStringTest("/");
@@ -352,10 +379,11 @@ public class TestPath extends TestCase {
// ensure globStatus with "*" finds all dir contents
stats = lfs.globStatus(new Path(testRoot, "*"));
Arrays.sort(stats);
- assertEquals(paths.length, stats.length);
- for (int i=0; i < paths.length; i++) {
- assertEquals(paths[i].getParent(), stats[i].getPath());
+ Path parentPaths[] = new Path[paths.length];
+ for (int i = 0; i < paths.length; i++) {
+ parentPaths[i] = paths[i].getParent();
}
+ assertEquals(mergeStatuses(parentPaths), mergeStatuses(stats));
// ensure that globStatus with an escaped "\*" only finds "*"
stats = lfs.globStatus(new Path(testRoot, "\\*"));
@@ -365,9 +393,7 @@ public class TestPath extends TestCase {
// try to glob the inner file for all dirs
stats = lfs.globStatus(new Path(testRoot, "*/f"));
assertEquals(paths.length, stats.length);
- for (int i=0; i < paths.length; i++) {
- assertEquals(paths[i], stats[i].getPath());
- }
+ assertEquals(mergeStatuses(paths), mergeStatuses(stats));
// try to get the inner file for only the "*" dir
stats = lfs.globStatus(new Path(testRoot, "\\*/f"));
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java Mon Aug 12 21:25:49 2013
@@ -30,6 +30,7 @@ import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.Shell;
import org.junit.Test;
/**
@@ -62,6 +63,16 @@ abstract public class TestSymlinkLocalFS
}
@Override
+ protected boolean emulatingSymlinksOnWindows() {
+ // Java 6 on Windows has very poor symlink support. In particular,
+ // File#length and File#renameTo do not work as expected.
+ // (see HADOOP-9061 for additional details)
+ // Hence some symlink tests will be skipped.
+ //
+ return (Shell.WINDOWS && !Shell.isJava7OrAbove());
+ }
+
+ @Override
public void testCreateDanglingLink() throws IOException {
// Dangling symlinks are not supported on Windows local file system.
assumeTrue(!Path.WINDOWS);
@@ -171,6 +182,7 @@ abstract public class TestSymlinkLocalFS
* file scheme (eg file://host/tmp/test).
*/
public void testGetLinkStatusPartQualTarget() throws IOException {
+ assumeTrue(!emulatingSymlinksOnWindows());
Path fileAbs = new Path(testBaseDir1()+"/file");
Path fileQual = new Path(testURI().toString(), fileAbs);
Path dir = new Path(testBaseDir1());
@@ -205,4 +217,14 @@ abstract public class TestSymlinkLocalFS
// Excpected.
}
}
+
+ /** Test create symlink to . */
+ @Override
+ public void testCreateLinkToDot() throws IOException {
+ try {
+ super.testCreateLinkToDot();
+ } catch (IllegalArgumentException iae) {
+ // Expected.
+ }
+ }
}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java Mon Aug 12 21:25:49 2013
@@ -17,8 +17,13 @@
*/
package org.apache.hadoop.fs;
+import org.apache.hadoop.util.Shell;
import org.junit.BeforeClass;
+import java.io.IOException;
+
+import static org.junit.Assume.assumeTrue;
+
public class TestSymlinkLocalFSFileContext extends TestSymlinkLocalFS {
@BeforeClass
@@ -27,4 +32,9 @@ public class TestSymlinkLocalFSFileConte
wrapper = new FileContextTestWrapper(context);
}
+ @Override
+ public void testRenameFileWithDestParentSymlink() throws IOException {
+ assumeTrue(!Shell.WINDOWS);
+ super.testRenameFileWithDestParentSymlink();
+ }
}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java Mon Aug 12 21:25:49 2013
@@ -17,13 +17,20 @@
*/
package org.apache.hadoop.fs;
+import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Options.Rename;
+import org.apache.hadoop.util.Shell;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeTrue;
+
public class TestSymlinkLocalFSFileSystem extends TestSymlinkLocalFS {
@BeforeClass
@@ -54,4 +61,36 @@ public class TestSymlinkLocalFSFileSyste
@Override
@Test(timeout=1000)
public void testAccessFileViaInterSymlinkAbsTarget() throws IOException {}
+
+ @Override
+ public void testRenameFileWithDestParentSymlink() throws IOException {
+ assumeTrue(!Shell.WINDOWS);
+ super.testRenameFileWithDestParentSymlink();
+ }
+
+ @Override
+ @Test(timeout=10000)
+ /** Rename a symlink to itself */
+ public void testRenameSymlinkToItself() throws IOException {
+ Path file = new Path(testBaseDir1(), "file");
+ createAndWriteFile(file);
+
+ Path link = new Path(testBaseDir1(), "linkToFile1");
+ wrapper.createSymlink(file, link, false);
+ try {
+ wrapper.rename(link, link);
+ fail("Failed to get expected IOException");
+ } catch (IOException e) {
+ assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
+ }
+ // Fails with overwrite as well
+ try {
+ wrapper.rename(link, link, Rename.OVERWRITE);
+ fail("Failed to get expected IOException");
+ } catch (IOException e) {
+ // Todo: Fix this test when HADOOP-9819 is fixed.
+ assertTrue(unwrapException(e) instanceof FileAlreadyExistsException ||
+ unwrapException(e) instanceof FileNotFoundException);
+ }
+ }
}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java Mon Aug 12 21:25:49 2013
@@ -132,6 +132,14 @@ public class TestCodec {
public void testLz4Codec() throws IOException {
if (NativeCodeLoader.isNativeCodeLoaded()) {
if (Lz4Codec.isNativeCodeLoaded()) {
+ conf.setBoolean(
+ CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
+ false);
+ codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.Lz4Codec");
+ codecTest(conf, seed, count, "org.apache.hadoop.io.compress.Lz4Codec");
+ conf.setBoolean(
+ CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
+ true);
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.Lz4Codec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.Lz4Codec");
} else {
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Mon Aug 12 21:25:49 2013
@@ -84,7 +84,7 @@ public class TestNativeIO {
}
assertEquals(expectedOwner, owner);
assertNotNull(stat.getGroup());
- assertTrue(!"".equals(stat.getGroup()));
+ assertTrue(!stat.getGroup().isEmpty());
assertEquals("Stat mode field should indicate a regular file",
NativeIO.POSIX.Stat.S_IFREG,
stat.getMode() & NativeIO.POSIX.Stat.S_IFMT);
@@ -118,7 +118,7 @@ public class TestNativeIO {
NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
assertEquals(System.getProperty("user.name"), stat.getOwner());
assertNotNull(stat.getGroup());
- assertTrue(!"".equals(stat.getGroup()));
+ assertTrue(!stat.getGroup().isEmpty());
assertEquals("Stat mode field should indicate a regular file",
NativeIO.POSIX.Stat.S_IFREG,
stat.getMode() & NativeIO.POSIX.Stat.S_IFMT);
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java Mon Aug 12 21:25:49 2013
@@ -28,7 +28,6 @@ import org.apache.hadoop.ipc.StandbyExce
import org.apache.hadoop.util.ThreadUtil;
import org.junit.Test;
-@SuppressWarnings("unchecked")
public class TestFailoverProxy {
public static class FlipFlopProxyProvider<T> implements FailoverProxyProvider<T> {
@@ -78,21 +77,35 @@ public class TestFailoverProxy {
@Override
public RetryAction shouldRetry(Exception e, int retries, int failovers,
- boolean isMethodIdempotent) {
+ boolean isIdempotentOrAtMostOnce) {
return failovers < 1 ? RetryAction.FAILOVER_AND_RETRY : RetryAction.FAIL;
}
}
+ private static FlipFlopProxyProvider<UnreliableInterface>
+ newFlipFlopProxyProvider() {
+ return new FlipFlopProxyProvider<UnreliableInterface>(
+ UnreliableInterface.class,
+ new UnreliableImplementation("impl1"),
+ new UnreliableImplementation("impl2"));
+ }
+
+ private static FlipFlopProxyProvider<UnreliableInterface>
+ newFlipFlopProxyProvider(TypeOfExceptionToFailWith t1,
+ TypeOfExceptionToFailWith t2) {
+ return new FlipFlopProxyProvider<UnreliableInterface>(
+ UnreliableInterface.class,
+ new UnreliableImplementation("impl1", t1),
+ new UnreliableImplementation("impl2", t2));
+ }
+
@Test
public void testSuccedsOnceThenFailOver() throws UnreliableException,
IOException, StandbyException {
- UnreliableInterface unreliable = (UnreliableInterface)RetryProxy
- .create(UnreliableInterface.class,
- new FlipFlopProxyProvider(UnreliableInterface.class,
- new UnreliableImplementation("impl1"),
- new UnreliableImplementation("impl2")),
- new FailOverOnceOnAnyExceptionPolicy());
+ UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create(
+ UnreliableInterface.class, newFlipFlopProxyProvider(),
+ new FailOverOnceOnAnyExceptionPolicy());
assertEquals("impl1", unreliable.succeedsOnceThenFailsReturningString());
assertEquals("impl2", unreliable.succeedsOnceThenFailsReturningString());
@@ -107,12 +120,10 @@ public class TestFailoverProxy {
@Test
public void testSucceedsTenTimesThenFailOver() throws UnreliableException,
IOException, StandbyException {
- UnreliableInterface unreliable = (UnreliableInterface)RetryProxy
- .create(UnreliableInterface.class,
- new FlipFlopProxyProvider(UnreliableInterface.class,
- new UnreliableImplementation("impl1"),
- new UnreliableImplementation("impl2")),
- new FailOverOnceOnAnyExceptionPolicy());
+ UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create(
+ UnreliableInterface.class,
+ newFlipFlopProxyProvider(),
+ new FailOverOnceOnAnyExceptionPolicy());
for (int i = 0; i < 10; i++) {
assertEquals("impl1", unreliable.succeedsTenTimesThenFailsReturningString());
@@ -123,11 +134,9 @@ public class TestFailoverProxy {
@Test
public void testNeverFailOver() throws UnreliableException,
IOException, StandbyException {
- UnreliableInterface unreliable = (UnreliableInterface)RetryProxy
- .create(UnreliableInterface.class,
- new FlipFlopProxyProvider(UnreliableInterface.class,
- new UnreliableImplementation("impl1"),
- new UnreliableImplementation("impl2")),
+ UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create(
+ UnreliableInterface.class,
+ newFlipFlopProxyProvider(),
RetryPolicies.TRY_ONCE_THEN_FAIL);
unreliable.succeedsOnceThenFailsReturningString();
@@ -142,11 +151,9 @@ public class TestFailoverProxy {
@Test
public void testFailoverOnStandbyException()
throws UnreliableException, IOException, StandbyException {
- UnreliableInterface unreliable = (UnreliableInterface)RetryProxy
- .create(UnreliableInterface.class,
- new FlipFlopProxyProvider(UnreliableInterface.class,
- new UnreliableImplementation("impl1"),
- new UnreliableImplementation("impl2")),
+ UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create(
+ UnreliableInterface.class,
+ newFlipFlopProxyProvider(),
RetryPolicies.failoverOnNetworkException(1));
assertEquals("impl1", unreliable.succeedsOnceThenFailsReturningString());
@@ -160,9 +167,9 @@ public class TestFailoverProxy {
unreliable = (UnreliableInterface)RetryProxy
.create(UnreliableInterface.class,
- new FlipFlopProxyProvider(UnreliableInterface.class,
- new UnreliableImplementation("impl1", TypeOfExceptionToFailWith.STANDBY_EXCEPTION),
- new UnreliableImplementation("impl2", TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION)),
+ newFlipFlopProxyProvider(
+ TypeOfExceptionToFailWith.STANDBY_EXCEPTION,
+ TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION),
RetryPolicies.failoverOnNetworkException(1));
assertEquals("impl1", unreliable.succeedsOnceThenFailsReturningString());
@@ -173,11 +180,11 @@ public class TestFailoverProxy {
@Test
public void testFailoverOnNetworkExceptionIdempotentOperation()
throws UnreliableException, IOException, StandbyException {
- UnreliableInterface unreliable = (UnreliableInterface)RetryProxy
- .create(UnreliableInterface.class,
- new FlipFlopProxyProvider(UnreliableInterface.class,
- new UnreliableImplementation("impl1", TypeOfExceptionToFailWith.IO_EXCEPTION),
- new UnreliableImplementation("impl2", TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION)),
+ UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create(
+ UnreliableInterface.class,
+ newFlipFlopProxyProvider(
+ TypeOfExceptionToFailWith.IO_EXCEPTION,
+ TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION),
RetryPolicies.failoverOnNetworkException(1));
assertEquals("impl1", unreliable.succeedsOnceThenFailsReturningString());
@@ -204,9 +211,9 @@ public class TestFailoverProxy {
public void testExceptionPropagatedForNonIdempotentVoid() throws Exception {
UnreliableInterface unreliable = (UnreliableInterface)RetryProxy
.create(UnreliableInterface.class,
- new FlipFlopProxyProvider(UnreliableInterface.class,
- new UnreliableImplementation("impl1", TypeOfExceptionToFailWith.IO_EXCEPTION),
- new UnreliableImplementation("impl2", TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION)),
+ newFlipFlopProxyProvider(
+ TypeOfExceptionToFailWith.IO_EXCEPTION,
+ TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION),
RetryPolicies.failoverOnNetworkException(1));
try {
@@ -268,7 +275,8 @@ public class TestFailoverProxy {
*/
@Test
public void testConcurrentMethodFailures() throws InterruptedException {
- FlipFlopProxyProvider proxyProvider = new FlipFlopProxyProvider(
+ FlipFlopProxyProvider<UnreliableInterface> proxyProvider
+ = new FlipFlopProxyProvider<UnreliableInterface>(
UnreliableInterface.class,
new SynchronizedUnreliableImplementation("impl1",
TypeOfExceptionToFailWith.STANDBY_EXCEPTION,
@@ -305,7 +313,8 @@ public class TestFailoverProxy {
final UnreliableImplementation impl1 = new UnreliableImplementation("impl1",
TypeOfExceptionToFailWith.STANDBY_EXCEPTION);
- FlipFlopProxyProvider proxyProvider = new FlipFlopProxyProvider(
+ FlipFlopProxyProvider<UnreliableInterface> proxyProvider
+ = new FlipFlopProxyProvider<UnreliableInterface>(
UnreliableInterface.class,
impl1,
new UnreliableImplementation("impl2",
@@ -333,13 +342,13 @@ public class TestFailoverProxy {
*/
@Test
public void testExpectedIOException() {
- UnreliableInterface unreliable = (UnreliableInterface)RetryProxy
- .create(UnreliableInterface.class,
- new FlipFlopProxyProvider(UnreliableInterface.class,
- new UnreliableImplementation("impl1", TypeOfExceptionToFailWith.REMOTE_EXCEPTION),
- new UnreliableImplementation("impl2", TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION)),
- RetryPolicies.failoverOnNetworkException(
- RetryPolicies.TRY_ONCE_THEN_FAIL, 10, 1000, 10000));
+ UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create(
+ UnreliableInterface.class,
+ newFlipFlopProxyProvider(
+ TypeOfExceptionToFailWith.REMOTE_EXCEPTION,
+ TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION),
+ RetryPolicies.failoverOnNetworkException(
+ RetryPolicies.TRY_ONCE_THEN_FAIL, 10, 1000, 10000));
try {
unreliable.failsIfIdentifierDoesntMatch("no-such-identifier");
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java Mon Aug 12 21:25:49 2013
@@ -35,6 +35,7 @@ import junit.framework.TestCase;
import org.apache.hadoop.io.retry.UnreliableInterface.FatalException;
import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException;
+import org.apache.hadoop.ipc.ProtocolTranslator;
import org.apache.hadoop.ipc.RemoteException;
public class TestRetryProxy extends TestCase {
@@ -58,6 +59,38 @@ public class TestRetryProxy extends Test
}
}
+ /**
+ * Test for {@link RetryInvocationHandler#isRpcInvocation(Object)}
+ */
+ public void testRpcInvocation() throws Exception {
+ // For a proxy, the method should return true
+ final UnreliableInterface unreliable = (UnreliableInterface)
+ RetryProxy.create(UnreliableInterface.class, unreliableImpl, RETRY_FOREVER);
+ assertTrue(RetryInvocationHandler.isRpcInvocation(unreliable));
+
+ // Embed the proxy in ProtocolTranslator
+ ProtocolTranslator xlator = new ProtocolTranslator() {
+ int count = 0;
+ @Override
+ public Object getUnderlyingProxyObject() {
+ count++;
+ return unreliable;
+ }
+ @Override
+ public String toString() {
+ return "" + count;
+ }
+ };
+
+ // For a proxy wrapped in ProtocolTranslator, the method should return true
+ assertTrue(RetryInvocationHandler.isRpcInvocation(xlator));
+ // Ensure underlying proxy was looked at
+ assertEquals(xlator.toString(), "1");
+
+ // For a non-proxy object, the method must return false
+ assertFalse(RetryInvocationHandler.isRpcInvocation(new Object()));
+ }
+
public void testRetryForever() throws UnreliableException {
UnreliableInterface unreliable = (UnreliableInterface)
RetryProxy.create(UnreliableInterface.class, unreliableImpl, RETRY_FOREVER);
@@ -138,7 +171,7 @@ public class TestRetryProxy extends Test
}
}
- public void testRetryByRemoteException() throws UnreliableException {
+ public void testRetryByRemoteException() {
Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap =
Collections.<Class<? extends Exception>, RetryPolicy>singletonMap(FatalException.class, TRY_ONCE_THEN_FAIL);
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java Mon Aug 12 21:25:49 2013
@@ -26,6 +26,8 @@ import org.apache.hadoop.ipc.StandbyExce
public interface UnreliableInterface {
public static class UnreliableException extends Exception {
+ private static final long serialVersionUID = 1L;
+
private String identifier;
public UnreliableException() {
@@ -43,6 +45,7 @@ public interface UnreliableInterface {
}
public static class FatalException extends UnreliableException {
+ private static final long serialVersionUID = 1L;
// no body
}
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java Mon Aug 12 21:25:49 2013
@@ -189,7 +189,7 @@ public class MiniRPCBenchmark {
MiniProtocol client = null;
try {
long start = Time.now();
- client = (MiniProtocol) RPC.getProxy(MiniProtocol.class,
+ client = RPC.getProxy(MiniProtocol.class,
MiniProtocol.versionID, addr, conf);
long end = Time.now();
return end - start;
@@ -211,7 +211,7 @@ public class MiniRPCBenchmark {
client = proxyUserUgi.doAs(new PrivilegedExceptionAction<MiniProtocol>() {
@Override
public MiniProtocol run() throws IOException {
- MiniProtocol p = (MiniProtocol) RPC.getProxy(MiniProtocol.class,
+ MiniProtocol p = RPC.getProxy(MiniProtocol.class,
MiniProtocol.versionID, addr, conf);
Token<TestDelegationTokenIdentifier> token;
token = p.getDelegationToken(new Text(RENEWER));
@@ -239,7 +239,7 @@ public class MiniRPCBenchmark {
client = currentUgi.doAs(new PrivilegedExceptionAction<MiniProtocol>() {
@Override
public MiniProtocol run() throws IOException {
- return (MiniProtocol) RPC.getProxy(MiniProtocol.class,
+ return RPC.getProxy(MiniProtocol.class,
MiniProtocol.versionID, addr, conf);
}
});
Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java?rev=1513258&r1=1513257&r2=1513258&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java Mon Aug 12 21:25:49 2013
@@ -31,7 +31,6 @@ import org.apache.commons.cli.HelpFormat
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.ipc.RPC.Server;
@@ -55,7 +54,7 @@ import com.google.protobuf.BlockingServi
* Benchmark for protobuf RPC.
* Run with --help option for usage.
*/
-public class RPCCallBenchmark implements Tool, Configurable {
+public class RPCCallBenchmark implements Tool {
private Configuration conf;
private AtomicLong callCount = new AtomicLong(0);
private static ThreadMXBean threadBean =
@@ -403,7 +402,7 @@ public class RPCCallBenchmark implements
}
};
} else if (opts.rpcEngine == WritableRpcEngine.class) {
- final TestProtocol proxy = (TestProtocol)RPC.getProxy(
+ final TestProtocol proxy = RPC.getProxy(
TestProtocol.class, TestProtocol.versionID, addr, conf);
return new RpcServiceWrapper() {
@Override