Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2013/10/30 23:22:15 UTC

svn commit: r1537330 [5/7] - in /hadoop/common/branches/YARN-321/hadoop-common-project: ./ hadoop-annotations/ hadoop-auth/ hadoop-common/ hadoop-common/dev-support/ hadoop-common/src/ hadoop-common/src/main/bin/ hadoop-common/src/main/conf/ hadoop-com...

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java Wed Oct 30 22:21:59 2013
@@ -18,14 +18,6 @@
 
 package org.apache.hadoop.fs;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URI;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Shell;
@@ -34,6 +26,14 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.junit.Assert.*;
+
 /**
  * This test class checks basic operations with {@link HarFileSystem} including
  * various initialization cases, getters, and modification methods.
@@ -69,7 +69,7 @@ public class TestHarFileSystemBasics {
   /*
    * creates and returns fully initialized HarFileSystem
    */
-  private HarFileSystem createHarFileSysten(final Configuration conf)
+  private HarFileSystem createHarFileSystem(final Configuration conf)
       throws Exception {
     localFileSystem = FileSystem.getLocal(conf);
     localFileSystem.initialize(new URI("file:///"), conf);
@@ -130,7 +130,7 @@ public class TestHarFileSystemBasics {
     }
     // create Har to test:
     conf = new Configuration();
-    harFileSystem = createHarFileSysten(conf);
+    harFileSystem = createHarFileSystem(conf);
   }
 
   @After
@@ -221,6 +221,43 @@ public class TestHarFileSystemBasics {
     hfs.initialize(uri, new Configuration());
   }
 
+  @Test
+  public void testPositiveListFilesNotEndInColon() throws Exception {
+    // re-initialize the har file system with host name
+    // make sure the qualified path name does not append ":" at the end of host name
+    final URI uri = new URI("har://file-localhost" + harPath.toString());
+    harFileSystem.initialize(uri, conf);
+    Path p1 = new Path("har://file-localhost" + harPath.toString());
+    Path p2 = harFileSystem.makeQualified(p1);
+    assertTrue(p2.toUri().toString().startsWith("har://file-localhost/"));
+  }
+
+  @Test
+  public void testListLocatedStatus() throws Exception {
+    String testHarPath = this.getClass().getResource("/test.har").getPath();
+    URI uri = new URI("har://" + testHarPath);
+    HarFileSystem hfs = new HarFileSystem(localFileSystem);
+    hfs.initialize(uri, new Configuration());
+
+    // test.har has the following contents:
+    //   dir1/1.txt
+    //   dir1/2.txt
+    Set<String> expectedFileNames = new HashSet<String>();
+    expectedFileNames.add("1.txt");
+    expectedFileNames.add("2.txt");
+
+    // List contents of dir, and ensure we find all expected files
+    Path path = new Path("dir1");
+    RemoteIterator<LocatedFileStatus> fileList = hfs.listLocatedStatus(path);
+    while (fileList.hasNext()) {
+      String fileName = fileList.next().getPath().getName();
+      assertTrue(fileName + " not in expected files list", expectedFileNames.contains(fileName));
+      expectedFileNames.remove(fileName);
+    }
+    assertEquals("Didn't find all of the expected file names: " + expectedFileNames,
+                 0, expectedFileNames.size());
+  }
+
   // ========== Negative:
 
   @Test
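
For reference, the listing pattern exercised by the new testListLocatedStatus()
above looks roughly like this outside the test harness. This is only a sketch:
the archive location /tmp/example.har and the directory name dir1 below are
placeholders, not anything shipped with the patch.

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.HarFileSystem;
    import org.apache.hadoop.fs.LocatedFileStatus;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.RemoteIterator;

    public class HarListingSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Wrap the local file system, then point the HAR file system at the archive.
        HarFileSystem hfs = new HarFileSystem(FileSystem.getLocal(conf));
        hfs.initialize(new URI("har:///tmp/example.har"), conf);
        // Enumerate the entries of a directory stored inside the archive.
        RemoteIterator<LocatedFileStatus> files =
            hfs.listLocatedStatus(new Path("dir1"));
        while (files.hasNext()) {
          System.out.println(files.next().getPath());
        }
      }
    }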

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java Wed Oct 30 22:21:59 2013
@@ -49,6 +49,11 @@ public class TestLocalFSFileContextMainO
     FileContext fc1 = FileContext.getLocalFSFileContext();
     Assert.assertTrue(fc1 != fc);
   }
+  
+  @Override
+  protected boolean listCorruptedBlocksSupported() {
+    return false;
+  }
 
   @Test
   public void testDefaultFilePermission() throws IOException {

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java Wed Oct 30 22:21:59 2013
@@ -460,6 +460,13 @@ public class TestPath extends TestCase {
       Path.mergePaths(new Path("/C:/foo"),
         new Path("/C:/bar")));
 
+    assertEquals(new Path(Shell.WINDOWS ? "/C:/bar" : "/C:/C:/bar"),
+        Path.mergePaths(new Path("/C:/"),
+          new Path("/C:/bar")));
+
+    assertEquals(new Path("/bar"),
+        Path.mergePaths(new Path("/"), new Path("/bar")));
+
     assertEquals(new Path("viewfs:///foo/bar"),
       Path.mergePaths(new Path("viewfs:///foo"),
         new Path("file:///bar")));
@@ -468,4 +475,16 @@ public class TestPath extends TestCase {
       Path.mergePaths(new Path("viewfs://vfsauthority/foo"),
         new Path("file://fileauthority/bar")));
   }
+
+  @Test (timeout = 30000)
+  public void testIsWindowsAbsolutePath() {
+    if (!Shell.WINDOWS) return;
+    assertTrue(Path.isWindowsAbsolutePath("C:\\test", false));
+    assertTrue(Path.isWindowsAbsolutePath("C:/test", false));
+    assertTrue(Path.isWindowsAbsolutePath("/C:/test", true));
+    assertFalse(Path.isWindowsAbsolutePath("/test", false));
+    assertFalse(Path.isWindowsAbsolutePath("/test", true));
+    assertFalse(Path.isWindowsAbsolutePath("C:test", false));
+    assertFalse(Path.isWindowsAbsolutePath("/C:test", true));
+  }
 }
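
The two new mergePaths() cases above rely on the same rule as the surrounding
assertions: Path.mergePaths() keeps the scheme and authority of its first
argument and appends only the path component of the second. A minimal
illustration of that rule (the viewfs case mirrors an assertion visible in the
hunk above):

    import org.apache.hadoop.fs.Path;

    public class MergePathsSketch {
      public static void main(String[] args) {
        // The path component of the second argument is appended to the first ...
        Path merged = Path.mergePaths(new Path("/foo"), new Path("/bar"));
        System.out.println(merged.equals(new Path("/foo/bar")));          // true

        // ... while scheme and authority come from the first argument only.
        merged = Path.mergePaths(new Path("viewfs:///foo"), new Path("file:///bar"));
        System.out.println(merged.equals(new Path("viewfs:///foo/bar"))); // true
      }
    }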

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java Wed Oct 30 22:21:59 2013
@@ -19,6 +19,7 @@ package org.apache.hadoop.fs;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.fail;
 
 import java.io.BufferedReader;
@@ -26,10 +27,11 @@ import java.io.FileNotFoundException;
 import java.io.StringReader;
 
 import org.apache.hadoop.conf.Configuration;
+import org.junit.Assume;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-public class TestStat {
+public class TestStat extends FileSystemTestHelper {
 
   private static Stat stat;
 
@@ -113,6 +115,7 @@ public class TestStat {
 
   @Test(timeout=10000)
   public void testStatFileNotFound() throws Exception {
+    Assume.assumeTrue(Stat.isAvailable());
     try {
       stat.getFileStatus();
       fail("Expected FileNotFoundException");
@@ -125,4 +128,21 @@ public class TestStat {
   public void testStatEnvironment() throws Exception {
     assertEquals(stat.getEnvironment("LANG"), "C");
   }
+
+  @Test(timeout=10000)
+  public void testStat() throws Exception {
+    Assume.assumeTrue(Stat.isAvailable());
+    FileSystem fs = FileSystem.getLocal(new Configuration());
+    Path testDir = new Path(getTestRootPath(fs), "teststat");
+    fs.mkdirs(testDir);
+    Path sub1 = new Path(testDir, "sub1");
+    Path sub2 = new Path(testDir, "sub2");
+    fs.mkdirs(sub1);
+    fs.createSymlink(sub1, sub2, false);
+    FileStatus stat1 = new Stat(sub1, 4096l, false, fs).getFileStatus();
+    FileStatus stat2 = new Stat(sub2, 0, false, fs).getFileStatus();
+    assertTrue(stat1.isDirectory());
+    assertFalse(stat2.isDirectory());
+    fs.delete(testDir, true);
+  }
 }
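
The new testStat() drives the Stat helper directly: guard on Stat.isAvailable()
(the wrapped stat(1) binary is not usable on every platform) and then ask for a
FileStatus. A rough sketch of that pattern; Stat is not public API, so like the
test it has to sit in package org.apache.hadoop.fs, and the path used here is a
placeholder.

    package org.apache.hadoop.fs;

    import org.apache.hadoop.conf.Configuration;

    public class StatSketch {
      public static void main(String[] args) throws Exception {
        if (!Stat.isAvailable()) {
          return;                                   // no usable stat(1) here
        }
        FileSystem fs = FileSystem.getLocal(new Configuration());
        Path dir = new Path("/tmp/stat-sketch");    // placeholder path
        fs.mkdirs(dir);
        // Block size 4096; the boolean controls whether symlinks are followed
        // (false here, matching the test above).
        FileStatus status = new Stat(dir, 4096L, false, fs).getFileStatus();
        System.out.println(status.isDirectory());   // true for a plain directory
        fs.delete(dir, true);
      }
    }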

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java Wed Oct 30 22:21:59 2013
@@ -18,13 +18,9 @@
 package org.apache.hadoop.fs.viewfs;
 
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileContextMainOperationsBaseTest;
-import org.apache.hadoop.fs.FileContextTestHelper;
-import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.viewfs.ConfigUtil;
 
 import org.junit.After;
 import org.junit.Before;
@@ -49,4 +45,9 @@ public class TestFcMainOperationsLocalFs
     super.tearDown();
     ViewFsTestSetup.tearDownForViewFsLocalFs(fileContextTestHelper);
   }
+  
+  @Override
+  protected boolean listCorruptedBlocksSupported() {
+    return false;
+  }
 }

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java Wed Oct 30 22:21:59 2013
@@ -53,6 +53,8 @@ import org.junit.Before;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * Copy-paste of ClientBase from ZooKeeper, but without any of the
  * JMXEnv verification. There seems to be a bug ZOOKEEPER-1438
@@ -111,7 +113,9 @@ public abstract class ClientBaseWithFixe
         synchronized boolean isConnected() {
             return connected;
         }
-        synchronized void waitForConnected(long timeout) throws InterruptedException, TimeoutException {
+        @VisibleForTesting
+        public synchronized void waitForConnected(long timeout)
+            throws InterruptedException, TimeoutException {
             long expire = Time.now() + timeout;
             long left = timeout;
             while(!connected && left > 0) {
@@ -123,7 +127,9 @@ public abstract class ClientBaseWithFixe
 
             }
         }
-        synchronized void waitForDisconnected(long timeout) throws InterruptedException, TimeoutException {
+        @VisibleForTesting
+        public synchronized void waitForDisconnected(long timeout)
+            throws InterruptedException, TimeoutException {
             long expire = Time.now() + timeout;
             long left = timeout;
             while(connected && left > 0) {

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java Wed Oct 30 22:21:59 2013
@@ -101,8 +101,12 @@ public class HttpServerFunctionalTest ex
     String webapps = System.getProperty(TEST_BUILD_WEBAPPS, BUILD_WEBAPPS_DIR);
     File testWebappDir = new File(webapps +
         File.separatorChar + TEST);
+    try {
     if (!testWebappDir.exists()) {
-      fail("Test webapp dir " + testWebappDir + " missing");
+      fail("Test webapp dir " + testWebappDir.getCanonicalPath() + " missing");
+    }
+    }
+    catch (IOException e) {
     }
   }
 

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java Wed Oct 30 22:21:59 2013
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.http;
 
+import org.apache.log4j.Logger;
 import org.junit.Test;
 
 public class TestHttpServerLifecycle extends HttpServerFunctionalTest {
@@ -67,6 +68,27 @@ public class TestHttpServerLifecycle ext
   }
 
   /**
+   * Test that the server starts and stops cleanly with request logging enabled
+   *
+   * @throws Throwable on failure
+   */
+  @Test
+  public void testStartedServerWithRequestLog() throws Throwable {
+    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
+    requestLogAppender.setName("httprequestlog");
+    requestLogAppender.setFilename(System.getProperty("test.build.data", "/tmp/")
+        + "jetty-name-yyyy_mm_dd.log");
+    Logger.getLogger(HttpServer.class.getName() + ".test").addAppender(requestLogAppender);
+    HttpServer server = null;
+    server = createTestServer();
+    assertNotLive(server);
+    server.start();
+    assertAlive(server);
+    stop(server);
+    Logger.getLogger(HttpServer.class.getName() + ".test").removeAppender(requestLogAppender);
+  }
+
+  /**
    * Assert that the result of {@link HttpServer#toString()} contains the specific text
    * @param server server to examine
    * @param text text to search for
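
Stripped of the lifecycle assertions, the new testStartedServerWithRequestLog()
above is just the standard programmatic log4j appender setup around an
HttpRequestLogAppender. A sketch of that wiring on its own; the logger name and
log file location are illustrative:

    import org.apache.hadoop.http.HttpRequestLogAppender;
    import org.apache.log4j.Logger;

    public class RequestLogAppenderSketch {
      public static void main(String[] args) {
        HttpRequestLogAppender appender = new HttpRequestLogAppender();
        appender.setName("httprequestlog");
        appender.setFilename("/tmp/jetty-name-yyyy_mm_dd.log");  // placeholder file

        Logger logger = Logger.getLogger("org.apache.hadoop.http.HttpServer.test");
        logger.addAppender(appender);
        try {
          // ... start and exercise the HTTP server here ...
        } finally {
          logger.removeAppender(appender);
        }
      }
    }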

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java Wed Oct 30 22:21:59 2013
@@ -54,7 +54,7 @@ public class TestSSLHttpServer extends H
 
   @Before
   public void setup() throws Exception {
-    HttpConfig.setSecure(true);
+    HttpConfig.setPolicy(HttpConfig.Policy.HTTPS_ONLY);
     File base = new File(BASEDIR);
     FileUtil.fullyDelete(base);
     base.mkdirs();
@@ -89,7 +89,7 @@ public class TestSSLHttpServer extends H
     String classpathDir =
         KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
     new File(classpathDir, CONFIG_SITE_XML).delete();
-    HttpConfig.setSecure(false);
+    HttpConfig.setPolicy(HttpConfig.Policy.HTTP_ONLY);
   }
   
 

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java Wed Oct 30 22:21:59 2013
@@ -19,18 +19,23 @@
 package org.apache.hadoop.io;
 
 import java.io.*;
+
 import junit.framework.TestCase;
 
 import org.apache.commons.logging.*;
-
 import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.conf.*;
 
 /** Support for flat files of binary key/value pairs. */
 public class TestArrayFile extends TestCase {
   private static final Log LOG = LogFactory.getLog(TestArrayFile.class);
-  private static String FILE =
-    System.getProperty("test.build.data",".") + "/test.array";
+  
+  private static final Path TEST_DIR = new Path(
+      System.getProperty("test.build.data", "/tmp"),
+      TestMapFile.class.getSimpleName());
+  private static String TEST_FILE = new Path(TEST_DIR, "test.array").toString();
 
   public TestArrayFile(String name) { 
     super(name); 
@@ -40,15 +45,15 @@ public class TestArrayFile extends TestC
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
     RandomDatum[] data = generate(10000);
-    writeTest(fs, data, FILE);
-    readTest(fs, data, FILE, conf);
+    writeTest(fs, data, TEST_FILE);
+    readTest(fs, data, TEST_FILE, conf);
   }
 
   public void testEmptyFile() throws Exception {
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
-    writeTest(fs, new RandomDatum[0], FILE);
-    ArrayFile.Reader reader = new ArrayFile.Reader(fs, FILE, conf);
+    writeTest(fs, new RandomDatum[0], TEST_FILE);
+    ArrayFile.Reader reader = new ArrayFile.Reader(fs, TEST_FILE, conf);
     assertNull(reader.get(0, new RandomDatum()));
     reader.close();
   }
@@ -87,31 +92,75 @@ public class TestArrayFile extends TestC
       LOG.debug("reading " + data.length + " debug");
     }
     ArrayFile.Reader reader = new ArrayFile.Reader(fs, file, conf);
-    for (int i = 0; i < data.length; i++) {       // try forwards
-      reader.get(i, v);
-      if (!v.equals(data[i])) {
-        throw new RuntimeException("wrong value at " + i);
+    try {
+      for (int i = 0; i < data.length; i++) {       // try forwards
+        reader.get(i, v);
+        if (!v.equals(data[i])) {
+          throw new RuntimeException("wrong value at " + i);
+        }
       }
-    }
-    for (int i = data.length-1; i >= 0; i--) {    // then backwards
-      reader.get(i, v);
-      if (!v.equals(data[i])) {
-        throw new RuntimeException("wrong value at " + i);
+      for (int i = data.length-1; i >= 0; i--) {    // then backwards
+        reader.get(i, v);
+        if (!v.equals(data[i])) {
+          throw new RuntimeException("wrong value at " + i);
+        }
       }
-    }
-    reader.close();
-    if(LOG.isDebugEnabled()) {
-      LOG.debug("done reading " + data.length + " debug");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("done reading " + data.length + " debug");
+      }
+    } finally {
+      reader.close();
     }
   }
 
-
+  /** 
+   * test on {@link ArrayFile.Reader} iteration methods
+   * <pre> 
+   * {@code next(), seek()} in and out of range.
+   * </pre>
+   */
+  public void testArrayFileIteration() {
+    int SIZE = 10;
+    Configuration conf = new Configuration();    
+    try {
+      FileSystem fs = FileSystem.get(conf);
+      ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, TEST_FILE, 
+          LongWritable.class, CompressionType.RECORD, defaultProgressable);
+      assertNotNull("testArrayFileIteration error !!!", writer);
+      
+      for (int i = 0; i < SIZE; i++)
+        writer.append(new LongWritable(i));
+      
+      writer.close();
+      
+      ArrayFile.Reader reader = new ArrayFile.Reader(fs, TEST_FILE, conf);
+      LongWritable nextWritable = new LongWritable(0);
+      
+      for (int i = 0; i < SIZE; i++) {
+        nextWritable = (LongWritable)reader.next(nextWritable);
+        assertEquals(nextWritable.get(), i);
+      }
+        
+      assertTrue("testArrayFileIteration seek error !!!",
+          reader.seek(new LongWritable(6)));
+      nextWritable = (LongWritable) reader.next(nextWritable);
+      assertTrue("testArrayFileIteration error !!!", reader.key() == 7);
+      assertTrue("testArrayFileIteration error !!!",
+          nextWritable.equals(new LongWritable(7)));
+      assertFalse("testArrayFileIteration error !!!",
+          reader.seek(new LongWritable(SIZE + 5)));
+      reader.close();
+    } catch (Exception ex) {
+      fail("testArrayFileWriterConstruction error !!!");
+    }
+  }
+ 
   /** For debugging and testing. */
   public static void main(String[] args) throws Exception {
     int count = 1024 * 1024;
     boolean create = true;
     boolean check = true;
-    String file = FILE;
+    String file = TEST_FILE;
     String usage = "Usage: TestArrayFile [-count N] [-nocreate] [-nocheck] file";
       
     if (args.length == 0) {
@@ -160,4 +209,11 @@ public class TestArrayFile extends TestC
       fs.close();
     }
   }
+  
+  private static final Progressable defaultProgressable = new Progressable() {
+    @Override
+    public void progress() {      
+    }
+  };
+  
 }
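
The new testArrayFileIteration() above covers the basic ArrayFile access
pattern: values are appended in order and can then be read back sequentially
with next() or addressed by position with get()/seek(). A compact sketch of the
same flow (the file location is a placeholder):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.ArrayFile;
    import org.apache.hadoop.io.LongWritable;

    public class ArrayFileSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        String file = "/tmp/sketch.array";              // placeholder location

        // Values are addressed later by the order in which they were appended.
        ArrayFile.Writer writer =
            new ArrayFile.Writer(conf, fs, file, LongWritable.class);
        for (long i = 0; i < 10; i++) {
          writer.append(new LongWritable(i));
        }
        writer.close();

        ArrayFile.Reader reader = new ArrayFile.Reader(fs, file, conf);
        LongWritable value = new LongWritable();
        reader.get(7, value);                           // random access by index
        System.out.println(value.get());                // 7
        reader.close();
      }
    }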

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java Wed Oct 30 22:21:59 2013
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,6 +20,8 @@ package org.apache.hadoop.io;
 
 import java.io.*;
 
+import org.junit.Assert;
+
 import junit.framework.TestCase;
 
 /** Unit tests for ArrayWritable */
@@ -61,4 +63,50 @@ public class TestArrayWritable extends T
       assertEquals(destElements[i],elements[i]);
     }
   }
+  
+ /**
+  * test {@link ArrayWritable} toArray() method 
+  */
+  public void testArrayWritableToArray() {
+    Text[] elements = {new Text("zero"), new Text("one"), new Text("two")};
+    TextArrayWritable arrayWritable = new TextArrayWritable();
+    arrayWritable.set(elements);
+    Object array = arrayWritable.toArray();
+  
+    assertTrue("TestArrayWritable testArrayWritableToArray error!!! ", array instanceof Text[]);
+    Text[] destElements = (Text[]) array;
+  
+    for (int i = 0; i < elements.length; i++) {
+      assertEquals(destElements[i], elements[i]);
+    }
+  }
+  
+  /**
+   * test {@link ArrayWritable} constructor with null
+   */
+  public void testNullArgument() {
+    try {
+      Class<? extends Writable> valueClass = null;
+      new ArrayWritable(valueClass);
+      fail("testNullArgument error !!!");
+    } catch (IllegalArgumentException exp) {
+      //should be for test pass
+    } catch (Exception e) {
+      fail("testNullArgument error !!!");
+    }
+  }
+
+  /**
+   * test {@link ArrayWritable} constructor with {@code String[]} as a parameter
+   */
+  @SuppressWarnings("deprecation")
+  public void testArrayWritableStringConstructor() {
+    String[] original = { "test1", "test2", "test3" };
+    ArrayWritable arrayWritable = new ArrayWritable(original);
+    assertEquals("testArrayWritableStringConstructor class error!!!", 
+        UTF8.class, arrayWritable.getValueClass());
+    Assert.assertArrayEquals("testArrayWritableStringConstructor toString error!!!",
+      original, arrayWritable.toStrings());
+  }
+  
 }
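
The added ArrayWritable tests above exercise the typed constructor, toArray()
and toStrings(); a short sketch of that API surface with illustrative values:

    import org.apache.hadoop.io.ArrayWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    public class ArrayWritableSketch {
      public static void main(String[] args) {
        // The element class must be supplied up front (null is rejected, as the
        // new testNullArgument() checks).
        ArrayWritable aw = new ArrayWritable(Text.class);
        aw.set(new Writable[] { new Text("zero"), new Text("one") });

        // toStrings() returns the elements' toString() values.
        for (String s : aw.toStrings()) {
          System.out.println(s);
        }
      }
    }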

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java Wed Oct 30 22:21:59 2013
@@ -18,28 +18,53 @@
 
 package org.apache.hadoop.io;
 
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-
-import junit.framework.TestCase;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionInputStream;
+import org.apache.hadoop.io.compress.CompressionOutputStream;
+import org.apache.hadoop.io.compress.Compressor;
+import org.apache.hadoop.io.compress.Decompressor;
+import org.apache.hadoop.util.Progressable;
+import org.junit.Assert;
 
 public class TestBloomMapFile extends TestCase {
   private static Configuration conf = new Configuration();
+  private static final Path TEST_ROOT = new Path(
+      System.getProperty("test.build.data", "/tmp"),
+      TestMapFile.class.getSimpleName());
+  private static final Path TEST_DIR = new Path(TEST_ROOT, "testfile");
+  private static final Path TEST_FILE = new Path(TEST_ROOT, "testfile");
+
+  @Override
+  public void setUp() throws Exception {
+    LocalFileSystem fs = FileSystem.getLocal(conf);
+    if (fs.exists(TEST_ROOT) && !fs.delete(TEST_ROOT, true)) {
+      Assert.fail("Can't clean up test root dir");
+    }
+    fs.mkdirs(TEST_ROOT);
+  }
   
+  @SuppressWarnings("deprecation")
   public void testMembershipTest() throws Exception {
     // write the file
-    Path dirName = new Path(System.getProperty("test.build.data",".") +
-        getName() + ".bloommapfile"); 
     FileSystem fs = FileSystem.getLocal(conf);
-    Path qualifiedDirName = fs.makeQualified(dirName);
+    Path qualifiedDirName = fs.makeQualified(TEST_DIR);
     conf.setInt("io.mapfile.bloom.size", 2048);
     BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, fs,
-      qualifiedDirName.toString(), IntWritable.class, Text.class);
+        qualifiedDirName.toString(), IntWritable.class, Text.class);
     IntWritable key = new IntWritable();
     Text value = new Text();
     for (int i = 0; i < 2000; i += 2) {
@@ -48,7 +73,7 @@ public class TestBloomMapFile extends Te
       writer.append(key, value);
     }
     writer.close();
-    
+
     BloomMapFile.Reader reader = new BloomMapFile.Reader(fs,
         qualifiedDirName.toString(), conf);
     // check false positives rate
@@ -58,9 +83,11 @@ public class TestBloomMapFile extends Te
       key.set(i);
       boolean exists = reader.probablyHasKey(key);
       if (i % 2 == 0) {
-        if (!exists) falseNeg++;
+        if (!exists)
+          falseNeg++;
       } else {
-        if (exists) falsePos++;
+        if (exists)
+          falsePos++;
       }
     }
     reader.close();
@@ -71,13 +98,13 @@ public class TestBloomMapFile extends Te
     assertTrue(falsePos < 2);
   }
 
-  private void checkMembershipVaryingSizedKeys(String name, List<Text> keys) throws Exception {
-    Path dirName = new Path(System.getProperty("test.build.data",".") +
-        name + ".bloommapfile"); 
+  @SuppressWarnings("deprecation")
+  private void checkMembershipVaryingSizedKeys(String name, List<Text> keys)
+      throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
-    Path qualifiedDirName = fs.makeQualified(dirName);
+    Path qualifiedDirName = fs.makeQualified(TEST_DIR);
     BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, fs,
-      qualifiedDirName.toString(), Text.class, NullWritable.class);
+        qualifiedDirName.toString(), Text.class, NullWritable.class);
     for (Text key : keys) {
       writer.append(key, NullWritable.get());
     }
@@ -88,7 +115,8 @@ public class TestBloomMapFile extends Te
         qualifiedDirName.toString(), conf);
     Collections.reverse(keys);
     for (Text key : keys) {
-      assertTrue("False negative for existing key " + key, reader.probablyHasKey(key));
+      assertTrue("False negative for existing key " + key,
+          reader.probablyHasKey(key));
     }
     reader.close();
     fs.delete(qualifiedDirName, true);
@@ -108,4 +136,171 @@ public class TestBloomMapFile extends Te
     checkMembershipVaryingSizedKeys(getName(), list);
   }
 
+  /**
+   * test {@code BloomMapFile.delete()} method
+   */
+  public void testDeleteFile() {
+    try {
+      FileSystem fs = FileSystem.getLocal(conf);
+      BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+          MapFile.Writer.keyClass(IntWritable.class),
+          MapFile.Writer.valueClass(Text.class));
+      assertNotNull("testDeleteFile error !!!", writer);
+      BloomMapFile.delete(fs, "." + TEST_FILE);
+    } catch (Exception ex) {
+      fail("unexpect ex in testDeleteFile !!!");
+    }
+  }
+  
+  /**
+   * test {@link BloomMapFile.Reader} constructor with 
+   * IOException
+   */
+  public void testIOExceptionInWriterConstructor() {
+    Path dirNameSpy = org.mockito.Mockito.spy(TEST_FILE);
+    try {
+      BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+          MapFile.Writer.keyClass(IntWritable.class),
+          MapFile.Writer.valueClass(Text.class));
+      writer.append(new IntWritable(1), new Text("123124142"));
+      writer.close();
+
+      org.mockito.Mockito.when(dirNameSpy.getFileSystem(conf)).thenThrow(
+          new IOException());
+      BloomMapFile.Reader reader = new BloomMapFile.Reader(dirNameSpy, conf,
+          MapFile.Reader.comparator(new WritableComparator(IntWritable.class)));
+
+      assertNull("testIOExceptionInWriterConstructor error !!!",
+          reader.getBloomFilter());
+      reader.close();
+    } catch (Exception ex) {
+      fail("unexpect ex in testIOExceptionInWriterConstructor !!!");
+    }
+  }
+
+  /**
+   *  test {@link BloomMapFile.Reader.get()} method 
+   */
+  public void testGetBloomMapFile() {
+    int SIZE = 10;
+    try {
+      BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+          MapFile.Writer.keyClass(IntWritable.class),
+          MapFile.Writer.valueClass(Text.class));
+
+      for (int i = 0; i < SIZE; i++) {
+        writer.append(new IntWritable(i), new Text());
+      }
+      writer.close();
+
+      BloomMapFile.Reader reader = new BloomMapFile.Reader(TEST_FILE, conf,
+          MapFile.Reader.comparator(new WritableComparator(IntWritable.class)));
+
+      for (int i = 0; i < SIZE; i++) {
+        assertNotNull("testGetBloomMapFile error !!!",
+            reader.get(new IntWritable(i), new Text()));
+      }
+            
+      assertNull("testGetBloomMapFile error !!!",
+          reader.get(new IntWritable(SIZE + 5), new Text()));
+      reader.close();
+    } catch (Exception ex) {
+      fail("unexpect ex in testGetBloomMapFile !!!");
+    }
+  }
+
+  /**
+   * test {@code BloomMapFile.Writer} constructors
+   */
+  @SuppressWarnings("deprecation")
+  public void testBloomMapFileConstructors() {
+    try {
+      FileSystem ts = FileSystem.get(conf);
+      String testFileName = TEST_FILE.toString();
+      BloomMapFile.Writer writer1 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
+          defaultCodec, defaultProgress);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer1);
+      BloomMapFile.Writer writer2 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
+          defaultProgress);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer2);
+      BloomMapFile.Writer writer3 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.BLOCK);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer3);
+      BloomMapFile.Writer writer4 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
+          defaultCodec, defaultProgress);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer4);
+      BloomMapFile.Writer writer5 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
+          defaultProgress);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer5);
+      BloomMapFile.Writer writer6 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.RECORD);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer6);
+      BloomMapFile.Writer writer7 = new BloomMapFile.Writer(conf, ts,
+          testFileName, WritableComparator.get(Text.class), Text.class);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer7);
+    } catch (Exception ex) {
+      fail("testBloomMapFileConstructors error !!!");
+    }
+  }
+
+  static final Progressable defaultProgress = new Progressable() {
+    @Override
+    public void progress() {
+    }
+  };
+
+  static final CompressionCodec defaultCodec = new CompressionCodec() {
+    @Override
+    public String getDefaultExtension() {
+      return null;
+    }
+
+    @Override
+    public Class<? extends Decompressor> getDecompressorType() {
+      return null;
+    }
+
+    @Override
+    public Class<? extends Compressor> getCompressorType() {
+      return null;
+    }
+
+    @Override
+    public CompressionOutputStream createOutputStream(OutputStream out,
+        Compressor compressor) throws IOException {
+      return null;
+    }
+
+    @Override
+    public CompressionOutputStream createOutputStream(OutputStream out)
+        throws IOException {
+      return null;
+    }
+
+    @Override
+    public CompressionInputStream createInputStream(InputStream in,
+        Decompressor decompressor) throws IOException {
+      return null;
+    }
+
+    @Override
+    public CompressionInputStream createInputStream(InputStream in)
+        throws IOException {
+      return null;
+    }
+
+    @Override
+    public Decompressor createDecompressor() {
+      return null;
+    }
+
+    @Override
+    public Compressor createCompressor() {
+      return null;
+    }
+  };
 }
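
The point the membership tests above make is that
BloomMapFile.Reader.probablyHasKey() can answer "definitely absent" cheaply and
only occasionally reports a false positive. A minimal usage sketch with the
options-style writer constructor the new tests use (the output directory is a
placeholder):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.BloomMapFile;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.io.Text;

    public class BloomMapFileSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path dir = new Path("/tmp/sketch.bloommapfile");   // placeholder location

        BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, dir,
            MapFile.Writer.keyClass(IntWritable.class),
            MapFile.Writer.valueClass(Text.class));
        for (int i = 0; i < 100; i += 2) {                 // only even keys exist
          writer.append(new IntWritable(i), new Text("value-" + i));
        }
        writer.close();

        BloomMapFile.Reader reader = new BloomMapFile.Reader(dir, conf);
        // false means definitely absent; true means probably present.
        System.out.println(reader.probablyHasKey(new IntWritable(2)));
        System.out.println(reader.probablyHasKey(new IntWritable(3)));
        reader.close();
      }
    }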

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java Wed Oct 30 22:21:59 2013
@@ -50,4 +50,28 @@ public class TestBooleanWritable {
     out.flush();
     return out;
   }
+  
+  /**
+   * test {@link BooleanWritable} methods hashCode(), equals(), compareTo() 
+   */
+  @Test
+  public void testCommonMethods() {    
+    assertTrue("testCommonMethods1 error !!!", newInstance(true).equals(newInstance(true)));
+    assertTrue("testCommonMethods2 error  !!!", newInstance(false).equals(newInstance(false)));
+    assertFalse("testCommonMethods3 error !!!", newInstance(false).equals(newInstance(true)));
+    assertTrue("testCommonMethods4 error !!!", checkHashCode(newInstance(true), newInstance(true)));
+    assertFalse("testCommonMethods5 error !!! ", checkHashCode(newInstance(true), newInstance(false)));
+    assertTrue("testCommonMethods6 error !!!", newInstance(true).compareTo(newInstance(false)) > 0 );
+    assertTrue("testCommonMethods7 error !!!", newInstance(false).compareTo(newInstance(true)) < 0 );
+    assertTrue("testCommonMethods8 error !!!", newInstance(false).compareTo(newInstance(false)) == 0 );
+    assertEquals("testCommonMethods9 error !!!", "true", newInstance(true).toString());
+  }
+  
+  private boolean checkHashCode(BooleanWritable f, BooleanWritable s) {
+    return f.hashCode() == s.hashCode();
+  }    
+  
+  private static BooleanWritable newInstance(boolean flag) {
+    return new BooleanWritable(flag);
+  }
 }

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java Wed Oct 30 22:21:59 2013
@@ -133,5 +133,24 @@ public class TestBytesWritable {
     assertTrue("buffer created with (array, len) has bad length",
         zeroBuf.getLength() == copyBuf.getLength());
   }
+    
+  /**
+   * test {@link ByteWritable} 
+   * methods compareTo(), toString(), equals()
+   */
+  @Test
+  public void testObjectCommonMethods() {    
+    byte b = 0x9;
+    ByteWritable bw = new ByteWritable();
+    bw.set(b);
+    assertTrue("testSetByteWritable error", bw.get() == b);
+    assertTrue("testSetByteWritable error < 0", bw.compareTo(new ByteWritable((byte)0xA)) < 0);
+    assertTrue("testSetByteWritable error > 0", bw.compareTo(new ByteWritable((byte)0x8)) > 0);
+    assertTrue("testSetByteWritable error == 0", bw.compareTo(new ByteWritable((byte)0x9)) == 0);
+    assertTrue("testSetByteWritable equals error !!!", bw.equals(new ByteWritable((byte)0x9)));
+    assertTrue("testSetByteWritable equals error !!!", ! bw.equals(new ByteWritable((byte)0xA)));
+    assertTrue("testSetByteWritable equals error !!!", ! bw.equals(new IntWritable(1)));
+    assertEquals("testSetByteWritable error ", "9", bw.toString());    
+  }
+  
 }
-

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java Wed Oct 30 22:21:59 2013
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,6 +20,7 @@ package org.apache.hadoop.io;
 
 import java.io.IOException;
 import java.util.EnumSet;
+import java.util.Iterator;
 import java.lang.reflect.Type;
 
 import junit.framework.TestCase;
@@ -32,8 +33,8 @@ public class TestEnumSetWritable extends
   }
 
   EnumSet<TestEnumSet> nonEmptyFlag = EnumSet.of(TestEnumSet.APPEND);
-  EnumSetWritable<TestEnumSet> nonEmptyFlagWritable = new EnumSetWritable<TestEnumSet>(
-      nonEmptyFlag);
+  EnumSetWritable<TestEnumSet> nonEmptyFlagWritable = 
+      new EnumSetWritable<TestEnumSet>(nonEmptyFlag);
 
   @SuppressWarnings("unchecked")
   public void testSerializeAndDeserializeNonEmpty() throws IOException {
@@ -60,11 +61,12 @@ public class TestEnumSetWritable extends
     }
 
     assertTrue(
-        "Instantiate empty EnumSetWritable with no element type class providesd should throw exception.",
+        "Instantiation of empty EnumSetWritable with no element type class "
+        + "provided should throw exception.",
         gotException);
 
-    EnumSetWritable<TestEnumSet> emptyFlagWritable = new EnumSetWritable<TestEnumSet>(
-        emptyFlag, TestEnumSet.class);
+    EnumSetWritable<TestEnumSet> emptyFlagWritable = 
+        new EnumSetWritable<TestEnumSet>(emptyFlag, TestEnumSet.class);
     DataOutputBuffer out = new DataOutputBuffer();
     ObjectWritable.writeObject(out, emptyFlagWritable, emptyFlagWritable
         .getClass(), null);
@@ -86,11 +88,12 @@ public class TestEnumSetWritable extends
     }
 
     assertTrue(
-        "Instantiate empty EnumSetWritable with no element type class providesd should throw exception.",
+        "Instantiation of empty EnumSetWritable with no element type class "
+        + "provided should throw exception",
         gotException);
 
-    EnumSetWritable<TestEnumSet> nullFlagWritable = new EnumSetWritable<TestEnumSet>(
-        null, TestEnumSet.class);
+    EnumSetWritable<TestEnumSet> nullFlagWritable = 
+        new EnumSetWritable<TestEnumSet>(null, TestEnumSet.class);
 
     DataOutputBuffer out = new DataOutputBuffer();
     ObjectWritable.writeObject(out, nullFlagWritable, nullFlagWritable
@@ -105,10 +108,54 @@ public class TestEnumSetWritable extends
   public EnumSetWritable<TestEnumSet> testField;
 
   public void testAvroReflect() throws Exception {
-    String schema = "{\"type\":\"array\",\"items\":{\"type\":\"enum\",\"name\":\"TestEnumSet\",\"namespace\":\"org.apache.hadoop.io.TestEnumSetWritable$\",\"symbols\":[\"CREATE\",\"OVERWRITE\",\"APPEND\"]},\"java-class\":\"org.apache.hadoop.io.EnumSetWritable\"}";
+    String schema = "{\"type\":\"array\",\"items\":{\"type\":\"enum\","
+        + "\"name\":\"TestEnumSet\","
+        + "\"namespace\":\"org.apache.hadoop.io.TestEnumSetWritable$\","
+        + "\"symbols\":[\"CREATE\",\"OVERWRITE\",\"APPEND\"]},"
+        + "\"java-class\":\"org.apache.hadoop.io.EnumSetWritable\"}";
     Type type =
       TestEnumSetWritable.class.getField("testField").getGenericType();
     AvroTestUtil.testReflect(nonEmptyFlagWritable, type, schema);
+  }    
+  
+  /**
+   * test {@link EnumSetWritable} equals() method
+   */
+  public void testEnumSetWritableEquals() {
+    EnumSetWritable<TestEnumSet> eset1 = new EnumSetWritable<TestEnumSet>(
+        EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class);
+    EnumSetWritable<TestEnumSet> eset2 = new EnumSetWritable<TestEnumSet>(
+        EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class);
+    assertTrue("testEnumSetWritableEquals error !!!", eset1.equals(eset2));
+    assertFalse("testEnumSetWritableEquals error !!!",
+        eset1.equals(new EnumSetWritable<TestEnumSet>(EnumSet.of(
+            TestEnumSet.APPEND, TestEnumSet.CREATE, TestEnumSet.OVERWRITE),
+            TestEnumSet.class)));
+    assertTrue("testEnumSetWritableEquals getElementType error !!!", eset1
+        .getElementType().equals(TestEnumSet.class));
   }
+  
+  /** 
+   * test {@code EnumSetWritable.write(DataOutputBuffer out)} 
+   *  and iteration by TestEnumSet through iterator().
+   */
+  public void testEnumSetWritableWriteRead() throws Exception {
+    EnumSetWritable<TestEnumSet> srcSet = new EnumSetWritable<TestEnumSet>(
+        EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class);
+    DataOutputBuffer out = new DataOutputBuffer();
+    srcSet.write(out);
+
+    EnumSetWritable<TestEnumSet> dstSet = new EnumSetWritable<TestEnumSet>();
+    DataInputBuffer in = new DataInputBuffer();
+    in.reset(out.getData(), out.getLength());
+    dstSet.readFields(in);
 
+    EnumSet<TestEnumSet> result = dstSet.get();
+    Iterator<TestEnumSet> dstIter = result.iterator();
+    Iterator<TestEnumSet> srcIter = srcSet.iterator();
+    while (dstIter.hasNext() && srcIter.hasNext()) {
+      assertEquals("testEnumSetWritableWriteRead error !!!", dstIter.next(),
+          srcIter.next());
+    }
+  }
 }
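
testEnumSetWritableWriteRead() above is the standard Writable round trip:
serialize into a DataOutputBuffer, point a DataInputBuffer at the same bytes,
and readFields() into a fresh instance. The same pattern sketched outside the
test, with an illustrative enum:

    import java.util.EnumSet;

    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.EnumSetWritable;

    public class EnumSetWritableSketch {
      enum Flag { CREATE, OVERWRITE, APPEND }

      public static void main(String[] args) throws Exception {
        EnumSetWritable<Flag> src = new EnumSetWritable<Flag>(
            EnumSet.of(Flag.CREATE, Flag.APPEND), Flag.class);

        // Serialize ...
        DataOutputBuffer out = new DataOutputBuffer();
        src.write(out);

        // ... and deserialize into an empty instance.
        EnumSetWritable<Flag> dst = new EnumSetWritable<Flag>();
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        dst.readFields(in);

        System.out.println(dst.get());   // [CREATE, APPEND]
      }
    }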

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java Wed Oct 30 22:21:59 2013
@@ -17,29 +17,592 @@
  */
 package org.apache.hadoop.io;
 
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionInputStream;
+import org.apache.hadoop.io.compress.CompressionOutputStream;
+import org.apache.hadoop.io.compress.Compressor;
+import org.apache.hadoop.io.compress.Decompressor;
+import org.apache.hadoop.util.Progressable;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
 
-import junit.framework.TestCase;
+import static org.mockito.Mockito.*;
 
-public class TestMapFile extends TestCase {
+public class TestMapFile {
+  
+  private static final Path TEST_DIR = new Path(
+      System.getProperty("test.build.data", "/tmp"),
+      TestMapFile.class.getSimpleName());
+  
   private static Configuration conf = new Configuration();
 
+  @Before
+  public void setup() throws Exception {
+    LocalFileSystem fs = FileSystem.getLocal(conf);
+    if (fs.exists(TEST_DIR) && !fs.delete(TEST_DIR, true)) {
+      Assert.fail("Can't clean up test root dir");
+    }
+    fs.mkdirs(TEST_DIR);
+  }
+  
+  private static final Progressable defaultProgressable = new Progressable() {
+    @Override
+    public void progress() {
+    }
+  };
+
+  private static final CompressionCodec defaultCodec = new CompressionCodec() {
+    @Override
+    public CompressionOutputStream createOutputStream(OutputStream out)
+        throws IOException {
+      return null;
+    }
+
+    @Override
+    public CompressionOutputStream createOutputStream(OutputStream out,
+        Compressor compressor) throws IOException {
+      return null;
+    }
+
+    @Override
+    public Class<? extends Compressor> getCompressorType() {
+      return null;
+    }
+
+    @Override
+    public Compressor createCompressor() {
+      return null;
+    }
+
+    @Override
+    public CompressionInputStream createInputStream(InputStream in)
+        throws IOException {
+      return null;
+    }
+
+    @Override
+    public CompressionInputStream createInputStream(InputStream in,
+        Decompressor decompressor) throws IOException {
+      return null;
+    }
+
+    @Override
+    public Class<? extends Decompressor> getDecompressorType() {
+      return null;
+    }
+
+    @Override
+    public Decompressor createDecompressor() {
+      return null;
+    }
+
+    @Override
+    public String getDefaultExtension() {
+      return null;
+    }
+  };
+
+  private MapFile.Writer createWriter(String fileName,
+      Class<? extends WritableComparable<?>> keyClass,
+      Class<? extends Writable> valueClass) throws IOException {
+    Path dirName = new Path(TEST_DIR, fileName);
+    MapFile.Writer.setIndexInterval(conf, 4);
+    return new MapFile.Writer(conf, dirName, MapFile.Writer.keyClass(keyClass),
+        MapFile.Writer.valueClass(valueClass));
+  }
+
+  private MapFile.Reader createReader(String fileName,
+      Class<? extends WritableComparable<?>> keyClass) throws IOException {
+    Path dirName = new Path(TEST_DIR, fileName);
+    return new MapFile.Reader(dirName, conf,
+        MapFile.Reader.comparator(new WritableComparator(keyClass)));
+  }
+  
+  /**
+   * test {@code MapFile.Reader.getClosest()} method 
+   *
+   */
+  @Test
+  public void testGetClosestOnCurrentApi() throws Exception {
+    final String TEST_PREFIX = "testGetClosestOnCurrentApi.mapfile";
+    MapFile.Writer writer = createWriter(TEST_PREFIX, Text.class, Text.class);
+    int FIRST_KEY = 1;
+    // Test keys: 11,21,31,...,91
+    for (int i = FIRST_KEY; i < 100; i += 10) {      
+      Text t = new Text(Integer.toString(i));
+      writer.append(t, t);
+    }
+    writer.close();
+
+    MapFile.Reader reader = createReader(TEST_PREFIX, Text.class);
+    Text key = new Text("55");
+    Text value = new Text();
+
+    // Test get closest with step forward
+    Text closest = (Text) reader.getClosest(key, value);
+    assertEquals(new Text("61"), closest);
+
+    // Test get closest with step back
+    closest = (Text) reader.getClosest(key, value, true);
+    assertEquals(new Text("51"), closest);
+
+    // Test get closest when we pass explicit key
+    final Text explicitKey = new Text("21");
+    closest = (Text) reader.getClosest(explicitKey, value);
+    assertEquals(new Text("21"), explicitKey);
+
+    // Test what happens at boundaries. Assert if searching a key that is
+    // less than first key in the mapfile, that the first key is returned.
+    key = new Text("00");
+    closest = (Text) reader.getClosest(key, value);
+    assertEquals(FIRST_KEY, Integer.parseInt(closest.toString()));
+
+    // Assert that null is returned if key is > last entry in mapfile.
+    key = new Text("92");
+    closest = (Text) reader.getClosest(key, value);
+    assertNull("Not null key in testGetClosestWithNewCode", closest);
+
+    // If we were looking for the key before, we should get the last key
+    closest = (Text) reader.getClosest(key, value, true);
+    assertEquals(new Text("91"), closest);
+  }
+  
+  /**
+   * test {@code MapFile.Reader.midKey() } method 
+   */
+  @Test
+  public void testMidKeyOnCurrentApi() throws Exception {
+    // Write a mapfile of simple data: keys are
+    final String TEST_PREFIX = "testMidKeyOnCurrentApi.mapfile";
+    MapFile.Writer writer = createWriter(TEST_PREFIX, IntWritable.class,
+        IntWritable.class);
+    // 0,1,....9
+    int SIZE = 10;
+    for (int i = 0; i < SIZE; i++)
+      writer.append(new IntWritable(i), new IntWritable(i));
+    writer.close();
+
+    MapFile.Reader reader = createReader(TEST_PREFIX, IntWritable.class);
+    assertEquals(new IntWritable((SIZE - 1) / 2), reader.midKey());
+  }
+  
+  /**
+   * test  {@code MapFile.Writer.rename()} method 
+   */
+  @Test
+  public void testRename() {
+    final String NEW_FILE_NAME = "test-new.mapfile";
+    final String OLD_FILE_NAME = "test-old.mapfile";
+    try {
+      FileSystem fs = FileSystem.getLocal(conf);
+      MapFile.Writer writer = createWriter(OLD_FILE_NAME, IntWritable.class,
+          IntWritable.class);
+      writer.close();
+      MapFile.rename(fs, new Path(TEST_DIR, OLD_FILE_NAME).toString(), 
+          new Path(TEST_DIR, NEW_FILE_NAME).toString());
+      MapFile.delete(fs, new Path(TEST_DIR, NEW_FILE_NAME).toString());
+    } catch (IOException ex) {
+      fail("testRename error " + ex);
+    }
+  }
+  
+  /**
+   * test {@code MapFile.rename()} 
+   *  method with throwing {@code IOException}  
+   */
+  @Test
+  public void testRenameWithException() {
+    final String ERROR_MESSAGE = "Can't rename file";
+    final String NEW_FILE_NAME = "test-new.mapfile";
+    final String OLD_FILE_NAME = "test-old.mapfile";
+    try {
+      FileSystem fs = FileSystem.getLocal(conf);
+      FileSystem spyFs = spy(fs);
+
+      MapFile.Writer writer = createWriter(OLD_FILE_NAME, IntWritable.class,
+          IntWritable.class);
+      writer.close();
+
+      Path oldDir = new Path(TEST_DIR, OLD_FILE_NAME);
+      Path newDir = new Path(TEST_DIR, NEW_FILE_NAME);
+      when(spyFs.rename(oldDir, newDir)).thenThrow(
+          new IOException(ERROR_MESSAGE));
+
+      MapFile.rename(spyFs, oldDir.toString(), newDir.toString());
+      fail("testRenameWithException no exception error !!!");
+    } catch (IOException ex) {
+      assertEquals("testRenameWithException invalid IOExceptionMessage !!!",
+          ex.getMessage(), ERROR_MESSAGE);
+    }
+  }
+
+  @Test
+  public void testRenameWithFalse() {
+    final String ERROR_MESSAGE = "Could not rename";
+    final String NEW_FILE_NAME = "test-new.mapfile";
+    final String OLD_FILE_NAME = "test-old.mapfile";
+    try {
+      FileSystem fs = FileSystem.getLocal(conf);
+      FileSystem spyFs = spy(fs);
+
+      MapFile.Writer writer = createWriter(OLD_FILE_NAME, IntWritable.class,
+          IntWritable.class);
+      writer.close();
+
+      Path oldDir = new Path(TEST_DIR, OLD_FILE_NAME);
+      Path newDir = new Path(TEST_DIR, NEW_FILE_NAME);
+      when(spyFs.rename(oldDir, newDir)).thenReturn(false);
+
+      MapFile.rename(spyFs, oldDir.toString(), newDir.toString());
+      fail("testRenameWithException no exception error !!!");
+    } catch (IOException ex) {
+      assertTrue("testRenameWithFalse invalid IOExceptionMessage error !!!", ex
+          .getMessage().startsWith(ERROR_MESSAGE));
+    }
+  }
+  
+  /**
+   * test throwing {@code IOException} in {@code MapFile.Writer} constructor    
+   */
+  @Test
+  public void testWriteWithFailDirCreation() {
+    String ERROR_MESSAGE = "Mkdirs failed to create directory";
+    Path dirName = new Path(TEST_DIR, "fail.mapfile");
+    MapFile.Writer writer = null;
+    try {
+      FileSystem fs = FileSystem.getLocal(conf);
+      FileSystem spyFs = spy(fs);
+      Path pathSpy = spy(dirName);
+      when(pathSpy.getFileSystem(conf)).thenReturn(spyFs);
+      when(spyFs.mkdirs(dirName)).thenReturn(false);
+
+      writer = new MapFile.Writer(conf, pathSpy,
+          MapFile.Writer.keyClass(IntWritable.class),
+          MapFile.Writer.valueClass(Text.class));
+      fail("testWriteWithFailDirCreation error !!!");
+    } catch (IOException ex) {
+      assertTrue("testWriteWithFailDirCreation ex error !!!", ex.getMessage()
+          .startsWith(ERROR_MESSAGE));
+    } finally {
+      if (writer != null)
+        try {
+          writer.close();
+        } catch (IOException e) {
+        }
+    }
+  }
+
+  /**
+   * test {@code MapFile.Reader.finalKey()} method
+   */
+  @Test
+  public void testOnFinalKey() {
+    final String TEST_METHOD_KEY = "testOnFinalKey.mapfile";
+    int SIZE = 10;
+    try {
+      MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
+          IntWritable.class);
+      for (int i = 0; i < SIZE; i++)
+        writer.append(new IntWritable(i), new IntWritable(i));
+      writer.close();
+
+      MapFile.Reader reader = createReader(TEST_METHOD_KEY, IntWritable.class);
+      IntWritable actualKey = new IntWritable(0);
+      reader.finalKey(actualKey);
+      assertEquals("testOnFinalKey not same !!!", new IntWritable(SIZE - 1),
+          actualKey);
+    } catch (IOException ex) {
+      fail("testOnFinalKey error !!!");
+    }
+  }
+  
+  /**
+   * test {@code MapFile.Writer} constructor with key, value
+   * and validate it with {@code keyClass(), valueClass()} methods 
+   */
+  @Test
+  public void testKeyValueClasses() {
+    Class<? extends WritableComparable<?>> keyClass = IntWritable.class;
+    Class<?> valueClass = Text.class;
+    try {
+      createWriter("testKeyValueClasses.mapfile", IntWritable.class, Text.class);
+      assertNotNull("writer key class null error !!!",
+          MapFile.Writer.keyClass(keyClass));
+      assertNotNull("writer value class null error !!!",
+          MapFile.Writer.valueClass(valueClass));
+    } catch (IOException ex) {
+      fail(ex.getMessage());
+    }
+  }
+  
+  /**
+   * test {@code MapFile.Reader.getClosest()} with a key of the wrong class
+   */
+  @Test
+  public void testReaderGetClosest() throws Exception {
+    final String TEST_METHOD_KEY = "testReaderWithWrongKeyClass.mapfile";
+    try {
+      MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
+          Text.class);
+
+      for (int i = 0; i < 10; i++)
+        writer.append(new IntWritable(i), new Text("value" + i));
+      writer.close();
+
+      MapFile.Reader reader = createReader(TEST_METHOD_KEY, Text.class);
+      reader.getClosest(new Text("2"), new Text(""));
+      fail("no expected exception in testReaderWithWrongKeyClass !!!");
+    } catch (IOException ex) {
+      /* Should be thrown to pass the test */
+    }
+  }
+  
+  /**
+   * test {@code MapFile.Writer.append()} with wrong value class
+   */
+  @Test
+  public void testReaderWithWrongValueClass() {
+    final String TEST_METHOD_KEY = "testReaderWithWrongValueClass.mapfile";
+    try {
+      MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
+          Text.class);
+      writer.append(new IntWritable(0), new IntWritable(0));
+      fail("no expected exception in testReaderWithWrongValueClass !!!");
+    } catch (IOException ex) {
+      /* Should be thrown to pass the test */
+    }
+  }
+  
+  /**
+   * test {@code MapFile.Reader.next(key, value)} for iteration.
+   */
+  @Test
+  public void testReaderKeyIteration() {
+    final String TEST_METHOD_KEY = "testReaderKeyIteration.mapfile";
+    int SIZE = 10;
+    int ITERATIONS = 5;
+    try {
+      MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
+          Text.class);
+      int start = 0;
+      for (int i = 0; i < SIZE; i++)
+        writer.append(new IntWritable(i), new Text("Value:" + i));
+      writer.close();
+
+      MapFile.Reader reader = createReader(TEST_METHOD_KEY, IntWritable.class);
+      // test iteration
+      Writable startValue = new Text("Value:" + start);
+      int i = 0;
+      while (i++ < ITERATIONS) {
+        IntWritable key = new IntWritable(start);
+        Writable value = startValue;
+        while (reader.next(key, value)) {
+          assertNotNull(key);
+          assertNotNull(value);
+        }
+        reader.reset();
+      }
+      assertTrue("reader seek error !!!",
+          reader.seek(new IntWritable(SIZE / 2)));
+      assertFalse("reader seek error !!!",
+          reader.seek(new IntWritable(SIZE * 2)));
+    } catch (IOException ex) {
+      fail("testReaderKeyIteration error !!!");
+    }
+  }
+
+  /**
+   * test {@code MapFile.fix} method
+   */
+  @Test
+  public void testFix() {
+    final String INDEX_LESS_MAP_FILE = "testFix.mapfile";
+    int PAIR_SIZE = 20;
+    try {
+      FileSystem fs = FileSystem.getLocal(conf);
+      Path dir = new Path(TEST_DIR, INDEX_LESS_MAP_FILE);
+      MapFile.Writer writer = createWriter(INDEX_LESS_MAP_FILE,
+          IntWritable.class, Text.class);
+      for (int i = 0; i < PAIR_SIZE; i++)
+        writer.append(new IntWritable(0), new Text("value"));
+      writer.close();
+
+      File indexFile = new File(".", "." + INDEX_LESS_MAP_FILE + "/index");
+      boolean isDeleted = false;
+      if (indexFile.exists())
+        isDeleted = indexFile.delete();
+
+      if (isDeleted)
+        assertTrue("testFix error !!!",
+            MapFile.fix(fs, dir, IntWritable.class, Text.class, true, conf) == PAIR_SIZE);
+    } catch (Exception ex) {
+      fail("testFix error !!!");
+    }
+  }
+  /**
+   * test the deprecated {@code MapFile.Writer} and {@code MapFile.Reader} constructors
+   */
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testDeprecatedConstructors() {
+    String path = new Path(TEST_DIR, "writes.mapfile").toString();
+    try {
+      FileSystem fs = FileSystem.getLocal(conf);
+      MapFile.Writer writer = new MapFile.Writer(conf, fs, path,
+          IntWritable.class, Text.class, CompressionType.RECORD);
+      assertNotNull(writer);
+      writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
+          Text.class, CompressionType.RECORD, defaultProgressable);
+      assertNotNull(writer);
+      writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
+          Text.class, CompressionType.RECORD, defaultCodec, defaultProgressable);
+      assertNotNull(writer);
+      writer = new MapFile.Writer(conf, fs, path,
+          WritableComparator.get(Text.class), Text.class);
+      assertNotNull(writer);
+      writer = new MapFile.Writer(conf, fs, path,
+          WritableComparator.get(Text.class), Text.class,
+          SequenceFile.CompressionType.RECORD);
+      assertNotNull(writer);
+      writer = new MapFile.Writer(conf, fs, path,
+          WritableComparator.get(Text.class), Text.class,
+          CompressionType.RECORD, defaultProgressable);
+      assertNotNull(writer);
+      writer.close();
+
+      MapFile.Reader reader = new MapFile.Reader(fs, path,
+          WritableComparator.get(IntWritable.class), conf);
+      assertNotNull(reader);
+      assertNotNull("reader key is null !!!", reader.getKeyClass());
+      assertNotNull("reader value is null", reader.getValueClass());
+
+    } catch (IOException e) {
+      fail(e.getMessage());
+    }
+  }
+  
+  /**
+   * test {@code MapFile.Writer} constructor 
+   * with IllegalArgumentException  
+   *  
+   */
+  @Test
+  public void testKeyLessWriterCreation() {
+    MapFile.Writer writer = null;
+    try {
+      writer = new MapFile.Writer(conf, TEST_DIR);
+      fail("fail in testKeyLessWriterCreation !!!");
+    } catch (IllegalArgumentException ex) {
+    } catch (Exception e) {
+      fail("fail in testKeyLessWriterCreation. Other ex !!!");
+    } finally {
+      if (writer != null)
+        try {
+          writer.close();
+        } catch (IOException e) {
+        }
+    }
+  }
+  /**
+   * test {@code MapFile.Writer} constructor with IOException
+   */
+  @Test
+  public void testPathExplosionWriterCreation() {
+    Path path = new Path(TEST_DIR, "testPathExplosionWriterCreation.mapfile");
+    String TEST_ERROR_MESSAGE = "Mkdirs failed to create directory "
+        + path.getName();
+    MapFile.Writer writer = null;
+    try {
+      FileSystem fsSpy = spy(FileSystem.get(conf));
+      Path pathSpy = spy(path);
+      when(fsSpy.mkdirs(path)).thenThrow(new IOException(TEST_ERROR_MESSAGE));
+
+      when(pathSpy.getFileSystem(conf)).thenReturn(fsSpy);
+
+      writer = new MapFile.Writer(conf, pathSpy,
+          MapFile.Writer.keyClass(IntWritable.class),
+          MapFile.Writer.valueClass(IntWritable.class));
+      fail("fail in testPathExplosionWriterCreation !!!");
+    } catch (IOException ex) {
+      assertEquals("testPathExplosionWriterCreation ex message error !!!",
+          TEST_ERROR_MESSAGE, ex.getMessage());
+    } catch (Exception e) {
+      fail("fail in testPathExplosionWriterCreation. Other ex !!!");
+    } finally {
+      if (writer != null)
+        try {
+          writer.close();
+        } catch (IOException e) {
+        }
+    }
+  }
+
+  /**
+   * test {@code MapFile.Writer.append} method with keys in descending order
+   */
+  @Test
+  public void testDescOrderWithThrowExceptionWriterAppend() {
+    try {
+      MapFile.Writer writer = createWriter(".mapfile", IntWritable.class,
+          Text.class);
+      writer.append(new IntWritable(2), new Text("value: " + 1));
+      writer.append(new IntWritable(2), new Text("value: " + 2));
+      writer.append(new IntWritable(2), new Text("value: " + 4));
+      writer.append(new IntWritable(1), new Text("value: " + 3));
+      fail("testDescOrderWithThrowExceptionWriterAppend: expected IOException was not thrown !!!");
+    } catch (IOException ex) {
+    } catch (Exception e) {
+      fail("testDescOrderWithThrowExceptionWriterAppend other ex throw !!!");
+    }
+  }
+
+  @Test
+  public void testMainMethodMapFile() {
+    String path = new Path(TEST_DIR, "mainMethodMapFile.mapfile").toString();
+    String inFile = "mainMethodMapFile.mapfile";
+    String outFile = "mainMethodMapFile.mapfile";
+    String[] args = { path, outFile };
+    try {
+      MapFile.Writer writer = createWriter(inFile, IntWritable.class,
+          Text.class);
+      writer.append(new IntWritable(1), new Text("test_text1"));
+      writer.append(new IntWritable(2), new Text("test_text2"));
+      writer.close();
+      MapFile.main(args);
+    } catch (Exception ex) {
+      fail("testMainMethodMapFile error !!!");
+    }
+  }
+
   /**
    * Test getClosest feature.
+   * 
    * @throws Exception
    */
+  @Test
+  @SuppressWarnings("deprecation")
   public void testGetClosest() throws Exception {
-    // Write a mapfile of simple data: keys are 
-    Path dirName = new Path(System.getProperty("test.build.data",".") +
-      getName() + ".mapfile"); 
+    // Write a mapfile of simple data: keys are
+    Path dirName = new Path(TEST_DIR, "testGetClosest.mapfile");
     FileSystem fs = FileSystem.getLocal(conf);
     Path qualifiedDirName = fs.makeQualified(dirName);
     // Make an index entry for every third insertion.
     MapFile.Writer.setIndexInterval(conf, 3);
     MapFile.Writer writer = new MapFile.Writer(conf, fs,
-      qualifiedDirName.toString(), Text.class, Text.class);
+        qualifiedDirName.toString(), Text.class, Text.class);
     // Assert that the index interval is 3, as set above
     assertEquals(3, writer.getIndexInterval());
     // Add entries up to 100 in intervals of ten.
@@ -51,74 +614,84 @@ public class TestMapFile extends TestCas
     }
     writer.close();
     // Now do getClosest on created mapfile.
-    MapFile.Reader reader = new MapFile.Reader(fs, qualifiedDirName.toString(),
-      conf);
+    MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
+    try {
     Text key = new Text("55");
     Text value = new Text();
-    Text closest = (Text)reader.getClosest(key, value);
+    Text closest = (Text) reader.getClosest(key, value);
     // Assert that closest after 55 is 60
     assertEquals(new Text("60"), closest);
     // Get closest that falls before the passed key: 50
-    closest = (Text)reader.getClosest(key, value, true);
+    closest = (Text) reader.getClosest(key, value, true);
     assertEquals(new Text("50"), closest);
     // Test get closest when we pass explicit key
     final Text TWENTY = new Text("20");
-    closest = (Text)reader.getClosest(TWENTY, value);
+    closest = (Text) reader.getClosest(TWENTY, value);
     assertEquals(TWENTY, closest);
-    closest = (Text)reader.getClosest(TWENTY, value, true);
+    closest = (Text) reader.getClosest(TWENTY, value, true);
     assertEquals(TWENTY, closest);
-    // Test what happens at boundaries.  Assert if searching a key that is
+    // Test what happens at boundaries. Assert if searching a key that is
     // less than first key in the mapfile, that the first key is returned.
     key = new Text("00");
-    closest = (Text)reader.getClosest(key, value);
+    closest = (Text) reader.getClosest(key, value);
     assertEquals(FIRST_KEY, Integer.parseInt(closest.toString()));
-    
-    // If we're looking for the first key before, and we pass in a key before 
+
+    // If we're looking for the first key before, and we pass in a key before
     // the first key in the file, we should get null
-    closest = (Text)reader.getClosest(key, value, true);
+    closest = (Text) reader.getClosest(key, value, true);
     assertNull(closest);
-    
+
     // Assert that null is returned if key is > last entry in mapfile.
     key = new Text("99");
-    closest = (Text)reader.getClosest(key, value);
+    closest = (Text) reader.getClosest(key, value);
     assertNull(closest);
 
     // If we were looking for the key before, we should get the last key
-    closest = (Text)reader.getClosest(key, value, true);
+    closest = (Text) reader.getClosest(key, value, true);
     assertEquals(new Text("90"), closest);
+    } finally {
+      reader.close();
+    }
   }
 
+  @Test
+  @SuppressWarnings("deprecation")
   public void testMidKey() throws Exception {
-    // Write a mapfile of simple data: keys are 
-    Path dirName = new Path(System.getProperty("test.build.data",".") +
-      getName() + ".mapfile"); 
+    // Write a mapfile of simple data: keys are
+    Path dirName = new Path(TEST_DIR, "testMidKey.mapfile");
     FileSystem fs = FileSystem.getLocal(conf);
     Path qualifiedDirName = fs.makeQualified(dirName);
- 
+
     MapFile.Writer writer = new MapFile.Writer(conf, fs,
-      qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
+        qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
     writer.append(new IntWritable(1), new IntWritable(1));
     writer.close();
     // Now do getClosest on created mapfile.
-    MapFile.Reader reader = new MapFile.Reader(fs, qualifiedDirName.toString(),
-      conf);
-    assertEquals(new IntWritable(1), reader.midKey());
+    MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
+    try {
+      assertEquals(new IntWritable(1), reader.midKey());
+    } finally {
+      reader.close();
+    }
   }
 
-
+  @Test
+  @SuppressWarnings("deprecation")
   public void testMidKeyEmpty() throws Exception {
-    // Write a mapfile of simple data: keys are 
-    Path dirName = new Path(System.getProperty("test.build.data",".") +
-      getName() + ".mapfile"); 
+    // Write a mapfile of simple data: keys are
+    Path dirName = new Path(TEST_DIR, "testMidKeyEmpty.mapfile");
     FileSystem fs = FileSystem.getLocal(conf);
     Path qualifiedDirName = fs.makeQualified(dirName);
- 
+
     MapFile.Writer writer = new MapFile.Writer(conf, fs,
-      qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
+        qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
     writer.close();
     // Now do getClosest on created mapfile.
-    MapFile.Reader reader = new MapFile.Reader(fs, qualifiedDirName.toString(),
-      conf);
-    assertEquals(null, reader.midKey());
+    MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
+    try {
+      assertNull(reader.midKey());
+    } finally {
+      reader.close();
+    }
   }
 }
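
A minimal, self-contained sketch (not part of this patch) of the MapFile round trip that the
new tests above exercise. The class name MapFileSketch and the /tmp/sketch.mapfile path are
illustrative assumptions only; the API calls themselves are the ones used in the tests.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.Text;

public class MapFileSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path dir = new Path("/tmp/sketch.mapfile");   // hypothetical location

    // Keys must be appended in ascending order; appending out of order raises an
    // IOException, which testDescOrderWithThrowExceptionWriterAppend relies on.
    MapFile.Writer writer = new MapFile.Writer(conf, dir,
        MapFile.Writer.keyClass(IntWritable.class),
        MapFile.Writer.valueClass(Text.class));
    for (int i = 0; i < 10; i++) {
      writer.append(new IntWritable(i), new Text("value" + i));
    }
    writer.close();

    // getClosest(key, value) returns the smallest key at or after the probe;
    // with the 'before' flag it returns the largest key at or before the probe.
    MapFile.Reader reader = new MapFile.Reader(dir, conf);
    try {
      Text value = new Text();
      IntWritable closest = (IntWritable) reader.getClosest(new IntWritable(5), value);
      System.out.println("closest=" + closest + ", midKey=" + reader.midKey());
    } finally {
      reader.close();
    }
  }
}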

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java Wed Oct 30 22:21:59 2013
@@ -20,6 +20,8 @@ package org.apache.hadoop.io;
 
 import java.io.*;
 import java.util.*;
+import java.util.concurrent.atomic.AtomicReference;
+
 import junit.framework.TestCase;
 
 import org.apache.commons.logging.*;
@@ -51,6 +53,39 @@ public class TestSetFile extends TestCas
       fs.close();
     }
   }
+  
+  /**
+   * test {@code SetFile.Reader} methods 
+   * next(), get() in combination 
+   */
+  public void testSetFileAccessMethods() {    
+    try {             
+      FileSystem fs = FileSystem.getLocal(conf);
+      int size = 10;
+      writeData(fs, size);
+      SetFile.Reader reader = createReader(fs);
+      assertTrue("testSetFileAccessMethods error 1 !!!", reader.next(new IntWritable(0)));
+      // note: reader.get(i) returns i + 1 here
+      assertEquals("testSetFileAccessMethods error 2 !!!", new IntWritable(size/2 + 1), reader.get(new IntWritable(size/2)));
+      assertNull("testSetFileAccessMethods error 3 !!!", reader.get(new IntWritable(size*2)));
+    } catch (Exception ex) {
+      fail("testSetFileAccessMethods error !!!");
+    }
+  }
+
+  private SetFile.Reader createReader(FileSystem fs) throws IOException  {
+    return new SetFile.Reader(fs, FILE, 
+        WritableComparator.get(IntWritable.class), conf);    
+  }
+  
+  @SuppressWarnings("deprecation")
+  private void writeData(FileSystem fs, int elementSize) throws IOException {
+    MapFile.delete(fs, FILE);    
+    SetFile.Writer writer = new SetFile.Writer(fs, FILE, IntWritable.class);
+    for (int i = 0; i < elementSize; i++)
+      writer.append(new IntWritable(i));
+    writer.close();    
+  }
 
   private static RandomDatum[] generate(int count) {
     LOG.info("generating " + count + " records in memory");
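
Likewise, a hedged sketch (again not part of the patch) of the SetFile usage that the new
testSetFileAccessMethods covers. The class name SetFileSketch and the /tmp/sketch.setfile
path are assumptions; the constructors and reader calls mirror those in the test.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SetFile;
import org.apache.hadoop.io.WritableComparator;

public class SetFileSketch {
  @SuppressWarnings("deprecation")
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    String file = "/tmp/sketch.setfile";          // hypothetical location

    // A SetFile stores keys only, appended in sorted order.
    SetFile.Writer writer = new SetFile.Writer(fs, file, IntWritable.class);
    for (int i = 0; i < 10; i++) {
      writer.append(new IntWritable(i));
    }
    writer.close();

    // next(key) advances through the stored keys; get(key) looks one up.
    SetFile.Reader reader = new SetFile.Reader(fs, file,
        WritableComparator.get(IntWritable.class), conf);
    IntWritable key = new IntWritable();
    while (reader.next(key)) {
      System.out.println(key);
    }
    reader.close();
  }
}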

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java Wed Oct 30 22:21:59 2013
@@ -19,11 +19,12 @@
 package org.apache.hadoop.io;
 
 import junit.framework.TestCase;
-
 import java.io.IOException;
+import java.nio.BufferUnderflowException;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
 import java.util.Random;
+import com.google.common.primitives.Bytes;
 
 /** Unit tests for LargeUTF8. */
 public class TestText extends TestCase {
@@ -321,7 +322,81 @@ public class TestText extends TestCase {
       (new Text("foo"),
        "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.io.Text\"}");
   }
-
+  
+  /**
+   * test {@code Text.charAt()} method
+   */
+  public void testCharAt() {
+    String line = "adsawseeeeegqewgasddga";
+    Text text = new Text(line);
+    for (int i = 0; i < line.length(); i++) {
+      assertTrue("testCharAt error1 !!!", text.charAt(i) == line.charAt(i));
+    }    
+    assertEquals("testCharAt error2 !!!", -1, text.charAt(-1));    
+    assertEquals("testCharAt error3 !!!", -1, text.charAt(100));
+  }    
+  
+  /**
+   * test {@code Text} readFields/write operations
+   */
+  public void testReadWriteOperations() {
+    String line = "adsawseeeeegqewgasddga";
+    byte[] inputBytes = line.getBytes();       
+    inputBytes = Bytes.concat(new byte[] {(byte)22}, inputBytes); // prepend the vint-encoded length (22)
+    
+    DataInputBuffer in = new DataInputBuffer();
+    DataOutputBuffer out = new DataOutputBuffer();
+    Text text = new Text(line);
+    try {      
+      in.reset(inputBytes, inputBytes.length);
+      text.readFields(in);      
+    } catch(Exception ex) {
+      fail("testReadWriteOperations error !!!");
+    }    
+    try {
+      text.write(out);
+    } catch(IOException ex) {      
+    } catch(Exception ex) {
+      fail("testReadWriteOperations error !!!");
+    }        
+  }
+  
+  /**
+   * test {@code Text.bytesToCodePoint(bytes)}
+   * on a buffer large enough that no {@code BufferUnderflowException} occurs
+   * 
+   */
+  public void testBytesToCodePoint() {
+    try {
+      ByteBuffer bytes = ByteBuffer.wrap(new byte[] {-2, 45, 23, 12, 76, 89});                                      
+      Text.bytesToCodePoint(bytes);      
+      assertTrue("testBytesToCodePoint error !!!", bytes.position() == 6 );                      
+    } catch (BufferUnderflowException ex) {
+      fail("testBytesToCodePoint unexp exception");
+    } catch (Exception e) {
+      fail("testBytesToCodePoint unexp exception");
+    }    
+  }
+  
+  public void testbytesToCodePointWithInvalidUTF() {
+    try {                 
+      Text.bytesToCodePoint(ByteBuffer.wrap(new byte[] {-2}));
+      fail("testbytesToCodePointWithInvalidUTF error: expected exception was not thrown !!!");
+    } catch (BufferUnderflowException ex) {      
+    } catch(Exception e) {
+      fail("testbytesToCodePointWithInvalidUTF error unexp exception !!!");
+    }
+  }
+  
+  public void testUtf8Length() {         
+    assertEquals("testUtf8Length1 error   !!!", 1, Text.utf8Length(new String(new char[]{(char)1})));
+    assertEquals("testUtf8Length127 error !!!", 1, Text.utf8Length(new String(new char[]{(char)127})));
+    assertEquals("testUtf8Length128 error !!!", 2, Text.utf8Length(new String(new char[]{(char)128})));
+    assertEquals("testUtf8Length193 error !!!", 2, Text.utf8Length(new String(new char[]{(char)193})));    
+    assertEquals("testUtf8Length225 error !!!", 2, Text.utf8Length(new String(new char[]{(char)225})));
+    assertEquals("testUtf8Length254 error !!!", 2, Text.utf8Length(new String(new char[]{(char)254})));                 
+  }
+  
   public static void main(String[] args)  throws Exception
   {
     TestText test = new TestText("main");
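
And a short sketch (not part of the patch) of the Text helpers the new tests touch. The class
name TextSketch and the sample string are arbitrary assumptions; charAt, utf8Length and
bytesToCodePoint are the methods exercised by the added tests.

import java.nio.ByteBuffer;
import org.apache.hadoop.io.Text;

public class TextSketch {
  public static void main(String[] args) {
    Text t = new Text("hadoop");

    // charAt(position) returns the Unicode code point at that byte offset,
    // or -1 for an out-of-range position.
    System.out.println(t.charAt(0));    // 104, the code point of 'h'
    System.out.println(t.charAt(-1));   // -1

    // utf8Length reports the UTF-8 encoded length of a String.
    System.out.println(Text.utf8Length("hadoop"));   // 6 for plain ASCII

    // bytesToCodePoint decodes one UTF-8 sequence per call and advances
    // the buffer position past the decoded bytes.
    ByteBuffer buf = ByteBuffer.wrap(t.getBytes(), 0, t.getLength());
    while (buf.hasRemaining()) {
      System.out.println(Text.bytesToCodePoint(buf));
    }
  }
}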

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Wed Oct 30 22:21:59 2013
@@ -280,6 +280,45 @@ public class TestNativeIO {
     FileUtil.setExecutable(testFile, true);
     assertTrue(NativeIO.Windows.access(testFile.getAbsolutePath(),
         NativeIO.Windows.AccessRight.ACCESS_EXECUTE));
+
+    // Validate that access checks work as expected for long paths
+
+    // Assemble a path longer than 260 chars (MAX_PATH)
+    String testFileRelativePath = "";
+    for (int i = 0; i < 15; ++i) {
+      testFileRelativePath += "testfileaccessfolder\\";
+    }
+    testFileRelativePath += "testfileaccess";
+    testFile = new File(TEST_DIR, testFileRelativePath);
+    assertTrue(testFile.getParentFile().mkdirs());
+    assertTrue(testFile.createNewFile());
+
+    // Validate ACCESS_READ
+    FileUtil.setReadable(testFile, false);
+    assertFalse(NativeIO.Windows.access(testFile.getAbsolutePath(),
+        NativeIO.Windows.AccessRight.ACCESS_READ));
+
+    FileUtil.setReadable(testFile, true);
+    assertTrue(NativeIO.Windows.access(testFile.getAbsolutePath(),
+        NativeIO.Windows.AccessRight.ACCESS_READ));
+
+    // Validate ACCESS_WRITE
+    FileUtil.setWritable(testFile, false);
+    assertFalse(NativeIO.Windows.access(testFile.getAbsolutePath(),
+        NativeIO.Windows.AccessRight.ACCESS_WRITE));
+
+    FileUtil.setWritable(testFile, true);
+    assertTrue(NativeIO.Windows.access(testFile.getAbsolutePath(),
+        NativeIO.Windows.AccessRight.ACCESS_WRITE));
+
+    // Validate ACCESS_EXECUTE
+    FileUtil.setExecutable(testFile, false);
+    assertFalse(NativeIO.Windows.access(testFile.getAbsolutePath(),
+        NativeIO.Windows.AccessRight.ACCESS_EXECUTE));
+
+    FileUtil.setExecutable(testFile, true);
+    assertTrue(NativeIO.Windows.access(testFile.getAbsolutePath(),
+        NativeIO.Windows.AccessRight.ACCESS_EXECUTE));
   }
 
   @Test (timeout = 30000)