Posted to commits@hbase.apache.org by en...@apache.org on 2013/11/08 20:37:36 UTC

svn commit: r1540160 - in /hbase/trunk: hbase-common/src/main/java/org/apache/hadoop/hbase/util/ hbase-common/src/test/java/org/apache/hadoop/hbase/ hbase-common/src/test/java/org/apache/hadoop/hbase/util/ hbase-server/src/main/java/org/apache/hadoop/h...

Author: enis
Date: Fri Nov  8 19:37:35 2013
New Revision: 1540160

URL: http://svn.apache.org/r1540160
Log:
HBASE-9908 [WINDOWS] Fix filesystem / classloader related unit tests

Modified:
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
    hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
    hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
    hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java?rev=1540160&r1=1540159&r2=1540160&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java Fri Nov  8 19:37:35 2013
@@ -152,7 +152,7 @@ public class CoprocessorClassLoader exte
     synchronized (parentDirLockSet) {
       if (!parentDirLockSet.contains(parentDirStr)) {
         Path parentDir = new Path(parentDirStr);
-        FileSystem fs = parentDir.getFileSystem(conf);
+        FileSystem fs = FileSystem.getLocal(conf);
         fs.delete(parentDir, true); // it's ok if the dir doesn't exist now
         parentDirLockSet.add(parentDirStr);
         if (!fs.mkdirs(parentDir) && !fs.getFileStatus(parentDir).isDir()) {

Modified: hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java?rev=1540160&r1=1540159&r2=1540160&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java (original)
+++ hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java Fri Nov  8 19:37:35 2013
@@ -27,6 +27,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 
 /**
@@ -39,6 +40,24 @@ import org.apache.hadoop.fs.Path;
 public class HBaseCommonTestingUtility {
   protected static final Log LOG = LogFactory.getLog(HBaseCommonTestingUtility.class);
 
+  protected Configuration conf;
+
+  public HBaseCommonTestingUtility() {
+    this(HBaseConfiguration.create());
+  }
+
+  public HBaseCommonTestingUtility(Configuration conf) {
+    this.conf = conf;
+  }
+
+  /**
+   * Returns this class's instance of {@link Configuration}.
+   * @return Instance of Configuration.
+   */
+  public Configuration getConfiguration() {
+    return this.conf;
+  }
+
   /**
    * System property key to get base test directory value
    */
@@ -95,9 +114,19 @@ public class HBaseCommonTestingUtility {
     // Set this property so if mapreduce jobs run, they will use this as their home dir.
     System.setProperty("test.build.dir", this.dataTestDir.toString());
     if (deleteOnExit()) this.dataTestDir.deleteOnExit();
+
+    createSubDir("hbase.local.dir", testPath, "hbase-local-dir");
+
     return testPath;
   }
 
+  protected void createSubDir(String propertyName, Path parent, String subDirName){
+    Path newPath= new Path(parent, subDirName);
+    File newDir = new File(newPath.toString()).getAbsoluteFile();
+    if (deleteOnExit()) newDir.deleteOnExit();
+    conf.set(propertyName, newDir.getAbsolutePath());
+  }
+
   /**
    * @return True if we should delete testing dirs on exit.
    */
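
Note: HBaseCommonTestingUtility now owns the Configuration (new no-arg and Configuration-taking constructors plus getConfiguration()) and, when the data test dir is set up, points hbase.local.dir at a per-test subdirectory via the new createSubDir(). A hedged usage sketch, assuming getDataTestDir() performs that setup as the tests further down rely on; the example class itself is illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseCommonTestingUtility;

    public class LocalDirConfigExample {
      public static void main(String[] args) throws Exception {
        HBaseCommonTestingUtility util = new HBaseCommonTestingUtility();
        util.getDataTestDir();                        // sets up the data test dir and hbase-local-dir
        Configuration conf = util.getConfiguration(); // same Configuration the utility wrote to
        // hbase.local.dir now points at a per-test directory under the data test dir.
        System.out.println(conf.get("hbase.local.dir"));
      }
    }
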

Modified: hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java?rev=1540160&r1=1540159&r2=1540160&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java (original)
+++ hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java Fri Nov  8 19:37:35 2013
@@ -30,7 +30,6 @@ import java.io.FileOutputStream;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.io.IOUtils;
 import org.junit.Test;
@@ -41,9 +40,12 @@ import org.junit.experimental.categories
  */
 @Category(SmallTests.class)
 public class TestCoprocessorClassLoader {
-  private static final Configuration conf = HBaseConfiguration.create();
 
   private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
+  private static final Configuration conf = TEST_UTIL.getConfiguration();
+  static {
+    TEST_UTIL.getDataTestDir(); // prepare data test dir and hbase local dir
+  }
 
   @Test
   public void testCleanupOldJars() throws Exception {

Modified: hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java?rev=1540160&r1=1540159&r2=1540160&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java (original)
+++ hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java Fri Nov  8 19:37:35 2013
@@ -27,7 +27,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -39,9 +38,8 @@ import org.junit.experimental.categories
 public class TestDynamicClassLoader {
   private static final Log LOG = LogFactory.getLog(TestDynamicClassLoader.class);
 
-  private static final Configuration conf = HBaseConfiguration.create();
-
   private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
+  private static final Configuration conf = TEST_UTIL.getConfiguration();
 
   static {
     conf.set("hbase.dynamic.jars.dir", TEST_UTIL.getDataTestDir().toString());

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java?rev=1540160&r1=1540159&r2=1540160&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java Fri Nov  8 19:37:35 2013
@@ -71,7 +71,6 @@ import org.apache.hadoop.hbase.protobuf.
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-import org.apache.hadoop.hbase.regionserver.compactions.Compactor;
 import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
 import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
 import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
@@ -819,7 +818,7 @@ public class HStore implements Store {
             .build();
     return w;
   }
-  
+
   private HFileContext createFileContext(Compression.Algorithm compression,
       boolean includeMVCCReadpoint, boolean includesTag) {
     if (compression == null) {
@@ -1014,6 +1013,7 @@ public class HStore implements Store {
         for (Path newFile : newFiles) {
           // Create storefile around what we wrote with a reader on it.
           StoreFile sf = createStoreFileAndReader(newFile);
+          sf.closeReader(true);
           sfs.add(sf);
         }
         return sfs;
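
Note: the functional change in HStore is the added sf.closeReader(true): the reader opened as a side effect of createStoreFileAndReader() is released as soon as the flushed file has been wrapped. A plausible reading, given the [WINDOWS] scope of HBASE-9908, is that Windows refuses to rename or delete a file while a handle to it is still open. The toy example below (plain java.io, not HBase code) illustrates that platform behaviour:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;

    public class OpenHandleRename {
      public static void main(String[] args) throws IOException {
        File src = File.createTempFile("storefile", ".tmp");
        File dst = new File(src.getParentFile(), src.getName() + ".moved");
        FileInputStream in = new FileInputStream(src);
        // On Windows this rename typically fails while the handle is still open;
        // on POSIX filesystems it usually succeeds.
        System.out.println("rename with open handle: " + src.renameTo(dst));
        in.close();
        // Once the handle is closed the rename works on every platform.
        File remaining = dst.exists() ? dst : src;
        File dst2 = new File(src.getParentFile(), src.getName() + ".moved2");
        System.out.println("rename after close: " + remaining.renameTo(dst2));
        dst2.delete();
      }
    }
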

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java?rev=1540160&r1=1540159&r2=1540160&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java Fri Nov  8 19:37:35 2013
@@ -125,7 +125,6 @@ import org.apache.zookeeper.ZooKeeper.St
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class HBaseTestingUtility extends HBaseCommonTestingUtility {
-   protected Configuration conf;
    private MiniZooKeeperCluster zkCluster = null;
 
   /**
@@ -227,7 +226,7 @@ public class HBaseTestingUtility extends
   }
 
   public HBaseTestingUtility(Configuration conf) {
-    this.conf = conf;
+    super(conf);
 
     // a hbase checksum verification failure will cause unit tests to fail
     ChecksumUtil.generateExceptionForChecksumFailureForTest(true);
@@ -270,8 +269,9 @@ public class HBaseTestingUtility extends
    * <code>Configuration c = new Configuration(INSTANCE.getConfiguration());</code>
    * @return Instance of Configuration.
    */
+  @Override
   public Configuration getConfiguration() {
-    return this.conf;
+    return super.getConfiguration();
   }
 
   public void setHBaseCluster(HBaseCluster hbaseCluster) {
@@ -316,19 +316,9 @@ public class HBaseTestingUtility extends
       "mapred.local.dir",
       testPath, "mapred-local-dir");
 
-    createSubDir(
-      "hbase.local.dir",
-      testPath, "hbase-local-dir");
     return testPath;
   }
 
-  private void createSubDir(String propertyName, Path parent, String subDirName){
-    Path newPath= new Path(parent, subDirName);
-    File newDir = new File(newPath.toString()).getAbsoluteFile();
-    if (deleteOnExit()) newDir.deleteOnExit();
-    conf.set(propertyName, newDir.getAbsolutePath());
-  }
-
   private void createSubDirAndSystemProperty(
     String propertyName, Path parent, String subDirName){
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java?rev=1540160&r1=1540159&r2=1540160&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java Fri Nov  8 19:37:35 2013
@@ -33,11 +33,11 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.Stoppable;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
@@ -343,7 +343,7 @@ public class TestHFileArchiving {
     final long TEST_TIME = 20 * 1000;
 
     Configuration conf = UTIL.getMiniHBaseCluster().getMaster().getConfiguration();
-    Path rootDir = UTIL.getDataTestDir("testCleaningRace");
+    Path rootDir = UTIL.getDataTestDirOnTestFS("testCleaningRace");
     FileSystem fs = UTIL.getTestFileSystem();
 
     Path archiveDir = new Path(rootDir, HConstants.HFILE_ARCHIVE_DIRECTORY);
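
Note: several tests in this commit switch from getDataTestDir() to getDataTestDirOnTestFS(). The former returns a directory under the local build tree, while the latter returns a directory on the test filesystem (the mini DFS cluster when one is running, otherwise the local filesystem), which is what the archiving and cleaner code actually operates on. A small sketch of the distinction, assuming the two accessors behave as the replaced call sites suggest:

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseTestingUtility;

    public class TestDirExample {
      public static void main(String[] args) throws Exception {
        HBaseTestingUtility util = new HBaseTestingUtility();
        // Directory under the local build tree, always on the local filesystem.
        Path localDir = util.getDataTestDir("example");
        // Directory on the *test* filesystem: the mini DFS cluster if one has been
        // started, otherwise the local filesystem.
        Path onTestFs = util.getDataTestDirOnTestFS("example");
        System.out.println(localDir + " vs " + onTestFs);
      }
    }
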

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java?rev=1540160&r1=1540159&r2=1540160&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java Fri Nov  8 19:37:35 2013
@@ -51,7 +51,6 @@ import org.apache.hadoop.hbase.util.Byte
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.hbase.HConstants;
 
 /**
  * This class runs performance benchmarks for {@link HLog}.
@@ -62,7 +61,7 @@ import org.apache.hadoop.hbase.HConstant
 public final class HLogPerformanceEvaluation extends Configured implements Tool {
   static final Log LOG = LogFactory.getLog(HLogPerformanceEvaluation.class.getName());
 
-  private final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private HBaseTestingUtility TEST_UTIL;
 
   static final String TABLE_NAME = "HLogPerformanceEvaluation";
   static final String QUALIFIER_PREFIX = "q";
@@ -72,6 +71,12 @@ public final class HLogPerformanceEvalua
   private int valueSize = 512;
   private int keySize = 16;
 
+  @Override
+  public void setConf(Configuration conf) {
+    super.setConf(conf);
+    TEST_UTIL = new HBaseTestingUtility(conf);
+  }
+
   /**
    * Perform HLog.append() of Put object, for the number of iterations requested.
    * Keys and Values are generated randomly, the number of column families,
@@ -93,6 +98,7 @@ public final class HLogPerformanceEvalua
       this.htd = htd;
     }
 
+    @Override
     public void run() {
       byte[] key = new byte[keySize];
       byte[] value = new byte[valueSize];
@@ -184,7 +190,7 @@ public final class HLogPerformanceEvalua
     LOG.info("FileSystem: " + fs);
     try {
       if (rootRegionDir == null) {
-        rootRegionDir = TEST_UTIL.getDataTestDir("HLogPerformanceEvaluation");
+        rootRegionDir = TEST_UTIL.getDataTestDirOnTestFS("HLogPerformanceEvaluation");
       }
       rootRegionDir = rootRegionDir.makeQualified(fs);
       cleanRegionRootDir(fs, rootRegionDir);
@@ -193,6 +199,7 @@ public final class HLogPerformanceEvalua
       final long whenToRoll = roll;
       HLog hlog = new FSHLog(fs, rootRegionDir, "wals", getConf()) {
         int appends = 0;
+        @Override
         protected void doWrite(HRegionInfo info, HLogKey logKey, WALEdit logEdit,
             HTableDescriptor htd)
         throws IOException {
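
Note: HLogPerformanceEvaluation now builds its HBaseTestingUtility in setConf() instead of at field-initialization time, so the utility shares the Configuration that ToolRunner injects (including any generic options given on the command line). A minimal sketch of that Configured/Tool pattern, with a placeholder field standing in for the testing utility:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;

    public class ConfAwareTool extends Configured implements Tool {
      private String helper; // stand-in for the HBaseTestingUtility field

      @Override
      public void setConf(Configuration conf) {
        super.setConf(conf);
        if (conf != null) { // Configured's no-arg constructor calls setConf(null)
          // Build helpers here, not at field-initialization time, so they see the
          // Configuration that ToolRunner injects.
          helper = "defaultFS = " + conf.get("fs.defaultFS", "file:///");
        }
      }

      @Override
      public int run(String[] args) {
        System.out.println(helper);
        return 0;
      }

      public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new ConfAwareTool(), args));
      }
    }
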

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java?rev=1540160&r1=1540159&r2=1540160&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java Fri Nov  8 19:37:35 2013
@@ -18,14 +18,13 @@
 
 package org.apache.hadoop.hbase.regionserver.wal;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -51,7 +50,7 @@ public class TestDurability {
   private static FileSystem FS;
   private static MiniDFSCluster CLUSTER;
   private static Configuration CONF;
-  private static final Path DIR = TEST_UTIL.getDataTestDir("TestDurability");
+  private static Path DIR;
 
   private static byte[] FAMILY = Bytes.toBytes("family");
   private static byte[] ROW = Bytes.toBytes("row");
@@ -66,6 +65,7 @@ public class TestDurability {
 
     CLUSTER = TEST_UTIL.getDFSCluster();
     FS = CLUSTER.getFileSystem();
+    DIR = TEST_UTIL.getDataTestDirOnTestFS("TestDurability");
   }
 
   @AfterClass
@@ -162,7 +162,7 @@ public class TestDurability {
           throw new IOException("Failed delete of " + path);
         }
       }
-      return HRegion.createHRegion(info, path, HBaseConfiguration.create(), htd, log);
+      return HRegion.createHRegion(info, path, CONF, htd, log);
     }
 
 }
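
Note: in TestDurability the DIR field can no longer be static final, because getDataTestDirOnTestFS() needs the test filesystem, which only exists once the mini DFS cluster has started; the path is therefore resolved in the @BeforeClass method, and the region-creation helper now reuses the cluster's CONF instead of a fresh HBaseConfiguration so the region lands on the same filesystem the test inspects. A hedged sketch of that ordering; the class and method names here are illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.junit.AfterClass;
    import org.junit.BeforeClass;

    public class MiniClusterDirOrdering {
      private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
      private static Path DIR; // not final: the test filesystem does not exist yet

      @BeforeClass
      public static void setUpBeforeClass() throws Exception {
        TEST_UTIL.startMiniDFSCluster(1);
        // Resolve the directory only after the mini cluster is up, so it points
        // into DFS rather than the local filesystem.
        DIR = TEST_UTIL.getDataTestDirOnTestFS("MiniClusterDirOrdering");
        // Reuse the cluster's Configuration instead of HBaseConfiguration.create(),
        // so code under test talks to the same filesystem the test inspects.
        Configuration conf = TEST_UTIL.getConfiguration();
        System.out.println(DIR + " on " + conf.get("fs.defaultFS"));
      }

      @AfterClass
      public static void tearDownAfterClass() throws Exception {
        TEST_UTIL.shutdownMiniDFSCluster();
      }
    }
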

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java?rev=1540160&r1=1540159&r2=1540160&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java Fri Nov  8 19:37:35 2013
@@ -235,12 +235,14 @@ public class TestAccessController extend
     assertEquals(0, AccessControlLists.getTablePermissions(conf, TEST_TABLE.getTableName()).size());
   }
 
+  @Override
   public void verifyAllowed(PrivilegedExceptionAction action, User... users) throws Exception {
     for (User user : users) {
       verifyAllowed(user, action);
     }
   }
 
+  @Override
   public void verifyDenied(User user, PrivilegedExceptionAction... actions) throws Exception {
     for (PrivilegedExceptionAction action : actions) {
       try {
@@ -291,6 +293,7 @@ public class TestAccessController extend
     }
   }
 
+  @Override
   public void verifyDenied(PrivilegedExceptionAction action, User... users) throws Exception {
     for (User user : users) {
       verifyDenied(user, action);
@@ -300,6 +303,7 @@ public class TestAccessController extend
   @Test
   public void testTableCreate() throws Exception {
     PrivilegedExceptionAction createTable = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testnewtable"));
         htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
@@ -318,6 +322,7 @@ public class TestAccessController extend
   @Test
   public void testTableModify() throws Exception {
     PrivilegedExceptionAction modifyTable = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         HTableDescriptor htd = new HTableDescriptor(TEST_TABLE.getTableName());
         htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
@@ -335,6 +340,7 @@ public class TestAccessController extend
   @Test
   public void testTableDelete() throws Exception {
     PrivilegedExceptionAction deleteTable = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER
             .preDeleteTable(ObserverContext.createAndPrepare(CP_ENV, null), TEST_TABLE.getTableName());
@@ -350,6 +356,7 @@ public class TestAccessController extend
   public void testAddColumn() throws Exception {
     final HColumnDescriptor hcd = new HColumnDescriptor("fam_new");
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preAddColumn(ObserverContext.createAndPrepare(CP_ENV, null), TEST_TABLE.getTableName(),
           hcd);
@@ -366,6 +373,7 @@ public class TestAccessController extend
     final HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAMILY);
     hcd.setMaxVersions(10);
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preModifyColumn(ObserverContext.createAndPrepare(CP_ENV, null),
           TEST_TABLE.getTableName(), hcd);
@@ -380,6 +388,7 @@ public class TestAccessController extend
   @Test
   public void testDeleteColumn() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preDeleteColumn(ObserverContext.createAndPrepare(CP_ENV, null),
           TEST_TABLE.getTableName(), TEST_FAMILY);
@@ -394,6 +403,7 @@ public class TestAccessController extend
   @Test
   public void testTableDisable() throws Exception {
     PrivilegedExceptionAction disableTable = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preDisableTable(ObserverContext.createAndPrepare(CP_ENV, null),
           TEST_TABLE.getTableName());
@@ -402,6 +412,7 @@ public class TestAccessController extend
     };
 
     PrivilegedExceptionAction disableAclTable = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preDisableTable(ObserverContext.createAndPrepare(CP_ENV, null),
             AccessControlLists.ACL_TABLE_NAME);
@@ -419,6 +430,7 @@ public class TestAccessController extend
   @Test
   public void testTableEnable() throws Exception {
     PrivilegedExceptionAction enableTable = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER
             .preEnableTable(ObserverContext.createAndPrepare(CP_ENV, null), TEST_TABLE.getTableName());
@@ -442,6 +454,7 @@ public class TestAccessController extend
     final Map.Entry<HRegionInfo, ServerName> firstRegion = regions.entrySet().iterator().next();
     final ServerName server = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preMove(ObserverContext.createAndPrepare(CP_ENV, null),
           firstRegion.getKey(), server, server);
@@ -465,6 +478,7 @@ public class TestAccessController extend
     final Map.Entry<HRegionInfo, ServerName> firstRegion = regions.entrySet().iterator().next();
 
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preAssign(ObserverContext.createAndPrepare(CP_ENV, null),
           firstRegion.getKey());
@@ -488,6 +502,7 @@ public class TestAccessController extend
     final Map.Entry<HRegionInfo, ServerName> firstRegion = regions.entrySet().iterator().next();
 
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preUnassign(ObserverContext.createAndPrepare(CP_ENV, null),
           firstRegion.getKey(), false);
@@ -511,6 +526,7 @@ public class TestAccessController extend
     final Map.Entry<HRegionInfo, ServerName> firstRegion = regions.entrySet().iterator().next();
 
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preRegionOffline(ObserverContext.createAndPrepare(CP_ENV, null),
           firstRegion.getKey());
@@ -525,6 +541,7 @@ public class TestAccessController extend
   @Test
   public void testBalance() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preBalance(ObserverContext.createAndPrepare(CP_ENV, null));
         return null;
@@ -538,6 +555,7 @@ public class TestAccessController extend
   @Test
   public void testBalanceSwitch() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preBalanceSwitch(ObserverContext.createAndPrepare(CP_ENV, null), true);
         return null;
@@ -551,6 +569,7 @@ public class TestAccessController extend
   @Test
   public void testShutdown() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preShutdown(ObserverContext.createAndPrepare(CP_ENV, null));
         return null;
@@ -564,6 +583,7 @@ public class TestAccessController extend
   @Test
   public void testStopMaster() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preStopMaster(ObserverContext.createAndPrepare(CP_ENV, null));
         return null;
@@ -582,6 +602,7 @@ public class TestAccessController extend
   @Test
   public void testSplit() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preSplit(ObserverContext.createAndPrepare(RCP_ENV, null));
         return null;
@@ -595,6 +616,7 @@ public class TestAccessController extend
   @Test
   public void testSplitWithSplitRow() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preSplit(
             ObserverContext.createAndPrepare(RCP_ENV, null),
@@ -611,6 +633,7 @@ public class TestAccessController extend
   @Test
   public void testFlush() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preFlush(ObserverContext.createAndPrepare(RCP_ENV, null));
         return null;
@@ -624,6 +647,7 @@ public class TestAccessController extend
   @Test
   public void testCompact() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preCompact(ObserverContext.createAndPrepare(RCP_ENV, null), null, null,
           ScanType.COMPACT_RETAIN_DELETES);
@@ -638,6 +662,7 @@ public class TestAccessController extend
   @Test
   public void testPreCompactSelection() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preCompactSelection(ObserverContext.createAndPrepare(RCP_ENV, null), null, null);
         return null;
@@ -662,6 +687,7 @@ public class TestAccessController extend
   public void testRead() throws Exception {
     // get action
     PrivilegedExceptionAction getAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Get g = new Get(Bytes.toBytes("random_row"));
         g.addFamily(TEST_FAMILY);
@@ -678,6 +704,7 @@ public class TestAccessController extend
 
     // action for scanning
     PrivilegedExceptionAction scanAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Scan s = new Scan();
         s.addFamily(TEST_FAMILY);
@@ -707,6 +734,7 @@ public class TestAccessController extend
   public void testWrite() throws Exception {
     // put action
     PrivilegedExceptionAction putAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Put p = new Put(Bytes.toBytes("random_row"));
         p.add(TEST_FAMILY, Bytes.toBytes("Qualifier"), Bytes.toBytes(1));
@@ -723,6 +751,7 @@ public class TestAccessController extend
 
     // delete action
     PrivilegedExceptionAction deleteAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Delete d = new Delete(Bytes.toBytes("random_row"));
         d.deleteFamily(TEST_FAMILY);
@@ -739,6 +768,7 @@ public class TestAccessController extend
 
     // increment action
     PrivilegedExceptionAction incrementAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Increment inc = new Increment(Bytes.toBytes("random_row"));
         inc.addColumn(TEST_FAMILY, Bytes.toBytes("Qualifier"), 1);
@@ -758,6 +788,7 @@ public class TestAccessController extend
   public void testReadWrite() throws Exception {
     // action for checkAndDelete
     PrivilegedExceptionAction checkAndDeleteAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Delete d = new Delete(Bytes.toBytes("random_row"));
         d.deleteFamily(TEST_FAMILY);
@@ -775,6 +806,7 @@ public class TestAccessController extend
 
     // action for checkAndPut()
     PrivilegedExceptionAction checkAndPut = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Put p = new Put(Bytes.toBytes("random_row"));
         p.add(TEST_FAMILY, Bytes.toBytes("Qualifier"), Bytes.toBytes(1));
@@ -794,13 +826,14 @@ public class TestAccessController extend
   @Test
   public void testBulkLoad() throws Exception {
     FileSystem fs = TEST_UTIL.getTestFileSystem();
-    final Path dir = TEST_UTIL.getDataTestDir("testBulkLoad");
+    final Path dir = TEST_UTIL.getDataTestDirOnTestFS("testBulkLoad");
     fs.mkdirs(dir);
     //need to make it globally writable
     //so users creating HFiles have write permissions
     fs.setPermission(dir, FsPermission.valueOf("-rwxrwxrwx"));
 
     PrivilegedExceptionAction bulkLoadAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         int numRows = 3;
 
@@ -907,6 +940,7 @@ public class TestAccessController extend
   public void testAppend() throws Exception {
 
     PrivilegedExceptionAction appendAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         byte[] row = Bytes.toBytes("random_row");
         byte[] qualifier = Bytes.toBytes("q");
@@ -933,6 +967,7 @@ public class TestAccessController extend
   public void testGrantRevoke() throws Exception {
 
     PrivilegedExceptionAction grantAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME);
         try {
@@ -949,6 +984,7 @@ public class TestAccessController extend
     };
 
     PrivilegedExceptionAction revokeAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME);
         try {
@@ -965,6 +1001,7 @@ public class TestAccessController extend
     };
 
     PrivilegedExceptionAction getPermissionsAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME);
         try {
@@ -1016,6 +1053,7 @@ public class TestAccessController extend
 
     // prepare actions:
     PrivilegedExceptionAction putActionAll = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Put p = new Put(Bytes.toBytes("a"));
         p.add(family1, qualifier, Bytes.toBytes("v1"));
@@ -1030,6 +1068,7 @@ public class TestAccessController extend
       }
     };
     PrivilegedExceptionAction putAction1 = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Put p = new Put(Bytes.toBytes("a"));
         p.add(family1, qualifier, Bytes.toBytes("v1"));
@@ -1043,6 +1082,7 @@ public class TestAccessController extend
       }
     };
     PrivilegedExceptionAction putAction2 = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Put p = new Put(Bytes.toBytes("a"));
         p.add(family2, qualifier, Bytes.toBytes("v2"));
@@ -1056,6 +1096,7 @@ public class TestAccessController extend
       }
     };
     PrivilegedExceptionAction getActionAll = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Get g = new Get(Bytes.toBytes("random_row"));
         g.addFamily(family1);
@@ -1070,6 +1111,7 @@ public class TestAccessController extend
       }
     };
     PrivilegedExceptionAction getAction1 = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Get g = new Get(Bytes.toBytes("random_row"));
         g.addFamily(family1);
@@ -1083,6 +1125,7 @@ public class TestAccessController extend
       }
     };
     PrivilegedExceptionAction getAction2 = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Get g = new Get(Bytes.toBytes("random_row"));
         g.addFamily(family2);
@@ -1096,6 +1139,7 @@ public class TestAccessController extend
       }
     };
     PrivilegedExceptionAction deleteActionAll = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Delete d = new Delete(Bytes.toBytes("random_row"));
         d.deleteFamily(family1);
@@ -1110,6 +1154,7 @@ public class TestAccessController extend
       }
     };
     PrivilegedExceptionAction deleteAction1 = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Delete d = new Delete(Bytes.toBytes("random_row"));
         d.deleteFamily(family1);
@@ -1123,6 +1168,7 @@ public class TestAccessController extend
       }
     };
     PrivilegedExceptionAction deleteAction2 = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Delete d = new Delete(Bytes.toBytes("random_row"));
         d.deleteFamily(family2);
@@ -1327,6 +1373,7 @@ public class TestAccessController extend
     User user = User.createUserForTesting(TEST_UTIL.getConfiguration(), "user", new String[0]);
 
     PrivilegedExceptionAction getQualifierAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Get g = new Get(Bytes.toBytes("random_row"));
         g.addColumn(family1, qualifier);
@@ -1340,6 +1387,7 @@ public class TestAccessController extend
       }
     };
     PrivilegedExceptionAction putQualifierAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Put p = new Put(Bytes.toBytes("random_row"));
         p.add(family1, qualifier, Bytes.toBytes("v1"));
@@ -1353,6 +1401,7 @@ public class TestAccessController extend
       }
     };
     PrivilegedExceptionAction deleteQualifierAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         Delete d = new Delete(Bytes.toBytes("random_row"));
         d.deleteColumn(family1, qualifier);
@@ -1832,6 +1881,7 @@ public class TestAccessController extend
   @Test
   public void testStopRegionServer() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preStopRegionServer(ObserverContext.createAndPrepare(RSCP_ENV, null));
         return null;
@@ -1845,6 +1895,7 @@ public class TestAccessController extend
   @Test
   public void testOpenRegion() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preOpen(ObserverContext.createAndPrepare(RCP_ENV, null));
         return null;
@@ -1858,6 +1909,7 @@ public class TestAccessController extend
   @Test
   public void testCloseRegion() throws Exception {
     PrivilegedExceptionAction action = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preClose(ObserverContext.createAndPrepare(RCP_ENV, null), false);
         return null;
@@ -1871,6 +1923,7 @@ public class TestAccessController extend
   @Test
   public void testSnapshot() throws Exception {
     PrivilegedExceptionAction snapshotAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
           null, null);
@@ -1879,6 +1932,7 @@ public class TestAccessController extend
     };
 
     PrivilegedExceptionAction deleteAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preDeleteSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
           null);
@@ -1887,6 +1941,7 @@ public class TestAccessController extend
     };
 
     PrivilegedExceptionAction restoreAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preRestoreSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
           null, null);
@@ -1895,6 +1950,7 @@ public class TestAccessController extend
     };
 
     PrivilegedExceptionAction cloneAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preCloneSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
           null, null);
@@ -1956,6 +2012,7 @@ public class TestAccessController extend
           .iterator().next();
 
       PrivilegedExceptionAction moveAction = new PrivilegedExceptionAction() {
+        @Override
         public Object run() throws Exception {
           admin.move(firstRegion.getKey().getEncodedNameAsBytes(),
               Bytes.toBytes(newRs.getServerName().getServerName()));
@@ -1981,6 +2038,7 @@ public class TestAccessController extend
       // Verify write permission for user "admin2" who has the global
       // permissions.
       PrivilegedExceptionAction putAction = new PrivilegedExceptionAction() {
+        @Override
         public Object run() throws Exception {
           Put put = new Put(Bytes.toBytes("test"));
           put.add(TEST_FAMILY, Bytes.toBytes("qual"), Bytes.toBytes("value"));
@@ -2011,6 +2069,7 @@ public class TestAccessController extend
     }
 
     PrivilegedExceptionAction listTablesAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
         try {
@@ -2023,6 +2082,7 @@ public class TestAccessController extend
     };
 
     PrivilegedExceptionAction getTableDescAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
         try {
@@ -2058,6 +2118,7 @@ public class TestAccessController extend
     }
 
     PrivilegedExceptionAction deleteTableAction = new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
         try {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java?rev=1540160&r1=1540159&r2=1540160&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java Fri Nov  8 19:37:35 2013
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.util;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertNotEquals;
 
 import java.io.File;
 import java.io.IOException;
@@ -52,7 +52,7 @@ import org.junit.experimental.categories
 public class TestFSUtils {
   /**
    * Test path compare and prefix checking.
-   * @throws IOException 
+   * @throws IOException
    */
   @Test
   public void testMatchingTail() throws IOException {
@@ -115,7 +115,7 @@ public class TestFSUtils {
       if (cluster != null) cluster.shutdown();
     }
   }
-  
+
   private void WriteDataToHDFS(FileSystem fs, Path file, int dataSize)
     throws Exception {
     FSDataOutputStream out = fs.create(file);
@@ -123,14 +123,14 @@ public class TestFSUtils {
     out.write(data, 0, dataSize);
     out.close();
   }
-  
+
   @Test public void testcomputeHDFSBlocksDistribution() throws Exception {
     HBaseTestingUtility htu = new HBaseTestingUtility();
     final int DEFAULT_BLOCK_SIZE = 1024;
     htu.getConfiguration().setLong("dfs.block.size", DEFAULT_BLOCK_SIZE);
     MiniDFSCluster cluster = null;
     Path testFile = null;
-    
+
     try {
       // set up a cluster with 3 nodes
       String hosts[] = new String[] { "host1", "host2", "host3" };
@@ -141,7 +141,7 @@ public class TestFSUtils {
       // create a file with two blocks
       testFile = new Path("/test1.txt");
       WriteDataToHDFS(fs, testFile, 2*DEFAULT_BLOCK_SIZE);
-      
+
       // given the default replication factor is 3, the same as the number of
       // datanodes; the locality index for each host should be 100%,
       // or getWeight for each host should be the same as getUniqueBlocksWeights
@@ -173,9 +173,9 @@ public class TestFSUtils {
       FileSystem fs = cluster.getFileSystem();
 
       // create a file with three blocks
-      testFile = new Path("/test2.txt");        
+      testFile = new Path("/test2.txt");
       WriteDataToHDFS(fs, testFile, 3*DEFAULT_BLOCK_SIZE);
-              
+
       // given the default replication factor is 3, we will have total of 9
       // replica of blocks; thus the host with the highest weight should have
       // weight == 3 * DEFAULT_BLOCK_SIZE
@@ -199,7 +199,7 @@ public class TestFSUtils {
       htu.shutdownMiniDFSCluster();
     }
 
-    
+
     try {
       // set up a cluster with 4 nodes
       String hosts[] = new String[] { "host1", "host2", "host3", "host4" };
@@ -208,9 +208,9 @@ public class TestFSUtils {
       FileSystem fs = cluster.getFileSystem();
 
       // create a file with one block
-      testFile = new Path("/test3.txt");        
+      testFile = new Path("/test3.txt");
       WriteDataToHDFS(fs, testFile, DEFAULT_BLOCK_SIZE);
-      
+
       // given the default replication factor is 3, we will have total of 3
       // replica of blocks; thus there is one host without weight
       final long maxTime = System.currentTimeMillis() + 2000;
@@ -257,7 +257,7 @@ public class TestFSUtils {
       fs.delete(p, true);
     }
   }
-  
+
   @Test
   public void testDeleteAndExists() throws Exception {
     HBaseTestingUtility htu = new HBaseTestingUtility();
@@ -292,30 +292,30 @@ public class TestFSUtils {
   public void testRenameAndSetModifyTime() throws Exception {
     HBaseTestingUtility htu = new HBaseTestingUtility();
     Configuration conf = htu.getConfiguration();
-        
+
     MiniDFSCluster cluster = htu.startMiniDFSCluster(1);
     assertTrue(FSUtils.isHDFS(conf));
 
     FileSystem fs = FileSystem.get(conf);
-    Path testDir = htu.getDataTestDir("testArchiveFile");
-    
+    Path testDir = htu.getDataTestDirOnTestFS("testArchiveFile");
+
     String file = UUID.randomUUID().toString();
     Path p = new Path(testDir, file);
 
     FSDataOutputStream out = fs.create(p);
     out.close();
     assertTrue("The created file should be present", FSUtils.isExists(fs, p));
-    
+
     long expect = System.currentTimeMillis() + 1000;
     assertNotEquals(expect, fs.getFileStatus(p).getModificationTime());
-    
+
     ManualEnvironmentEdge mockEnv = new ManualEnvironmentEdge();
     mockEnv.setValue(expect);
     EnvironmentEdgeManager.injectEdge(mockEnv);
-    
+
     String dstFile = UUID.randomUUID().toString();
     Path dst = new Path(testDir , dstFile);
-    
+
     assertTrue(FSUtils.renameAndSetModifyTime(fs, p, dst));
     assertFalse("The moved file should not be present", FSUtils.isExists(fs, p));
     assertTrue("The dst file should be present", FSUtils.isExists(fs, dst));