Posted to commits@bigtop.apache.org by db...@apache.org on 2015/02/12 06:48:50 UTC

[4/5] bigtop git commit: BIGTOP-1601. cleanup whitespaces across test-artifacts

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJavaProc.java
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJavaProc.java b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJavaProc.java
index 557266b..cc9a513 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJavaProc.java
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJavaProc.java
@@ -20,6 +20,7 @@ package org.apache.bigtop.itest;
 
 import org.junit.BeforeClass;
 import org.junit.Test;
+
 import static org.junit.Assert.*;
 
 import org.apache.bigtop.itest.Contract;
@@ -28,62 +29,62 @@ import org.apache.bigtop.itest.Property;
 import org.apache.bigtop.itest.Variable;
 
 @Contract(
-        properties = {
-                @Property(name="foo.int1", type=Property.Type.INT, intValue=1000),
-                @Property(name="foo.int2", type=Property.Type.INT),
-                @Property(name="foo.bar1", type=Property.Type.STRING, defaultValue="xyz"),
-                @Property(name="foo.bar2", type=Property.Type.STRING),
-                @Property(name="foo.bool1", type=Property.Type.BOOLEAN),
-                @Property(name="foo.bool2", type=Property.Type.BOOLEAN)
-        },
-        env = {
-                @Variable(name="HOME"),
-                @Variable(name="BIGTOP_UNLIKELY_FOO_ENV", required=false)
-        }
+    properties = {
+        @Property(name = "foo.int1", type = Property.Type.INT, intValue = 1000),
+        @Property(name = "foo.int2", type = Property.Type.INT),
+        @Property(name = "foo.bar1", type = Property.Type.STRING, defaultValue = "xyz"),
+        @Property(name = "foo.bar2", type = Property.Type.STRING),
+        @Property(name = "foo.bool1", type = Property.Type.BOOLEAN),
+        @Property(name = "foo.bool2", type = Property.Type.BOOLEAN)
+    },
+    env = {
+        @Variable(name = "HOME"),
+        @Variable(name = "BIGTOP_UNLIKELY_FOO_ENV", required = false)
+    }
 )
 public class TestContractJavaProc {
-    public static int foo_int1;
-    public static int foo_int2;
-    protected static String foo_bar1;
-    protected static String foo_bar2;
-    private static boolean foo_bool1;
-    private static boolean foo_bool2;
+  public static int foo_int1;
+  public static int foo_int2;
+  protected static String foo_bar1;
+  protected static String foo_bar2;
+  private static boolean foo_bool1;
+  private static boolean foo_bool2;
 
-    static String HOME;
-    static String BIGTOP_UNLIKELY_FOO_ENV;
+  static String HOME;
+  static String BIGTOP_UNLIKELY_FOO_ENV;
 
-    @BeforeClass
-    public static void setUp() throws ClassNotFoundException, InterruptedException, NoSuchFieldException, IllegalAccessException {
-        System.setProperty("foo.int2", "100");
-        System.setProperty("foo.bool2", "true");
+  @BeforeClass
+  public static void setUp() throws ClassNotFoundException, InterruptedException, NoSuchFieldException, IllegalAccessException {
+    System.setProperty("foo.int2", "100");
+    System.setProperty("foo.bool2", "true");
 
-        ParameterSetter.setProperties(TestContractJavaProc.class,
-                new String[] { "foo_int1", "foo_int2", "foo_bar1", "foo_bar2", "foo_bool1", "foo_bool2" });
-        ParameterSetter.setEnv(TestContractJavaProc.class,
-                new String[] { "HOME", "BIGTOP_UNLIKELY_FOO_ENV"});
-    }
+    ParameterSetter.setProperties(TestContractJavaProc.class,
+        new String[]{"foo_int1", "foo_int2", "foo_bar1", "foo_bar2", "foo_bool1", "foo_bool2"});
+    ParameterSetter.setEnv(TestContractJavaProc.class,
+        new String[]{"HOME", "BIGTOP_UNLIKELY_FOO_ENV"});
+  }
 
-    @Test
-    public void testPropSettings() {
-        assertEquals("checking the value of foo_int1 from default value",
-                1000, foo_int1);
-        assertEquals("checking the value of foo_int2 from foo.int2",
-                100, foo_int2);
-        assertEquals("checking the value of foo_bar1 from default value",
-                "xyz", foo_bar1);
-        assertEquals("checking the value of foo_bar2 from unset value",
-                "", foo_bar2);
-        assertEquals("checking the value of foo_bool1 from unset value",
-                false, foo_bool1);
-        assertEquals("checking the value of foo_bar2 from foo.bool2",
-                true, foo_bool2);
-    }
+  @Test
+  public void testPropSettings() {
+    assertEquals("checking the value of foo_int1 from default value",
+        1000, foo_int1);
+    assertEquals("checking the value of foo_int2 from foo.int2",
+        100, foo_int2);
+    assertEquals("checking the value of foo_bar1 from default value",
+        "xyz", foo_bar1);
+    assertEquals("checking the value of foo_bar2 from unset value",
+        "", foo_bar2);
+    assertEquals("checking the value of foo_bool1 from unset value",
+        false, foo_bool1);
+    assertEquals("checking the value of foo_bar2 from foo.bool2",
+        true, foo_bool2);
+  }
 
-    @Test
-    public void testEnvSettings() {
-        assertEquals("checking the value of $HOME",
-                System.getenv("HOME"), HOME);
-        assertEquals("checking the value of $BIGTOP_UNLIKELY_FOO_ENV",
-                null, BIGTOP_UNLIKELY_FOO_ENV);
-    }
+  @Test
+  public void testEnvSettings() {
+    assertEquals("checking the value of $HOME",
+        System.getenv("HOME"), HOME);
+    assertEquals("checking the value of $BIGTOP_UNLIKELY_FOO_ENV",
+        null, BIGTOP_UNLIKELY_FOO_ENV);
+  }
 }
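
For context on the contract machinery this file exercises: @Contract declares the system properties and environment variables a test consumes, and ParameterSetter copies them into static fields of the same (underscored) name. A minimal Groovy sketch of that wiring, with illustrative names not taken from this commit:

    import org.apache.bigtop.itest.Contract
    import org.apache.bigtop.itest.ParameterSetter
    import org.apache.bigtop.itest.Property
    import org.junit.BeforeClass
    import org.junit.Test
    import static org.junit.Assert.assertEquals

    @Contract(
      properties = [
        @Property(name = "demo.count", type = Property.Type.INT, intValue = 5)
      ],
      env = []
    )
    class DemoContractTest {
      public static int demo_count  // filled from -Ddemo.count, or the default above

      @BeforeClass
      static void setUp() {
        // Copies each declared property into the matching static field.
        ParameterSetter.setProperties(DemoContractTest.class, ["demo_count"] as String[])
      }

      @Test
      void checkDefault() {
        assertEquals(5, demo_count)  // nothing set demo.count, so the default applies
      }
    }

Running with -Ddemo.count=9 would make demo_count 9 instead, which is what the foo.int2/foo.bool2 System.setProperty() calls in setUp() above simulate.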

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestListUtilsTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestListUtilsTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestListUtilsTest.groovy
index 29344cd..1a7a2c7 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestListUtilsTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestListUtilsTest.groovy
@@ -34,7 +34,7 @@ public class TestListUtilsTest {
 
     TestListUtils.touchTestFiles(prefix, "${fileName}.xml");
     assertTrue("only .class files are expected to be created",
-               expectedFile.getParentFile().listFiles().size() == 0);
+      expectedFile.getParentFile().listFiles().size() == 0);
 
     File p = new File(prefix);
     p.deleteDir();

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/failures/IntegrationTestClusterFailures.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/failures/IntegrationTestClusterFailures.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/failures/IntegrationTestClusterFailures.groovy
index a36cc1a..60be92a 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/failures/IntegrationTestClusterFailures.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/failures/IntegrationTestClusterFailures.groovy
@@ -67,7 +67,7 @@ public class IntegrationTestClusterFailures {
       Thread.sleep(SLEEP_TIME)
     }
 
-    try{
+    try {
       assert !isCronRunning(), "$CRON_SERVICE hasn't been stopped as expected:"
       println "$CRON_SERVICE stopped. Good."
     } finally {
@@ -104,7 +104,7 @@ public class IntegrationTestClusterFailures {
       Thread.sleep(SLEEP_TIME)
     }
 
-    try{
+    try {
       assert !isCronRunning(), "$CRON_SERVICE hasn't been killed as expected:"
       println "$CRON_SERVICE killed. Good."
     } finally {
@@ -137,7 +137,7 @@ public class IntegrationTestClusterFailures {
       Thread.sleep(SLEEP_TIME)
     }
 
-    try{
+    try {
       assert !isRemoteHostReachable(), "Connection to $testRemoteHost hasn't been killed as expected:"
       println "$testRemoteHost isn't reachable. Good."
     } finally {

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedParameterizedTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedParameterizedTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedParameterizedTest.groovy
index 0ba3627..16bbd34 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedParameterizedTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedParameterizedTest.groovy
@@ -30,13 +30,13 @@ class OrderedParameterizedTest {
   int parameter;
   static List order = [];
 
-  @RunStage(level=1)
+  @RunStage(level = 1)
   @Test
   public void lateTest() {
     order.add(1);
   }
 
-  @RunStage(level=-1)
+  @RunStage(level = -1)
   @Test
   public void earlyTest() {
     order.add(-1);
@@ -61,6 +61,6 @@ class OrderedParameterizedTest {
   @AfterClass
   static void verifyOrder() {
     assertEquals("tests were NOT executed in the desired order",
-                 [-1, 0, 1], order);
+      [-1, 0, 1], order);
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedTest.groovy
index 8dbe873..8815b42 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedTest.groovy
@@ -29,13 +29,13 @@ import static org.junit.Assert.assertEquals
 class OrderedTest {
   static List order = [];
 
-  @RunStage(level=1)
+  @RunStage(level = 1)
   @Test
   public void lateTest() {
     order.add(1);
   }
 
-  @RunStage(level=-1)
+  @RunStage(level = -1)
   @Test
   public void earlyTest() {
     order.add(-1);
@@ -49,6 +49,6 @@ class OrderedTest {
   @AfterClass
   static void verifyOrder() {
     assertEquals("tests were NOT executed in the desired order",
-                 [-1, 0, 1], order);
+      [-1, 0, 1], order);
   }
 }
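
Both ordering tests rely on @RunStage levels. A minimal sketch of the idiom; the @RunWith line and import locations are inferred from the file paths above, not shown in these hunks:

    import org.apache.bigtop.itest.junit.OrderedTestRunner  // assumed runner
    import org.apache.bigtop.itest.junit.RunStage
    import org.junit.Test
    import org.junit.runner.RunWith

    @RunWith(OrderedTestRunner.class)
    class StagedTest {
      @RunStage(level = -1)  // lower levels run first
      @Test void prepare() { println "runs first" }

      @Test void middle() { println "runs second" }  // unannotated: default level 0

      @RunStage(level = 1)   // higher levels run last
      @Test void verify() { println "runs last" }
    }

This is the [-1, 0, 1] order both verifyOrder() methods assert.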

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/pmanager/PackageManagerTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/pmanager/PackageManagerTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/pmanager/PackageManagerTest.groovy
index bb8f6d7..5d67fed 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/pmanager/PackageManagerTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/pmanager/PackageManagerTest.groovy
@@ -45,7 +45,9 @@ class PackageManagerTest {
   void searchForGcc() {
     List<PackageInstance> pkgs = pmgr.search("gcc")
 
-    assertFalse("gcc not found in repository", pkgs.findAll({return it.name =~ /^gcc.*/}).size() == 0)
+    assertFalse("gcc not found in repository", pkgs.findAll({
+      return it.name =~ /^gcc.*/
+    }).size() == 0)
   }
 
   @Test
@@ -83,7 +85,7 @@ class PackageManagerTest {
   void testGetContentList() {
     PackageInstance cron = PackageInstance.getPackageInstance(pmgr, CRON_RPM);
     List<String> list = pmgr.getContentList(cron);
-    list.each { println it};
+    list.each { println it };
 
     assertTrue("cron package is expected to contain at least ten files", list.size() > 10);
   }
@@ -92,10 +94,10 @@ class PackageManagerTest {
   void testGetDocs() {
     PackageInstance cron = PackageInstance.getPackageInstance(pmgr, CRON_RPM);
     List<String> list = pmgr.getDocs(cron);
-    list.each { println it};
+    list.each { println it };
 
     assertTrue("checking for docs in cron package",
-               list.size() > ((pmgr.getType() == "apt") ? -1 : 0));
+      list.size() > ((pmgr.getType() == "apt") ? -1 : 0));
   }
 
   @Test
@@ -104,14 +106,14 @@ class PackageManagerTest {
     Map<String, String> deps = bash.getDeps();
 
     assertTrue("package bash has 0 dependencies. weird.",
-               deps.size() > 0);
+      deps.size() > 0);
   }
 
   @Test
   void testGetConfigs() {
     PackageInstance cron = PackageInstance.getPackageInstance(pmgr, CRON_RPM);
     List<String> list = pmgr.getConfigs(cron);
-    list.each { println it};
+    list.each { println it };
 
     assertTrue("cron package is expected to contain at least a few config files", list.size() > 0);
   }
@@ -121,9 +123,9 @@ class PackageManagerTest {
   void testRepoManagement() {
     String repo_id = "test-repo";
     assertEquals("Can not add repo",
-                 0, pmgr.addBinRepo(repo_id, "http://127.0.0.1", null, "random strings here"));
+      0, pmgr.addBinRepo(repo_id, "http://127.0.0.1", null, "random strings here"));
     assertEquals("Can not remove repo",
-                 0, pmgr.removeBinRepo(repo_id));
+      0, pmgr.removeBinRepo(repo_id));
   }
 
   @Ignore("required sudo")
@@ -131,8 +133,8 @@ class PackageManagerTest {
   void testRepoFileManagement() {
     String repo_id = "test-repo";
     assertEquals("Can not add repo",
-                 0, pmgr.addBinRepo(repo_id, "random strings here"));
+      0, pmgr.addBinRepo(repo_id, "random strings here"));
     assertEquals("Can not remove repo",
-                 0, pmgr.removeBinRepo(repo_id));
+      0, pmgr.removeBinRepo(repo_id));
   }
 }
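
The hunks above touch most of the pmanager API surface. Pulled together as one Groovy sketch; the getPackageManager() factory call is an assumption, since pmgr's initialization sits outside these hunks:

    import org.apache.bigtop.itest.pmanager.PackageInstance
    import org.apache.bigtop.itest.pmanager.PackageManager

    def pmgr = PackageManager.getPackageManager()  // assumed factory for the platform's manager

    def hits = pmgr.search("bash")                 // repository query, as in searchForGcc()
    println "search hits: ${hits.size()}"
    def bash = PackageInstance.getPackageInstance(pmgr, "bash")
    println pmgr.getContentList(bash).size()       // files the package ships
    println bash.getDeps().size()                  // declared dependencies
    pmgr.getConfigs(bash).each { println it }      // config files, as in testGetConfigs()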

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/AlternativeTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/AlternativeTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/AlternativeTest.groovy
index 316160f..5f0ffcb 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/AlternativeTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/AlternativeTest.groovy
@@ -33,10 +33,10 @@ class AlternativeTest {
 
     Map groups = Alternative.getAlternatives();
     assertTrue("not a single alternative group found. weird.",
-               groups.size() >0);
+      groups.size() > 0);
     assertTrue("there is no alternative for editor. weird.",
-               groups["editor"] != null);
+      groups["editor"] != null);
     assertTrue("in the editor alternative there are no actuall alternatives",
-               groups["editor"].getAlts().size() > 0);
+      groups["editor"].getAlts().size() > 0);
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/ServiceTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/ServiceTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/ServiceTest.groovy
index d2c4fe4..7924313 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/ServiceTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/ServiceTest.groovy
@@ -34,11 +34,11 @@ class ServiceTest {
     assertEquals("wrong service name", name, svc.getName());
   }
 
-  @Ignore("requires chkconfig") 
+  @Ignore("requires chkconfig")
   @Test
   void testRunLevels() {
     List<String> l = svc.getRunLevels();
     assertTrue("Expected a non-zero size list of registered run levels for ssh service",
-               0 != l.size());
+      0 != l.size());
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/UGITest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/UGITest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/UGITest.groovy
index 0545dae..6c38881 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/UGITest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/UGITest.groovy
@@ -27,12 +27,12 @@ class UGITest {
   @Test
   void testUsers() {
     assertEquals("expect root uid to be 0",
-                 "0", ugi.getUsers()["root"]["uid"]);
+      "0", ugi.getUsers()["root"]["uid"]);
   }
 
   @Test
   void testGroups() {
     assertEquals("expect root gid to be 0",
-                 "0", ugi.getGroups()["root"]["gid"]);
+      "0", ugi.getGroups()["root"]["gid"]);
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/crunch/src/main/groovy/org/apache/bigtop/itest/crunchsmoke/TestCrunchSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/crunch/src/main/groovy/org/apache/bigtop/itest/crunchsmoke/TestCrunchSmoke.groovy b/bigtop-tests/test-artifacts/crunch/src/main/groovy/org/apache/bigtop/itest/crunchsmoke/TestCrunchSmoke.groovy
index 7b7012c..7edca58 100644
--- a/bigtop-tests/test-artifacts/crunch/src/main/groovy/org/apache/bigtop/itest/crunchsmoke/TestCrunchSmoke.groovy
+++ b/bigtop-tests/test-artifacts/crunch/src/main/groovy/org/apache/bigtop/itest/crunchsmoke/TestCrunchSmoke.groovy
@@ -29,7 +29,7 @@ public class TestCrunchSmoke {
   static String runnerScript = "hadoop jar"
 
   static String crunchJar = System.getProperty(
-    "org.apache.bigtop.itest.crunch.smoke.crunch.jar", 
+    "org.apache.bigtop.itest.crunch.smoke.crunch.jar",
     "/usr/share/doc/crunch*/crunch-examples-*job.jar");
 
   static Shell sh = new Shell("/bin/bash -s");
@@ -43,37 +43,37 @@ public class TestCrunchSmoke {
 
   static Map examples =
     [
-        WordCount             : "${EXAMPLES}/text/pg11.txt $EXAMPLES_OUT",
-        SecondarySortExample  : "${EXAMPLES}/text/secondary_sort_input.txt ${EXAMPLES_OUT}",
-        AverageBytesByIP      : "${EXAMPLES}/access_log/000000 ${EXAMPLES_OUT}",
-        TotalBytesByIP        : "${EXAMPLES}/access_log/000000 ${EXAMPLES_OUT}"
+      WordCount: "${EXAMPLES}/text/pg11.txt $EXAMPLES_OUT",
+      SecondarySortExample: "${EXAMPLES}/text/secondary_sort_input.txt ${EXAMPLES_OUT}",
+      AverageBytesByIP: "${EXAMPLES}/access_log/000000 ${EXAMPLES_OUT}",
+      TotalBytesByIP: "${EXAMPLES}/access_log/000000 ${EXAMPLES_OUT}"
     ];
 
   private void _runExampleJobs(String algorithm) {
     sh.exec("hadoop fs -rmr ${EXAMPLES_OUT}");
-    sh.exec("${runnerScript} ${crunchJar}" 
+    sh.exec("${runnerScript} ${crunchJar}"
       + " org.apache.crunch.examples.${algorithm}"
       + " ${examples.get(algorithm)}"
-      );
+    );
     assertEquals("running Crunch example failed", sh.getRet(), 0);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testWordCount() {
     _runExampleJobs("WordCount");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testSecondarySort() {
     _runExampleJobs("SecondarySortExample");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testAverageBytesByIP() {
     _runExampleJobs("AverageBytesByIP");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testTotalBytesByIP() {
     _runExampleJobs("TotalBytesByIP");
   }
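
Every smoke test in this commit drives external commands through the same Shell idiom; in isolation, with an illustrative command:

    import org.apache.bigtop.itest.shell.Shell
    import static org.junit.Assert.assertEquals

    def sh = new Shell("/bin/bash -s")
    sh.exec("hadoop fs -ls /")                        // one or more commands in a single bash session
    assertEquals("listing / failed", 0, sh.getRet())  // exit code of the last command
    sh.getOut().each { println it }                   // captured stdout, line by line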

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/flume/src/main/groovy/org/apache/bigtop/itest/flumesmoke/TestFlumeSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/flume/src/main/groovy/org/apache/bigtop/itest/flumesmoke/TestFlumeSmoke.groovy b/bigtop-tests/test-artifacts/flume/src/main/groovy/org/apache/bigtop/itest/flumesmoke/TestFlumeSmoke.groovy
index 6718146..85a016e 100644
--- a/bigtop-tests/test-artifacts/flume/src/main/groovy/org/apache/bigtop/itest/flumesmoke/TestFlumeSmoke.groovy
+++ b/bigtop-tests/test-artifacts/flume/src/main/groovy/org/apache/bigtop/itest/flumesmoke/TestFlumeSmoke.groovy
@@ -42,7 +42,7 @@ class TestFlumeSmoke {
 
   @BeforeClass
   static void setUp() {
-    JarContent.unpackJarContainer(TestFlumeSmoke.class, '.' , null);
+    JarContent.unpackJarContainer(TestFlumeSmoke.class, '.', null);
   }
 
   @AfterClass
@@ -54,27 +54,27 @@ class TestFlumeSmoke {
     String node_config = "node:text(\"events.txt\")|collectorSink(\"${hdfs_sink_dir}\",\"data\");";
 
     sh.exec("export FLUME_CONF_DIR=./${id}",
-            "flume node_nowatch -s -1 -n node -c '${node_config}'");
+      "flume node_nowatch -s -1 -n node -c '${node_config}'");
     assertEquals("Flume failed to accept events",
-                 0, sh.ret);
+      0, sh.ret);
 
     sh.exec("hadoop fs -cat ${hdfs_sink_dir}/${glob} | ${decompress} | wc -l");
     assertEquals("Wrong # of lines in output found at ${hdfs_sink_dir}",
-                 "10000", sh.out[0]);
+      "10000", sh.out[0]);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testBzip2() {
     compressionCommonTest("FlumeSmokeBzip2", "bzip2 -d", "*.bz2");
   }
 
   @Ignore("BIGTOP-218")
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testDeflate() {
     compressionCommonTest("FlumeSmokeDeflate", "perl -MCompress::Zlib -e 'undef \$/; print uncompress(<>)'", "*.deflate");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testGzip() {
     compressionCommonTest("FlumeSmokeGzip", "gzip -d", "*.gz");
   }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeBzip2/flume-site.xml
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeBzip2/flume-site.xml b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeBzip2/flume-site.xml
index cfd43e1..4981d25 100644
--- a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeBzip2/flume-site.xml
+++ b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeBzip2/flume-site.xml
@@ -1,4 +1,5 @@
 <?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 <!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
@@ -15,7 +16,6 @@
   See the License for the specific language governing permissions and
   limitations under the License.
 -->
-<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 
 <configuration>
   <property> 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeDeflate/flume-site.xml
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeDeflate/flume-site.xml b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeDeflate/flume-site.xml
index bc065da..78d1efc 100644
--- a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeDeflate/flume-site.xml
+++ b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeDeflate/flume-site.xml
@@ -1,4 +1,5 @@
 <?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 <!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
@@ -15,7 +16,6 @@
   See the License for the specific language governing permissions and
   limitations under the License.
 -->
-<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 
 <configuration>
   <property> 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeGzip/flume-site.xml
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeGzip/flume-site.xml b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeGzip/flume-site.xml
index 75d7c26..4c3948b 100644
--- a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeGzip/flume-site.xml
+++ b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeGzip/flume-site.xml
@@ -1,4 +1,5 @@
 <?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 <!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
@@ -15,7 +16,6 @@
   See the License for the specific language governing permissions and
   limitations under the License.
 -->
-<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 
 <configuration>
   <property> 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/giraph/src/main/groovy/org/apache/bigtop/itest/giraphsmoke/TestGiraphSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/giraph/src/main/groovy/org/apache/bigtop/itest/giraphsmoke/TestGiraphSmoke.groovy b/bigtop-tests/test-artifacts/giraph/src/main/groovy/org/apache/bigtop/itest/giraphsmoke/TestGiraphSmoke.groovy
index dd82c2f..b213130 100644
--- a/bigtop-tests/test-artifacts/giraph/src/main/groovy/org/apache/bigtop/itest/giraphsmoke/TestGiraphSmoke.groovy
+++ b/bigtop-tests/test-artifacts/giraph/src/main/groovy/org/apache/bigtop/itest/giraphsmoke/TestGiraphSmoke.groovy
@@ -32,7 +32,7 @@ public class TestGiraphSmoke {
 
   static Shell sh = new Shell("/bin/bash -s")
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testPageRankBenchmark() {
     sh.exec("${runnerScript} ${giraphJar}"
       + " org.apache.giraph.benchmark.PageRankBenchmark"
@@ -41,11 +41,11 @@ public class TestGiraphSmoke {
       + " -s 3"        // number of supersteps
       + " -V 100000"   // aggregate vertices
       + " -w 3"        // workers
-      )
+    )
     assertEquals("running PageRankBenchmark failed", sh.getRet(), 0);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testRandomMessageBenchmark() {
     sh.exec("${runnerScript} ${giraphJar}"
       + " org.apache.giraph.benchmark.RandomMessageBenchmark"
@@ -56,11 +56,11 @@ public class TestGiraphSmoke {
       + " -w 3"        // workers
       + " -n 10"       // Number of messages per edge
       + " -b 100"      // size of each message in bytes
-      )
+    )
     assertEquals("running RandomMessageBenchmark failed", sh.getRet(), 0);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testSimpleCheckpointVertex() {
     sh.exec("hadoop fs -rmr ${testDir}");
     sh.exec("${runnerScript} ${giraphJar}"
@@ -69,37 +69,37 @@ public class TestGiraphSmoke {
       + " -s 3"        // number of supersteps
       + " -w 3"        // workers
       + " -o ${testDir}"
-      )
+    )
     assertEquals("running SimpleCheckpointVertex failed", sh.getRet(), 0);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testSimpleVertexWithWorkerContext() {
     sh.exec("hadoop fs -rmr ${testDir}");
     sh.exec("${runnerScript} ${giraphJar}"
       + " org.apache.giraph.examples.SimpleVertexWithWorkerContext"
       + " ${testDir} 3"
-      )
+    )
     assertEquals("running SimpleCheckpointVertex failed", sh.getRet(), 0);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testSimpleShortestPathsVertex() {
     // A graph definition: 
     //   [vertex id, vertex value, [[edge1, value1], .. [edgeN, valueN]]] 
-    List graphDescription=[[0, 0, [[1,1], [2,2]]],
-                           [1, 1, [[2,2], [3,3]]],
-                           [2, 2, [[3,3], [4,4]]],
-                           [3, 3, [[4,4], [5,5]]],
-                           [4, 4, [[5,5], [0,0]]],
-                           [5, 5, [[0,0], [1,1]]]];
-    int partitionSize=2;
+    List graphDescription = [[0, 0, [[1, 1], [2, 2]]],
+      [1, 1, [[2, 2], [3, 3]]],
+      [2, 2, [[3, 3], [4, 4]]],
+      [3, 3, [[4, 4], [5, 5]]],
+      [4, 4, [[5, 5], [0, 0]]],
+      [5, 5, [[0, 0], [1, 1]]]];
+    int partitionSize = 2;
 
     sh.exec("hadoop fs -rmr ${testDir}",
-            "hadoop fs -mkdir ${testDir}/input");
+      "hadoop fs -mkdir ${testDir}/input");
 
-    for (int i=0; i<graphDescription.size(); i+=partitionSize)  {
-      String part = graphDescription[i..(i+partitionSize-1)].join("\n");
+    for (int i = 0; i < graphDescription.size(); i += partitionSize) {
+      String part = graphDescription[i..(i + partitionSize - 1)].join("\n");
       int partId = i / partitionSize;
       sh.exec("hadoop fs -put <(echo '${part}') ${testDir}/input/part-m-${partId}");
     }
@@ -109,7 +109,7 @@ public class TestGiraphSmoke {
       + " ${testDir}/input"
       + " ${testDir}/output"
       + " 0 ${graphDescription.size() / partitionSize}"
-      )
+    )
     assertEquals("running SimpleShortestPathsVertex failed", sh.getRet(), 0);
   }
 }
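
The reindented loop in testSimpleShortestPathsVertex() chunks graphDescription into fixed-size partitions by index arithmetic. The same chunking can be restated with Groovy's built-in collate (a restatement, not code from the test):

    def graph = [[0, 0, [[1, 1], [2, 2]]],
                 [1, 1, [[2, 2], [3, 3]]],
                 [2, 2, [[3, 3], [4, 4]]],
                 [3, 3, [[4, 4], [5, 5]]]]
    int partitionSize = 2

    // collate(n) yields successive sublists of size n, matching the
    // graphDescription[i..(i + partitionSize - 1)] slices built by hand.
    graph.collate(partitionSize).eachWithIndex { part, partId ->
      println "part-m-${partId}:\n" + part.join("\n")
    }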

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestCLI.java b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestCLI.java
index df24e69..8de9b30 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestCLI.java
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestCLI.java
@@ -68,7 +68,7 @@ public class TestCLI extends CLITestHelper {
     clitestDataDir = new File(TEST_CACHE_DATA_DIR).toURI().toString().replace(' ', '+');
 
     String[] createTestcliDirCmds = {
-        "hadoop fs -mkdir -p "  + TEST_DIR_ABSOLUTE,
+        "hadoop fs -mkdir -p " + TEST_DIR_ABSOLUTE,
         "hadoop fs -chmod 777 " + TEST_DIR_ABSOLUTE
     };
     shHDFS.exec(createTestcliDirCmds);
@@ -81,7 +81,7 @@ public class TestCLI extends CLITestHelper {
     // We can't just use conf.setInt(fs.trash.interval",0) because if trash is
     // enabled on the server, client configuration value is ignored.
     Assert.assertEquals("HDFS trash should be disabled via fs.trash.interval",
-        0, conf.getInt("fs.trash.interval",0));
+        0, conf.getInt("fs.trash.interval", 0));
     Assert.assertEquals("This test needs to be run under root user of hcfs",
         System.getProperty("hcfs.root.username", "hdfs"),
         System.getProperty("user.name"));

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestFuseHCFS.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestFuseHCFS.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestFuseHCFS.groovy
index 84c9c42..2e06fbb 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestFuseHCFS.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestFuseHCFS.groovy
@@ -46,7 +46,7 @@ public class TestFuseDFS {
   private static Shell sh = new Shell("/bin/bash -s");
   private static Shell shRoot = new Shell("/bin/bash -s", "root");
   private static String mount_point = System.
-      getProperty("fuse.dfs.mountpoint", "/tmp/hcfs-test");
+    getProperty("fuse.dfs.mountpoint", "/tmp/hcfs-test");
   static private Log LOG = LogFactory.getLog(Shell.class)
 
   /**
@@ -54,7 +54,7 @@ public class TestFuseDFS {
    * That allows this test to work on any file system, because its not
    * coupled to hadoop-fuse-dfs*/
   private static boolean isHDFS = "HDFS".
-      equals(System.getProperty("HCFS_IMPLEMENTATION", "HDFS"));
+    equals(System.getProperty("HCFS_IMPLEMENTATION", "HDFS"));
   private static String userdir = "${mount_point}/user/${username}";
   private static String testdir = "${userdir}/TestFuseDFS-testDir";
 
@@ -108,7 +108,7 @@ public class TestFuseDFS {
      * we go with them.  But that makes this test somewhat dependant
      * on working FUSE mount to begin with.*/
     sh.exec("mkdir -p ${testdir}");
-    assertEquals("Failed: mkdir basic setup !",0,sh.getRet());
+    assertEquals("Failed: mkdir basic setup !", 0, sh.getRet());
 
     /**
      * some tests will require a file system command to setup the test,
@@ -146,68 +146,68 @@ public class TestFuseDFS {
   @Test
   public void testCd() {
     testWrapper(//The test: Change to a directory.
-        "cd ${testdir} && pwd ",
-        //The lambda: Validates via running pwd.
-        {
-          LOG.info("After cd, pwd=" + sh.getOut()[0]);
-          assertEquals("Failed: testing contains '${testdir}' after change " +
-              "dir", true,
-              sh.getOut()[0].contains("${testdir}"));
-          assertEquals("Failed: exit code is non-zero", 0, sh.getRet());
-        }//validator
+      "cd ${testdir} && pwd ",
+      //The lambda: Validates via running pwd.
+      {
+        LOG.info("After cd, pwd=" + sh.getOut()[0]);
+        assertEquals("Failed: testing contains '${testdir}' after change " +
+          "dir", true,
+          sh.getOut()[0].contains("${testdir}"));
+        assertEquals("Failed: exit code is non-zero", 0, sh.getRet());
+      }//validator
     );
   }
 
   @Test
   public void testLs() {
     testWrapper(
-        "touch ${testdir}/non-trivial-fn",
-        "ls -altrh ${testdir}", //Test command : ls the dir.
-        {
-          //assert that FUSE mount calculates total line (ls -altrh)
-          assertTrue("Failed: Confiring that total is shown in ls",
-              sh.getOut()[0].contains("total"));
-          //now, we expect the user name to be in the test
-          // directory, since
-          //user is the one who created the test directory.
-          assertTrue("Failed: Confirming that the non-trivial-fn is shown in " +
-              "ls " +
-              "" +
-              "" + sh
-              .getOut(),
-              sh.getOut().toString().contains("non-trivial-fn"));
-          assertEquals("Failed: exit code is non-zero", 0, sh.getRet());
-        }//validator
+      "touch ${testdir}/non-trivial-fn",
+      "ls -altrh ${testdir}", //Test command : ls the dir.
+      {
+        //assert that FUSE mount calculates total line (ls -altrh)
+        assertTrue("Failed: Confiring that total is shown in ls",
+          sh.getOut()[0].contains("total"));
+        //now, we expect the user name to be in the test
+        // directory, since
+        //user is the one who created the test directory.
+        assertTrue("Failed: Confirming that the non-trivial-fn is shown in " +
+          "ls " +
+          "" +
+          "" + sh
+          .getOut(),
+          sh.getOut().toString().contains("non-trivial-fn"));
+        assertEquals("Failed: exit code is non-zero", 0, sh.getRet());
+      }//validator
     );
   }
 
   @Test
   public void testMkDir() {
     testWrapper("mkdir ${testdir}/dir1 && cd ${testdir}/dir1 && pwd",
-        {
-          LOG.info(sh.getOut());
-          //assert that FUSE mount calculates total line (ls -altrh)
-          assertTrue("Failed: Confirm that dir1 is the new working dir. ",
-              sh.getOut().toString().contains("${testdir}/dir1"));
-          assertEquals("Failed: mkdir under ${testdir} non-zero return code",
-              0,
-              sh.getRet());
-        } //validator
+      {
+        LOG.info(sh.getOut());
+        //assert that FUSE mount calculates total line (ls -altrh)
+        assertTrue("Failed: Confirm that dir1 is the new working dir. ",
+          sh.getOut().toString().contains("${testdir}/dir1"));
+        assertEquals("Failed: mkdir under ${testdir} non-zero return code",
+          0,
+          sh.getRet());
+      } //validator
     );
   }
 
   @Test
   public void testTouch() {
     testWrapper("touch ${testdir}/file1 && ls ${testdir}",
-        {
-          LOG.info(sh.getOut());
-          //assert that FUSE mount calculates total line (ls -altrh)
-          assertTrue("Failed: Confirm that file1 is created/listed ",
-              sh.getOut()[0].contains("file1"));
-          assertEquals("Failed: touch ${testdir}/file1 + ls return code " +
-              "non-zero", 0,
-              sh.getRet());
-        }//validator
+      {
+        LOG.info(sh.getOut());
+        //assert that FUSE mount calculates total line (ls -altrh)
+        assertTrue("Failed: Confirm that file1 is created/listed ",
+          sh.getOut()[0].contains("file1"));
+        assertEquals("Failed: touch ${testdir}/file1 + ls return code " +
+          "non-zero", 0,
+          sh.getRet());
+      }//validator
     );
   }
 
@@ -222,21 +222,21 @@ public class TestFuseDFS {
     f.write("hi_bigtop\nhi_bigtop\n");
 
     testWrapper("/bin/cp -rf /tmp/FUSETEST_bigtop ${testdir}/cf2",
-        /**
-         * Required sleep:  IS HDFS FUSE Strictly consistent?
-         * Reveals HDFS-6072.*/
-        "sleep 2 && cat ${testdir}/cf2",
-        {
-          //contents of output stream should be "-hello bigtop-"
-          LOG.info("cat output = " + sh.getOut() + " " + sh.getErr() + " " +
-              sh.getRet());
-          def (out, err, ret) = [ sh.getOut(), sh.getErr(), sh.getRet() ];
-          //assert that FUSE mount calculates total line (ls -altrh)
-          assertTrue(
-              "Failed: cat didnt contain "+out,
-              out.contains("hi_bigtop"));
-          assertEquals("Failed: return code non-zero", 0, ret);
-        }//validator
+      /**
+       * Required sleep:  IS HDFS FUSE Strictly consistent?
+       * Reveals HDFS-6072.*/
+      "sleep 2 && cat ${testdir}/cf2",
+      {
+        //contents of output stream should be "-hello bigtop-"
+        LOG.info("cat output = " + sh.getOut() + " " + sh.getErr() + " " +
+          sh.getRet());
+        def (out, err, ret) = [sh.getOut(), sh.getErr(), sh.getRet()];
+        //assert that FUSE mount calculates total line (ls -altrh)
+        assertTrue(
+          "Failed: cat didnt contain " + out,
+          out.contains("hi_bigtop"));
+        assertEquals("Failed: return code non-zero", 0, ret);
+      }//validator
     );
   }
 
@@ -254,37 +254,37 @@ public class TestFuseDFS {
      * TODO: Determine if the length of this string effect consistency?
      * Small "contents" string might be another way to expose HDFS-6072.
      * */
-    final String contents="ABCDEFGHIJKLMNOPZUIRPIEOF";
-    final String setup="mkdir ${testdir}/targetdir &&"+
-        "echo ${contents} > ${testdir}/cp1 && "+
-        "echo ${contents} > ${testdir}/cp2 && " +
-        "/bin/cp -rf ${testdir}/cp* ${testdir}/targetdir/";
+    final String contents = "ABCDEFGHIJKLMNOPZUIRPIEOF";
+    final String setup = "mkdir ${testdir}/targetdir &&" +
+      "echo ${contents} > ${testdir}/cp1 && " +
+      "echo ${contents} > ${testdir}/cp2 && " +
+      "/bin/cp -rf ${testdir}/cp* ${testdir}/targetdir/";
     testWrapper(
-        setup,//Large setup function so we externalize it above.
-        {
-          def files = ["cp1", "cp2"];
+      setup,//Large setup function so we externalize it above.
+      {
+        def files = ["cp1", "cp2"];
 
-          assertEquals("Failed: ret code non-zero", 0, sh.getRet());
-          sh.exec("ls -altrh ${testdir}/targetdir/");
-          //assert that copy results in the new files
-          //at least in the directory namespace...
-          assertEquals("Failed: ls of target dir ret code non-zero", 0,
-              sh.getRet());
-          files.each() {
-            assertTrue("Failed: to find ${it} in target directory",
-                sh.getOut().toString().contains(it));
-          }
-          //Assert that the copy resulted in identical files
-          //Note that due to eventual consistency, etc, this is
-          //an important test for typical fuse behaviour and workload
-          files.each() {
-            sh.exec("diff " +"${testdir}/${it} "
-                +"${testdir}/targetdir/${it}");
-            assertTrue("Failed: Detected a difference between ${it} in " +
-                "${testdir} vs " + "the ${testdir}/targetdir diff=" +sh.out ,
-                sh.getRet().equals(0));
-          }
-        }//validator
+        assertEquals("Failed: ret code non-zero", 0, sh.getRet());
+        sh.exec("ls -altrh ${testdir}/targetdir/");
+        //assert that copy results in the new files
+        //at least in the directory namespace...
+        assertEquals("Failed: ls of target dir ret code non-zero", 0,
+          sh.getRet());
+        files.each() {
+          assertTrue("Failed: to find ${it} in target directory",
+            sh.getOut().toString().contains(it));
+        }
+        //Assert that the copy resulted in identical files
+        //Note that due to eventual consistency, etc, this is
+        //an important test for typical fuse behaviour and workload
+        files.each() {
+          sh.exec("diff " + "${testdir}/${it} "
+            + "${testdir}/targetdir/${it}");
+          assertTrue("Failed: Detected a difference between ${it} in " +
+            "${testdir} vs " + "the ${testdir}/targetdir diff=" + sh.out,
+            sh.getRet().equals(0));
+        }
+      }//validator
     );
   }
 
@@ -292,17 +292,17 @@ public class TestFuseDFS {
   public void testMv() {
     //test that move recursively moves stuff
     testWrapper("mkdir -p ${testdir}/subdir1 && touch " +
-        "${testdir}/subdir1/innerfile",
-        "mv ${testdir}/subdir1 ${testdir}/subdir2",
-        {
-          assertEquals("Failed: cp exit code != 0", 0, sh.getRet());
-          sh.exec("ls -altrh ${testdir}/subdir2/");
-          //assert that the inner file exists
-          assertTrue(sh.getOut().toString().contains("innerfile"));
-          //assert that original file is gone
-          sh.exec("ls -altrh ${testdir}");
-          assertTrue(!sh.getOut().toString().contains("subdir1"));
-        }//validator
+      "${testdir}/subdir1/innerfile",
+      "mv ${testdir}/subdir1 ${testdir}/subdir2",
+      {
+        assertEquals("Failed: cp exit code != 0", 0, sh.getRet());
+        sh.exec("ls -altrh ${testdir}/subdir2/");
+        //assert that the inner file exists
+        assertTrue(sh.getOut().toString().contains("innerfile"));
+        //assert that original file is gone
+        sh.exec("ls -altrh ${testdir}");
+        assertTrue(!sh.getOut().toString().contains("subdir1"));
+      }//validator
     );
   }
 
@@ -310,12 +310,12 @@ public class TestFuseDFS {
   @Test
   public void testRm() {
     testWrapper("touch ${testdir}/file-removed",
-        "rm ${testdir}/file-removed",
-        {
-          assertEquals("Failed: rm ret code non-zero", 0, sh.getRet());
-          sh.exec("ls ${testdir}");
-          assertTrue(!sh.getOut().toString().contains("file-removed"));
-        }//validator
+      "rm ${testdir}/file-removed",
+      {
+        assertEquals("Failed: rm ret code non-zero", 0, sh.getRet());
+        sh.exec("ls ${testdir}");
+        assertTrue(!sh.getOut().toString().contains("file-removed"));
+      }//validator
     );
   }
 }
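
All of the TestFuseDFS hunks reindent calls to the class's testWrapper helper, which pairs shell commands with a validator closure. Its body is outside this diff, so the shape below is inferred from the call sites:

    import org.apache.bigtop.itest.shell.Shell

    class WrapperSketch {
      static Shell sh = new Shell("/bin/bash -s")

      // Inferred shape: optional setup command, command under test, then a
      // closure that inspects sh.getOut()/sh.getRet() and asserts on them.
      static void testWrapper(String setup, String cmd = null, Closure validator) {
        sh.exec(setup)
        if (cmd != null) sh.exec(cmd)
        validator()
      }

      static void main(String[] args) {
        testWrapper("touch /tmp/wrapper-sketch-file", "ls /tmp", {
          assert sh.getOut().toString().contains("wrapper-sketch-file")
          assert sh.getRet() == 0
        })
      }
    }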

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestBlockRecovery.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestBlockRecovery.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestBlockRecovery.groovy
index a75f016..f7bc04e 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestBlockRecovery.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestBlockRecovery.groovy
@@ -119,7 +119,7 @@ public class TestBlockRecovery {
     blockToTest = sh.exec("grep -o 'blk_[0-9]*' $outputFile").getOut()[0];
     assertTrue("Could not obtain block number", sh.getRet() == 0);
 
-    for (int i=0; i < dataDirs.length; i++) {
+    for (int i = 0; i < dataDirs.length; i++) {
       def dataDir = dataDirs[i]
       blockLocation = sh.exec("find $dataDir -name $blockToTest | grep $dataDir").getOut()[0];
       if (blockLocation != null) break;
@@ -137,7 +137,7 @@ public class TestBlockRecovery {
     assertTrue("Could not delete file $fsFilePath", sh.getRet() == 0);
     sh.exec("rm -rf $localTestDir");
     assertTrue("Could not delete test directory $localTestDir", sh.getRet() == 0);
-    }
+  }
 
   @Test
   public void testBlockRecovery() {
@@ -153,14 +153,14 @@ public class TestBlockRecovery {
     sh.exec("hadoop fs -cat $fsFilePath");
 
     // make sure checksum changes back to original, indicating block recovery
-    for (int j=0; j<3; j++) {
+    for (int j = 0; j < 3; j++) {
       // wait a bit to let the block recover
       sleep(sleepTime);
       // see if checksum has changed
       cksumError = sh.exec("hadoop fs -cat $fsFilePath | grep -o 'Checksum error'").getErr();
       if (cksumError != "Checksum error") break;
     }
-    assertNotNull ("Block has not been successfully triggered for recovery.", cksumError);
+    assertNotNull("Block has not been successfully triggered for recovery.", cksumError);
 
     nodesAfterRecovery = sh.exec("hdfs fsck $fsFilePath -blocks -locations -files | $grepIP").getOut();
     assertTrue("Could not obtain datanode addresses", sh.getRet() == 0);
@@ -174,7 +174,7 @@ public class TestBlockRecovery {
       assertTrue("Could not obtain datanode addresses", sh.getRet() == 0);
 
       blockRecoveryNode = (nodesBeforeRecovery.intersect(nodesAfterRecovery))[0];
-      assert (blockRecoveryNode.size() != 0) : "Block has not been successfully triggered for recovery."
+      assert (blockRecoveryNode.size() != 0): "Block has not been successfully triggered for recovery."
     }
 
     int cksumAttempt;
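
The checksum check above is a bounded poll: sleep, re-check, break once the error clears. The same idiom as a standalone helper (the helper name is mine; the test inlines the loop):

    // Poll `check` up to `attempts` times, sleeping between tries.
    boolean waitUntil(int attempts, long sleepMs, Closure<Boolean> check) {
      for (int i = 0; i < attempts; i++) {
        if (check()) return true
        Thread.sleep(sleepMs)
      }
      return false
    }

    // The block-recovery wait is then roughly:
    //   assert waitUntil(3, sleepTime) { !checksumErrorPresent() }  // checksumErrorPresent() is a stand-in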

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSAdmin.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSAdmin.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSAdmin.groovy
index bea1595..e4168f5 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSAdmin.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSAdmin.groovy
@@ -27,14 +27,14 @@ import org.apache.bigtop.itest.JarContent;
 import org.apache.bigtop.itest.shell.Shell;
 
 public class TestDFSAdmin {
- 
+
   // set debugging variable to true if you want error messages sent to stdout
   private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
 
   @BeforeClass
   public static void setUp() {
     // unpack resource
-    JarContent.unpackJarContainer(TestDFSAdmin.class, "." , null);
+    JarContent.unpackJarContainer(TestDFSAdmin.class, ".", null);
     System.out.println("Running DFSAdmin commands:");
   }
 
@@ -43,32 +43,32 @@ public class TestDFSAdmin {
   }
 
   @Test
-  public void testDFSbasic() { 
+  public void testDFSbasic() {
     // report
-    System.out.println("-report"); 
-    shHDFS.exec("hdfs dfsadmin -report");    
+    System.out.println("-report");
+    shHDFS.exec("hdfs dfsadmin -report");
     assertTrue("-report failed", shHDFS.getRet() == 0);
 
     // help
-    System.out.println("-help"); 
+    System.out.println("-help");
     shHDFS.exec("hdfs dfsadmin -help");
     assertTrue("-help failed", shHDFS.getRet() == 0);
 
     // printTopology
-    System.out.println("-printTopology"); 
+    System.out.println("-printTopology");
     shHDFS.exec("hdfs dfsadmin -printTopology");
     assertTrue("-printTopology failed", shHDFS.getRet() == 0);
 
     // metasave
     System.out.println("-metasave");
     shHDFS.exec("hdfs dfsadmin -metasave metasave_test");
-    assertTrue("-metasave failed", shHDFS.getRet() == 0); 
+    assertTrue("-metasave failed", shHDFS.getRet() == 0);
   }
 
   @Test
   public void testDFSsafemode() {
     // safemode
-    System.out.println("-safemode"); 
+    System.out.println("-safemode");
     shHDFS.exec("hdfs dfsadmin -safemode leave");
     assertTrue("-safemode leave failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -safemode get");
@@ -80,25 +80,25 @@ public class TestDFSAdmin {
     assertTrue("-safemode get failed", shHDFS.getOut().get(0) == "Safe mode is ON");
     assertTrue("-safemode get failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -safemode leave");
-    assertTrue("-safemode leave failed", shHDFS.getRet() == 0); 
+    assertTrue("-safemode leave failed", shHDFS.getRet() == 0);
   }
 
   @Test
   public void testDFSnamespace() {
     // saveNamespace
     System.out.println("-saveNamespace");
-    shHDFS.exec("hdfs dfsadmin -safemode enter"); 
+    shHDFS.exec("hdfs dfsadmin -safemode enter");
     shHDFS.exec("hdfs dfsadmin -saveNamespace");
     assertTrue("-saveNamespace failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -safemode leave");
-    shHDFS.exec("hdfs dfsadmin -saveNamespace"); 
+    shHDFS.exec("hdfs dfsadmin -saveNamespace");
     assertTrue("-saveNamespace worked in non safemode", shHDFS.getRet() != 0);
   }
 
   @Test
   public void testDFSrefreshcommands() {
     // refreshNodes
-    System.out.println("-refreshNodes"); 
+    System.out.println("-refreshNodes");
     shHDFS.exec("hdfs dfsadmin -refreshNodes");
     assertTrue("-refreshNodes failed", shHDFS.getRet() == 0);
 
@@ -107,7 +107,7 @@ public class TestDFSAdmin {
     shHDFS.exec("hdfs dfsadmin -refreshServiceAcl");
     System.out.println(shHDFS.getRet());
     assertTrue("-refreshServiceAcl failed", shHDFS.getRet() == 0); */
-   
+
     // refreshUserToGroupsMappings
     System.out.println("-refreshUserToGroupsMappings");
     shHDFS.exec("hdfs dfsadmin -refreshUserToGroupsMappings");
@@ -116,13 +116,13 @@ public class TestDFSAdmin {
     // refreshSuperUserGroupsConfiguration
     System.out.println("-refreshSuperUserGroupsConfiguration");
     shHDFS.exec("hdfs dfsadmin -refreshSuperUserGroupsConfiguration");
-    assertTrue("-refreshSuperUserGroupsConfiguration failed", shHDFS.getRet() == 0); 
+    assertTrue("-refreshSuperUserGroupsConfiguration failed", shHDFS.getRet() == 0);
   }
 
   @Test
-  public void testDFSstorage() {  
+  public void testDFSstorage() {
     // restoreFailedStorage
-    System.out.println("-restoreFailedStorage"); 
+    System.out.println("-restoreFailedStorage");
     shHDFS.exec("hdfs dfsadmin -restoreFailedStorage false");
     assertTrue("-restoreFailedStorage false failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -restoreFailedStorage check");
@@ -134,7 +134,7 @@ public class TestDFSAdmin {
     assertTrue("-restoreFailedStorage check", shHDFS.getOut().get(0) == "restoreFailedStorage is set to true");
     assertTrue("-restoreFailedStorage check", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -restoreFailedStorage false");
-    assertTrue("-restoreFailedStorage false failed", shHDFS.getRet() == 0); 
+    assertTrue("-restoreFailedStorage false failed", shHDFS.getRet() == 0);
   }
 
   @Test
@@ -142,18 +142,18 @@ public class TestDFSAdmin {
     // setQuota, clrQuota
     System.out.println("-setQuota, -clrQuota");
     shHDFS.exec("date");
-    String quota_test = "quota_test" + shHDFS.getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+    String quota_test = "quota_test" + shHDFS.getOut().get(0).replaceAll("\\s", "").replaceAll(":", "");
     shHDFS.exec("hadoop fs -test -e $quota_test");
     if (shHDFS.getRet() == 0) {
       shHDFS.exec("hadoop fs -rmr -skipTrash $quota_test");
       assertTrue("Deletion of previous testDistcpInputs from HDFS failed",
-          shHDFS.getRet() == 0);
+        shHDFS.getRet() == 0);
     }
     shHDFS.exec("hadoop fs -mkdir -p $quota_test");
     shHDFS.exec("hdfs dfsadmin -setQuota 1000 $quota_test");
     assertTrue("-setQuota failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -clrQuota $quota_test");
-    assertTrue("-clrQuota failed", shHDFS.getRet() == 0); 
+    assertTrue("-clrQuota failed", shHDFS.getRet() == 0);
 
     // setSpaceQuota, clrSpaceQuota
     System.out.println("-setSpaceQuota, -clrSpaceQuota");
@@ -161,7 +161,7 @@ public class TestDFSAdmin {
     assertTrue("-setSpaceQuota failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -clrSpaceQuota $quota_test");
     assertTrue("-clrSpaceQuota failed", shHDFS.getRet() == 0);
-    shHDFS.exec("hadoop fs -rmr $quota_test"); 
+    shHDFS.exec("hadoop fs -rmr $quota_test");
   }
 
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSCLI.java b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSCLI.java
index a0848ad..63d4232 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSCLI.java
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSCLI.java
@@ -55,7 +55,7 @@ public class TestDFSCLI extends TestHDFSCLI {
     clitestDataDir = new File(TEST_CACHE_DATA_DIR).toURI().toString().replace(' ', '+');
 
     String[] createTestcliDirCmds = {
-        "hadoop fs -mkdir -p "  + TEST_DIR_ABSOLUTE,
+        "hadoop fs -mkdir -p " + TEST_DIR_ABSOLUTE,
         "hadoop fs -chmod 777 " + TEST_DIR_ABSOLUTE
     };
     shHDFS.exec(createTestcliDirCmds);

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy
index d14d664..40330fc 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy
@@ -26,11 +26,11 @@ import org.apache.hadoop.conf.Configuration;
 
 
 public class TestDistCpIntra {
- 
+
   private static Shell sh = new Shell("/bin/bash -s");
   //extracting user identity for distcp absolute path
   private static final String USERNAME = System.getProperty("user.name");
-  private static String date = sh.exec("date").getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+  private static String date = sh.exec("date").getOut().get(0).replaceAll("\\s", "").replaceAll(":", "");
   private static String namenode = "";
   private static String testDistcpInputs = "testDistcpInputs" + date;
   private static String testDistcpOutputs = "testDistcpOutputs" + date;
@@ -39,7 +39,7 @@ public class TestDistCpIntra {
   private static String testDistcpOut = "testDistcpOut" + date;
 
   @BeforeClass
-  public static void setUp() {   
+  public static void setUp() {
     // get namenode hostname from core-site.xml
     Configuration conf = new Configuration();
     namenode = conf.get("fs.defaultFS");
@@ -64,7 +64,7 @@ public class TestDistCpIntra {
       String dcpfile_i = "$dcpfile" + "$i" + ".txt";
       sh.exec("echo \"test$i\" > $dcpfile_i");
     }
-    
+
     // copy sample input files to hdfs
     sh.exec("hadoop fs -put $dcpfile* $testDistcpInputs");
     assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
@@ -90,15 +90,15 @@ public class TestDistCpIntra {
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs -rmr -skipTrash $testDistcpInputs");
       assertTrue("Deletion of previous testDistcpInputs from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
 
     for (int i = 4; i <= 7; i++) {
       sh.exec("hadoop fs -test -e $testDistcpInputs$i");
       if (sh.getRet() == 0) {
-       sh.exec("hadoop fs -rmr -skipTrash $testDistcpInputs$i");
+        sh.exec("hadoop fs -rmr -skipTrash $testDistcpInputs$i");
         assertTrue("Deletion of previous testDistcpInputs from HDFS failed",
-            sh.getRet() == 0);
+          sh.getRet() == 0);
       }
     }
 
@@ -106,19 +106,19 @@ public class TestDistCpIntra {
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs -rmr -skipTrash $testDistcpOutputs");
       assertTrue("Deletion of previous testDistcpOutputs from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
 
   }
 
   @Test
-  public void testDistcpIntra() { 
+  public void testDistcpIntra() {
     for (int i = 1; i <= 2; i++) {
       String dcpfile_i = "$dcpfile" + "$i" + ".txt";
       // running distcp from namenode/src to namenode/dest
       sh.exec("hadoop distcp $namenode/user/$USERNAME/$testDistcpInputs/$dcpfile_i $namenode/user/$USERNAME/$testDistcpOutputs");
       assertTrue("Distcp $i failed", sh.getRet() == 0);
-      
+
       // confirm that copied file is the same as original file
       sh.exec("hadoop fs -cat $namenode/user/$USERNAME/$testDistcpInputs/$dcpfile_i > $testDistcpIn");
       sh.exec("hadoop fs -cat $namenode/user/$USERNAME/$testDistcpOutputs/$dcpfile_i > $testDistcpOut");
@@ -128,24 +128,24 @@ public class TestDistCpIntra {
       // clean up
       sh.exec("rm -rf $testDistcpIn", "rm -rf $testDistcpOut");
     }
-  } 
+  }
 
   @Test
-  public void testDistcpIntra_MultipleSources() { 
+  public void testDistcpIntra_MultipleSources() {
     String distcp_sources = "distcp_sources" + date;
     String dcpfile4 = "$testDistcpInputs" + "4/$dcpfile" + "4.txt"
     String dcpfile5 = "$testDistcpInputs" + "5/$dcpfile" + "5.txt"
     String dcpfile6 = "$testDistcpInputs" + "6/$dcpfile" + "6.txt"
     String dcpfile7 = "$testDistcpInputs" + "7/$dcpfile" + "7.txt"
     // distcp multiple sources
-    sh.exec("hadoop distcp $namenode/user/$USERNAME/$dcpfile4 $namenode/user/$USERNAME/$dcpfile5 $namenode/user/$USERNAME/$testDistcpOutputs");  
+    sh.exec("hadoop distcp $namenode/user/$USERNAME/$dcpfile4 $namenode/user/$USERNAME/$dcpfile5 $namenode/user/$USERNAME/$testDistcpOutputs");
     assertTrue("Distcp multiple sources failed", sh.getRet() == 0);
 
     // distcp source file (-f option)
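     // -f makes distcp read its list of source URIs (one per line) from the given file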
     sh.exec("echo \"$namenode/user/$USERNAME/$dcpfile6\" > $distcp_sources", "echo \"$namenode/user/$USERNAME/$dcpfile7\" >> $distcp_sources");
     sh.exec("hadoop fs -put $distcp_sources $namenode/user/$USERNAME/$testDistcpInputs");
     sh.exec("rm -rf $distcp_sources");
-    sh.exec("hadoop distcp -f $namenode/user/$USERNAME/$testDistcpInputs/$distcp_sources $namenode/user/$USERNAME/$testDistcpOutputs"); 
+    sh.exec("hadoop distcp -f $namenode/user/$USERNAME/$testDistcpInputs/$distcp_sources $namenode/user/$USERNAME/$testDistcpOutputs");
     assertTrue("Distcp with a source file failed", sh.getRet() == 0);
 
     // confirm that copied files are the same as original files for multiple sources and source file

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy
index f0b4436..41ee356 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy
@@ -27,13 +27,13 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.Configuration;
 
 public class TestFileAppend {
- 
+
   private static Shell sh = new Shell("/bin/bash -s");
   private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
   private static final String HADOOP_HOME = System.getenv('HADOOP_HOME');
   private static final String HADOOP_CONF_DIR = System.getenv('HADOOP_CONF_DIR');
   private static final String USERNAME = System.getProperty("user.name");
-  private static String date = sh.exec("date").getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+  private static String date = sh.exec("date").getOut().get(0).replaceAll("\\s", "").replaceAll(":", "");
   private static String testAppendInput = "testAppendInput$date";
   private static String testAppendOutput = "testAppendOutput$date";
   private static String namenode;
@@ -67,15 +67,15 @@ public class TestFileAppend {
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs -rmr -skipTrash $testAppendInput");
       assertTrue("Deletion of previous testAppendInputs from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
 
   }
 
   @Test
-  public void testAppendOnPreExistingFile() { 
+  public void testAppendOnPreExistingFile() {
     FileSystem fs = FileSystem.get(conf);
-    
+
     // setting paths for I/O stream creation
     String myInputPath = namenode + "/user/$USERNAME/$testAppendInput/appendinput2.txt$date";
     Path inFile = new Path(myInputPath);
@@ -83,7 +83,7 @@ public class TestFileAppend {
     String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/appendinput1.txt$date";
     Path outFile = new Path(myOutputPath);
     assertTrue("Output file not found", fs.exists(outFile));
-    
+
     FSDataInputStream input1 = fs.open(inFile);
     FSDataOutputStream output1 = fs.append(outFile);
 
@@ -100,7 +100,7 @@ public class TestFileAppend {
   @Test
   public void testAppendOnCreatedFile() {
     FileSystem fs = FileSystem.get(conf);
-    
+
     // setting paths for I/O stream creation
     String myOutputCreate = namenode + "/user/$USERNAME/$testAppendInput/appendinput3.txt$date";
     Path outCreate = new Path(myOutputCreate);
@@ -108,7 +108,7 @@ public class TestFileAppend {
     String myString = "-----TEST INPUT1-----\n";
     InputStream is = new ByteArrayInputStream(myString.getBytes());
     IOUtils.copyBytes(is, outputTemp, 4096, true);
- 
+
     String myInputPath = namenode + "/user/$USERNAME/$testAppendInput/appendinput2.txt$date";
     Path inFile = new Path(myInputPath);
     assertTrue("Input file not found", fs.exists(inFile));
@@ -127,8 +127,8 @@ public class TestFileAppend {
     assertTrue("Append did not work", sh.getRet() == 0);
     sh.exec("rm -rf $testAppendOutput", "rm -rf appendinput1.txt$date", "rm -rf appendinput2.txt$date");
     sh.exec("rm -rf appendCorrect.txt$date");
-    sh.exec("rm -rf appendinput3.txt$date"); 
- }
+    sh.exec("rm -rf appendinput3.txt$date");
+  }
 
 
   @Test
@@ -150,11 +150,11 @@ public class TestFileAppend {
     assertTrue("Input file not found", fs.exists(inFile));
     String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/3mboutput.file$date";
     Path outFile = new Path(myOutputPath);
-    assertTrue("Output file not found", fs.exists(outFile));  
+    assertTrue("Output file not found", fs.exists(outFile));
 
     FSDataInputStream input1 = fs.open(inFile);
     FSDataOutputStream output1 = fs.append(outFile);
-    
+
     // append
     IOUtils.copyBytes(input1, output1, 4096, true);
 
@@ -182,17 +182,17 @@ public class TestFileAppend {
     assertTrue("Input file not found", fs.exists(inFile));
     String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/test2.file$date";
     Path outFile = new Path(myOutputPath);
-    assertTrue("Output file not found", fs.exists(outFile));  
+    assertTrue("Output file not found", fs.exists(outFile));
 
     FSDataInputStream input1 = fs.open(inFile);
     FSDataOutputStream output1 = fs.append(outFile);
-    
+
     // append
-    IOUtils.copyBytes(input1, output1, 4096, true); 
-  
+    IOUtils.copyBytes(input1, output1, 4096, true);
+
     // running fsck
     shHDFS.exec("hadoop fsck /user/$USERNAME/$testAppendInput/test2.file$date");
-    Boolean success = shHDFS.getOut().get(shHDFS.getOut().size() - 1).contains("is HEALTHY");;
+    Boolean success = shHDFS.getOut().get(shHDFS.getOut().size() - 1).contains("is HEALTHY");
     assertTrue("Append made file unhealthy", success == true);
 
     sh.exec("rm -rf test1.file$date", "rm -rf test2.file$date");
@@ -220,7 +220,7 @@ public class TestFileAppend {
     assertTrue("Input file not found", fs.exists(inFile));
     String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/test4.file$date";
     Path outFile = new Path(myOutputPath);
-    assertTrue("Output file not found", fs.exists(outFile));  
+    assertTrue("Output file not found", fs.exists(outFile));
 
     FSDataInputStream input1 = fs.open(inFile);
     FSDataOutputStream output1 = fs.append(outFile);
@@ -232,7 +232,7 @@ public class TestFileAppend {
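     // HDFS allows a single writer per file, so opening a second append stream should fail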
     try {
       FSDataOutputStream output2 = fs2.append(outFile);
       assertTrue("Should not have been able to open second output stream", false);
-      IOUtils.closeStream(output2); 
+      IOUtils.closeStream(output2);
     }
     catch (Exception e) {
     }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFsck.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFsck.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFsck.groovy
index 62efd7c..040c3b5 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFsck.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFsck.groovy
@@ -26,7 +26,7 @@ import static org.apache.bigtop.itest.LogErrorsUtils.logError
  * Tests the HDFS fsck command.
  */
 public class TestFsck {
-  static Shell shHDFS = new Shell("/bin/bash", "hdfs" )
+  static Shell shHDFS = new Shell("/bin/bash", "hdfs")
   String[] fsckCmds = [
     "hdfs fsck /",
     "hdfs fsck -move /",

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSBalancer.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSBalancer.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSBalancer.groovy
index e0fca84..f22b005 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSBalancer.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSBalancer.groovy
@@ -27,7 +27,7 @@ import org.apache.bigtop.itest.JarContent;
 import org.apache.bigtop.itest.shell.Shell;
 
 public class TestHDFSBalancer {
- 
+
   private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
   // set with -Dthreshold
   private static String thresh = "10";
@@ -35,10 +35,10 @@ public class TestHDFSBalancer {
   @BeforeClass
   public static void setUp() {
     // unpack resource
-    JarContent.unpackJarContainer(TestHDFSBalancer.class, "." , null);
+    JarContent.unpackJarContainer(TestHDFSBalancer.class, ".", null);
     if (System.getProperty("threshold") != null) {
       thresh = System.getProperty("threshold");
-    }  
+    }
   }
 
   @AfterClass
@@ -46,13 +46,13 @@ public class TestHDFSBalancer {
   }
 
   @Test
-  public void testBalancer() { 
+  public void testBalancer() {
     System.out.println("Running Balancer:");
-    System.out.println("Threshold is set to " + thresh +". Toggle by adding -Dthreshold=#");
+    System.out.println("Threshold is set to " + thresh + ". Toggle by adding -Dthreshold=#");
 
     // must run balancer as hdfs user   
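     // threshold: max percentage a DataNode's utilization may deviate from the cluster average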
     shHDFS.exec("hdfs balancer -threshold $thresh");
-  
+
     boolean success = false;
     // success_string message signifies balancing worked correctly
     String success_string1 = "The cluster is balanced. Exiting..."
@@ -64,7 +64,7 @@ public class TestHDFSBalancer {
       String next_val = out_iter.next();
       if (next_val.equals(success_string1) || next_val.contains(success_string2) || next_val.contains(success_string3)) {
         success = true;
-       }
+      }
     }
 
     String failure_string1 = "namenodes = []"

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSQuota.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSQuota.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSQuota.groovy
index a2c0c57..a80f27b 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSQuota.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSQuota.groovy
@@ -24,17 +24,17 @@ import org.junit.Test;
 import org.apache.bigtop.itest.shell.Shell;
 
 public class TestHDFSQuota {
- 
+
   private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
   private static Shell sh = new Shell("/bin/bash");
   private static final long LARGE = Long.MAX_VALUE - 1;
   private static final String USERNAME = System.getProperty("user.name");
-  private static String quotaDate = shHDFS.exec("date").getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+  private static String quotaDate = shHDFS.exec("date").getOut().get(0).replaceAll("\\s", "").replaceAll(":", "");
   private static String testQuotaFolder = "/tmp/testQuotaFolder" + quotaDate;
   private static String testQuotaFolder1 = testQuotaFolder + "1";
   private static String testQuotaFolder2 = testQuotaFolder + "2";
   private static String testQuotaFolder3 = testQuotaFolder + "3";
-  
+
   @Before
   public void setUp() {
     // creating test folders
@@ -52,24 +52,24 @@ public class TestHDFSQuota {
     if (shHDFS.getRet() == 0) {
       shHDFS.exec("hadoop fs -rmr -skipTrash $testQuotaFolder1");
       assertTrue("Deletion of previous testQuotaFolder1 from HDFS failed",
-          shHDFS.getRet() == 0);
+        shHDFS.getRet() == 0);
     }
     shHDFS.exec("hadoop fs -test -e $testQuotaFolder2");
     if (shHDFS.getRet() == 0) {
       shHDFS.exec("hadoop fs -rmr -skipTrash $testQuotaFolder2");
       assertTrue("Deletion of previous testQuotaFolder2 from HDFS failed",
-          shHDFS.getRet() == 0);
+        shHDFS.getRet() == 0);
     }
     sh.exec("hadoop fs -test -e $testQuotaFolder1");
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs -rmr -skipTrash $testQuotaFolder1");
       assertTrue("Deletion of previous testQuotaFolder1 from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
   }
 
   @Test
-  public void testNewlyCreatedDir() { 
+  public void testNewlyCreatedDir() {
     // newly created dir should have no name quota, no space quota   
     shHDFS.exec("hadoop fs -count -q $testQuotaFolder1");
     assertTrue("Could not use count command", shHDFS.getRet() == 0);
@@ -78,10 +78,10 @@ public class TestHDFSQuota {
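     // 'hadoop fs -count -q' columns: QUOTA, REMAINING_QUOTA, SPACE_QUOTA, REMAINING_SPACE_QUOTA, ...; unset quotas print as "none"/"inf"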
     assertTrue("Newly created directory had a set name quota left", output[1].equals("inf"));
     assertTrue("Newly created directory had a set space quota", output[2].equals("none"));
     assertTrue("Newly created directory had a set space quota left", output[3].equals("inf"));
-  } 
+  }
 
   @Test
-  public void testAdminPermissions() { 
+  public void testAdminPermissions() {
     // admin setting quotas should succeed
     shHDFS.exec("hadoop dfsadmin -setQuota 10 $testQuotaFolder1");
     assertTrue("setQuota failed", shHDFS.getRet() == 0);
@@ -103,10 +103,10 @@ public class TestHDFSQuota {
     assertTrue("clrQuota failed", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop dfsadmin -clrSpaceQuota $testQuotaFolder1");
     assertTrue("clrSpaceQuota failed", shHDFS.getRet() == 0);
-  } 
+  }
 
   @Test
-  public void testRename() { 
+  public void testRename() {
     // name and space quotas stick after rename
     shHDFS.exec("hadoop fs -count -q $testQuotaFolder1");
     assertTrue("Could not use count command", shHDFS.getRet() == 0);
@@ -124,16 +124,16 @@ public class TestHDFSQuota {
   }
 
   @Test
-  public void testInputValues() { 
+  public void testInputValues() {
     // quotas must be greater than zero and no larger than Long.MAX_VALUE
     shHDFS.exec("hadoop dfsadmin -setQuota -1 $testQuotaFolder1");
     assertTrue("setQuota should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota -1 $testQuotaFolder1");
-    assertTrue("setSpaceQuota should not have worked", shHDFS.getRet() != 0);  
+    assertTrue("setSpaceQuota should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setQuota 1.04 $testQuotaFolder1");
     assertTrue("setQuota should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 1.04 $testQuotaFolder1");
-    assertTrue("setSpaceQuota should not have worked", shHDFS.getRet() != 0);        
+    assertTrue("setSpaceQuota should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setQuota 0 $testQuotaFolder1");
     assertTrue("setQuota should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 0 $testQuotaFolder1");
@@ -158,11 +158,11 @@ public class TestHDFSQuota {
   }
 
   @Test
-  public void testQuotasPostViolation() {  
+  public void testQuotasPostViolation() {
     // quota can be set even if it violates
     shHDFS.exec("hadoop dfsadmin -setQuota $LARGE $testQuotaFolder1");
     assertTrue("Could not setQuota", shHDFS.getRet() == 0);
-    shHDFS.exec("hadoop fs -put - $testQuotaFolder1" + "/testString1", "-------TEST STRING--------"); 
+    shHDFS.exec("hadoop fs -put - $testQuotaFolder1" + "/testString1", "-------TEST STRING--------");
     assertTrue("Could not use put command", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop fs -mkdir $testQuotaFolder1" + "/sample1");
     assertTrue("Could not use mkdir command", shHDFS.getRet() == 0);
@@ -180,8 +180,8 @@ public class TestHDFSQuota {
     assertTrue("mkdir should not have worked", shHDFS.getRet() != 0);
 
     // file creation should fail - name quota
-    shHDFS.exec("hadoop fs -rmr $testQuotaFolder1" + "/testString1"); 
-    shHDFS.exec("hadoop fs -put - $testQuotaFolder1" + "/testString2", "-------TEST STRING--------"); 
+    shHDFS.exec("hadoop fs -rmr $testQuotaFolder1" + "/testString1");
+    shHDFS.exec("hadoop fs -put - $testQuotaFolder1" + "/testString2", "-------TEST STRING--------");
     assertTrue("put should not have worked", shHDFS.getRet() != 0);
 
     // file creation should fail - space quota
@@ -189,8 +189,8 @@ public class TestHDFSQuota {
     assertTrue("Could not setSpaceQuota", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop dfsadmin -setQuota 1000 $testQuotaFolder1");
     assertTrue("Could not setQuota", shHDFS.getRet() == 0);
-    shHDFS.exec("hadoop fs -put - $testQuotaFolder1"  + "/testString3", "-------TEST STRING--------"); 
-    assertTrue("put should not have worked", shHDFS.getRet() != 0); 
+    shHDFS.exec("hadoop fs -put - $testQuotaFolder1" + "/testString3", "-------TEST STRING--------");
+    assertTrue("put should not have worked", shHDFS.getRet() != 0);
   }
 
   //@Test - can be reinstated upon resolution of BIGTOP-635 due to restarting of hdfs service
@@ -202,37 +202,35 @@ public class TestHDFSQuota {
     shHDFS.exec("hadoop fs -put - $date" + "/testString1", "-------TEST STRING--------");
     assertTrue("Could not use put command", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop dfsadmin -setQuota 1 $date");
-    assertTrue("Could not setQuota", shHDFS.getRet() == 0); 
+    assertTrue("Could not setQuota", shHDFS.getRet() == 0);
     shHDFS.exec("date");
-    String date1 = "logTest" + shHDFS.getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+    String date1 = "logTest" + shHDFS.getOut().get(0).replaceAll("\\s", "").replaceAll(":", "");
     shHDFS.exec("hadoop fs -mkdir $date1");
     assertTrue("Could not use mkdir command", shHDFS.getRet() == 0);
-    shHDFS.exec("hadoop fs -put - $date1"  + "/testString2", "-------TEST STRING--------"); 
+    shHDFS.exec("hadoop fs -put - $date1" + "/testString2", "-------TEST STRING--------");
     assertTrue("Could not use put command", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 1 $date1");
-    assertTrue("Could not setSpaceQuota", shHDFS.getRet() == 0); 
+    assertTrue("Could not setSpaceQuota", shHDFS.getRet() == 0);
     shHDFS.exec("for service in /etc/init.d/hadoop-hdfs-*; do sudo \$service stop; done");
     shHDFS.exec("for service in /etc/init.d/hadoop-hdfs-*; do sudo \$service start; done");
     shHDFS.exec("grep \"Quota violation in image for //user/hdfs/$date\" /var/log/hadoop-hdfs/hadoop-hdfs-namenode*.log");
     if (shHDFS.getOut().isEmpty()) {
       assertTrue("Log was not written", 1 == 0);
-    }
-    else {
+    } else {
       assertTrue(shHDFS.getOut().get(0).contains(date));
     }
     shHDFS.exec("grep \"Quota violation in image for //user/hdfs/$date1\" /var/log/hadoop-hdfs/hadoop-hdfs-namenode*.log");
     if (shHDFS.getOut().isEmpty()) {
       assertTrue("Log was not written", 1 == 0);
-    }
-    else {
+    } else {
       assertTrue(shHDFS.getOut().get(0).contains(date1));
     }
-    
+
     shHDFS.exec("hadoop fs -rmr $date1");
     // following while loop is due to namenode going into safemode for about 15 seconds after being restarted
     while (shHDFS.getErr().get(0).contains("safe mode") || (shHDFS.getErr().size() > 1 && shHDFS.getErr().get(1).contains("safe mode"))) {
-          shHDFS.exec("hadoop fs -rmr $date1");
-    } 
+      shHDFS.exec("hadoop fs -rmr $date1");
+    }
   }
 
   @Test
@@ -246,7 +244,7 @@ public class TestHDFSQuota {
     shHDFS.exec("hadoop dfsadmin -setQuota 1000 $date/testString1");
     assertTrue("setting quota on a file should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 1000 $date/testString1");
-    assertTrue("setting quota on a file should not have worked", shHDFS.getRet() != 0); 
+    assertTrue("setting quota on a file should not have worked", shHDFS.getRet() != 0);
 
     // Errors when clearing quotas on a file
     shHDFS.exec("hadoop dfsadmin -clrQuota $date/testString1");
@@ -256,15 +254,15 @@ public class TestHDFSQuota {
 
     // set/clr quota on nonexistent directory
     shHDFS.exec("hadoop dfsadmin -setQuota 100 DIRECTORYDOESNOTEXIST" + date);
-    assertTrue("setting quota on non-existant directory should not have worked", shHDFS.getRet() != 0); 
+    assertTrue("setting quota on non-existant directory should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 100 DIRECTORYDOESNOTEXIST" + date);
-    assertTrue("setting quota on non-existant directory should not have worked", shHDFS.getRet() != 0); 
+    assertTrue("setting quota on non-existant directory should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -clrQuota DIRECTORYDOESNOTEXIST" + date);
-    assertTrue("clearing quota on non-existant directory should not have worked", shHDFS.getRet() != 0); 
+    assertTrue("clearing quota on non-existant directory should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -clrSpaceQuota DIRECTORYDOESNOTEXIST" + date);
-    assertTrue("clearing quota on non-existant directory should not have worked", shHDFS.getRet() != 0); 
+    assertTrue("clearing quota on non-existant directory should not have worked", shHDFS.getRet() != 0);
 
-    shHDFS.exec("hadoop fs -rmr $date"); 
+    shHDFS.exec("hadoop fs -rmr $date");
   }
 
   @Test
@@ -272,29 +270,29 @@ public class TestHDFSQuota {
     // increasing/decreasing replication factor of a file should debit/credit quota
     String repFolder = "/tmp/repFactorTest" + quotaDate;
     shHDFS.exec("hadoop fs -mkdir $repFolder");
-    assertTrue("Could not use mkdir command", shHDFS.getRet() == 0);    
-    shHDFS.exec("hadoop fs -put - $repFolder" + "/testString1" , "-------TEST STRING--------");
+    assertTrue("Could not use mkdir command", shHDFS.getRet() == 0);
+    shHDFS.exec("hadoop fs -put - $repFolder" + "/testString1", "-------TEST STRING--------");
     assertTrue("Could not use put command", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 1000 $repFolder");
-    assertTrue("Could not setQuota", shHDFS.getRet() == 0); 
+    assertTrue("Could not setQuota", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop fs -setrep 1 $repFolder/testString1");
     shHDFS.exec("hadoop fs -count -q $repFolder");
     assertTrue("Could not use count command", shHDFS.getRet() == 0);
-    String[] output = shHDFS.getOut().get(0).trim().split();   
+    String[] output = shHDFS.getOut().get(0).trim().split();
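     // space charged = SPACE_QUOTA (output[2]) - REMAINING_SPACE_QUOTA (output[3]); every replica counts, so the charge scales with replication factor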
     int size_of_one = Integer.parseInt(output[2]) - Integer.parseInt(output[3]);
     shHDFS.exec("hadoop fs -setrep 5 $repFolder/testString1");
     shHDFS.exec("hadoop fs -count -q $repFolder");
     assertTrue("Could not use count command", shHDFS.getRet() == 0);
-    output = shHDFS.getOut().get(0).trim().split();   
+    output = shHDFS.getOut().get(0).trim().split();
     int size_of_five = Integer.parseInt(output[2]) - Integer.parseInt(output[3]);
     assertTrue("Quota not debited correctly", size_of_one * 5 == size_of_five);
     shHDFS.exec("hadoop fs -setrep 3 $repFolder/testString1");
     shHDFS.exec("hadoop fs -count -q $repFolder");
     assertTrue("Could not use count command", shHDFS.getRet() == 0);
-    output = shHDFS.getOut().get(0).trim().split();   
+    output = shHDFS.getOut().get(0).trim().split();
     int size_of_three = Integer.parseInt(output[2]) - Integer.parseInt(output[3]);
     assertTrue("Quota not credited correctly", size_of_one * 3 == size_of_three);
-    shHDFS.exec("hadoop fs -rmr $repFolder"); 
+    shHDFS.exec("hadoop fs -rmr $repFolder");
   }
 
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTextSnappy.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTextSnappy.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTextSnappy.groovy
index 12e655e..52df9cb 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTextSnappy.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTextSnappy.groovy
@@ -30,10 +30,10 @@ class TestTextSnappy {
   static String snappyFile = "part-00001.snappy"
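   // Snappy-compressed fixture staged on HDFS for the text-decompression checks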
 
   @BeforeClass
-  static void  setUp() throws IOException {
+  static void setUp() throws IOException {
     sh.exec(
-    "hadoop fs  -mkdir ${testDir}",
-    "hadoop fs -put ${snappyFile} ${testDir}/${snappyFile}",
+      "hadoop fs  -mkdir ${testDir}",
+      "hadoop fs -put ${snappyFile} ${testDir}/${snappyFile}",
     )
     logError(sh)
   }