Posted to commits@bigtop.apache.org by db...@apache.org on 2015/02/12 06:48:47 UTC

[1/5] bigtop git commit: BIGTOP-1601. cleanup whitespaces across test-artifacts

Repository: bigtop
Updated Branches:
  refs/heads/master bb86afb3b -> 3e17db895


http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasics.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasics.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasics.groovy
index 38013b5..2a0ed1c 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasics.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasics.groovy
@@ -58,8 +58,10 @@ class TestPackagesBasics extends PackageTestCommon {
 
   @BeforeClass
   public static void setUp() {
-    tryOrFail({repo.addRepo()}, 2, "adding repository failed");
-    tryOrFail({(repo.getPm().refresh() == 0)}, 1, "refreshing repository failed");
+    tryOrFail({ repo.addRepo() }, 2, "adding repository failed");
+    tryOrFail({
+      (repo.getPm().refresh() == 0)
+    }, 1, "refreshing repository failed");
   }
 
   @AfterClass
@@ -71,36 +73,38 @@ class TestPackagesBasics extends PackageTestCommon {
   private static void mergeTreeIntoForrest(NodeList forrest, Node tree) {
     for (it in forrest) {
       if (it instanceof Node && it.name() == tree.name()) {
-        tree.value().groupBy({(it instanceof Node)?it.name():"-$it"}).each { k,v -> 
+        tree.value().groupBy({
+          (it instanceof Node) ? it.name() : "-$it"
+        }).each { k, v ->
           if (v.size() == 1 && v.get(0) instanceof Node) {
             mergeTreeIntoForrest(it.value(), v.get(0));
           } else if (v.size() != 1) {
             it.value().addAll(v);
           }
         }
-      return;
-     }
-   }
-   forrest.add(tree);
+        return;
+      }
+    }
+    forrest.add(tree);
   }
-    
+
   private static Node mergeTrees(Node n1, Node n2) {
-     Node merge = new Node(null, "merge");
-     merge.append(n1);
-     mergeTreeIntoForrest(merge.value(), n2);
-     return (merge.children().size() == 1) ? merge.children().get(0) :
-                                             merge;
-  } 
+    Node merge = new Node(null, "merge");
+    merge.append(n1);
+    mergeTreeIntoForrest(merge.value(), n2);
+    return (merge.children().size() == 1) ? merge.children().get(0) :
+      merge;
+  }
 
   @Parameters
   public static Map<String, Object[]> generateTests() {
     String type = TestPackagesBasics.pm.getType();
-    String arch = (new Shell()).exec("uname -m").getOut().get(0).replaceAll(/i.86/,"i386").replaceAll(/x86_64/,"amd64");
+    String arch = (new Shell()).exec("uname -m").getOut().get(0).replaceAll(/i.86/, "i386").replaceAll(/x86_64/, "amd64");
     String archTranslated = (type == "apt") ? "" : ((arch == "amd64") ? ".x86_64" : ".${arch}");
     def config = mergeTrees(new XmlParser().parse(TestPackagesBasics.class.getClassLoader().
-                                            getResourceAsStream("package_data.xml")),
-                            new XmlParser().parse(TestPackagesBasics.class.getClassLoader().
-                                            getResourceAsStream("${type}/package_data.xml")));
+      getResourceAsStream("package_data.xml")),
+      new XmlParser().parse(TestPackagesBasics.class.getClassLoader().
+        getResourceAsStream("${type}/package_data.xml")));
 
     Map<String, Object[]> res = [:];
 
@@ -135,35 +139,37 @@ class TestPackagesBasics extends PackageTestCommon {
     name = pkgName;
     golden = pkgGolden;
     // hopefully the following line will go away soon, once PackageInstance becomes more sophisticated
-    synchronized (pkgs) { pkgs[name] = pkgs[name] ?: PackageInstance.getPackageInstance(pm, name); }
+    synchronized (pkgs) {
+      pkgs[name] = pkgs[name] ?: PackageInstance.getPackageInstance(pm, name);
+    }
     pkg = pkgs[name];
   }
 
-  @RunStage(level=-3)
+  @RunStage(level = -3)
   @Test
-  synchronized void testRemoteMetadata() { 
+  synchronized void testRemoteMetadata() {
     if (!isUpgrade()) {
       if (pkg.isInstalled()) {
         checkThat("package $name is already installed and could not be removed",
-                  pkg.remove(), equalTo(0));
+          pkg.remove(), equalTo(0));
       }
 
       checkRemoteMetadata(getMap(golden.metadata), false);
     }
   }
 
-  @RunStage(level=-2)
+  @RunStage(level = -2)
   @Test
   synchronized void testPackageInstall() {
     // WARNING: sometimes packages do not install because the server is busy
-    for (int i=3; pkg.install() && i>0; i--) {
+    for (int i = 3; pkg.install() && i > 0; i--) {
       recordFailure("can not install package $name will retry $i times");
     }
 
     // TODO: we need to come up with a way to abort any further execution to avoid spurious failures
 
     checkThat("package $name is expected to be installed",
-              pm.isInstalled(pkg), equalTo(true));
+      pm.isInstalled(pkg), equalTo(true));
 
     pkg.refresh();
   }
@@ -212,19 +218,22 @@ class TestPackagesBasics extends PackageTestCommon {
   Map getMapN(Node node) {
     String packagerType = pm.getType();
     Map res = [:];
-                                 node.attributes()
+    node.attributes()
     node.children().each {
       String key = it.name().toString();
-      if (key == "tag" && it.attributes()["name"] != null) { // <tag name="foo"/> -> <foo/>
+      if (key == "tag" && it.attributes()["name"] != null) {
+        // <tag name="foo"/> -> <foo/>
         key = it.attributes()["name"];
       }
       def value = null;
       if (it.children().size() == 0) {  // empty tags <foo/>
         Map attr = it.attributes();
         value = (attr.size() > 0) ? attr : key;
-      } else if (it.children().size() == 1 && it.children().get(0) instanceof java.lang.String) { // text tags <foo>bar</foo>
+      } else if (it.children().size() == 1 && it.children().get(0) instanceof java.lang.String) {
+        // text tags <foo>bar</foo>
         value = it.text();
-      } else if (["apt", "yum", "zypper"].contains(key)) { // poor man's XML filtering
+      } else if (["apt", "yum", "zypper"].contains(key)) {
+        // poor man's XML filtering
         res.putAll((packagerType == key) ? getMapN(it) : [:]);
       } else {
         value = getMapN(it);
@@ -244,10 +253,10 @@ class TestPackagesBasics extends PackageTestCommon {
           if (res[key] instanceof Map && value instanceof Map) {
             res[key].putAll(value);
           } else {
-          if (!(res[key] instanceof List)) {
-            res[key] = [res[key]];
-          }
-          res[key].add(value);
+            if (!(res[key] instanceof List)) {
+              res[key] = [res[key]];
+            }
+            res[key].add(value);
           }
         }
       }
@@ -258,9 +267,9 @@ class TestPackagesBasics extends PackageTestCommon {
 
   public void checkRemoval() {
     checkThat("package $name failed to be removed",
-              pkg.remove(), equalTo(0));
+      pkg.remove(), equalTo(0));
     checkThat("package $name is NOT expected to remain installed after removal",
-              pm.isInstalled(pkg), equalTo(false));
+      pm.isInstalled(pkg), equalTo(false));
 
     checkPackageFilesGotRemoved(getMap(golden.content));
   }
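
The hunks above re-indent, among other things, the recursive mergeTrees()/mergeTreeIntoForrest() pair that overlays a packager-specific package_data.xml onto the common one. As a reading aid, here is a minimal standalone Groovy sketch of that merge idea, with hypothetical input and simplified semantics (nodes sharing a name merge depth-first, unmatched subtrees are appended):

  def base    = new XmlParser().parseText('<pkg><meta><name>hadoop</name></meta></pkg>')
  def overlay = new XmlParser().parseText('<pkg><meta><arch>x86_64</arch></meta><extra/></pkg>')

  def merge
  merge = { Node into, Node from ->
    // iterate over a copy: append() re-parents nodes and would disturb the live list
    new ArrayList(from.children()).each { child ->
      def match = (child instanceof Node) ?
        into.children().find { it instanceof Node && it.name() == child.name() } : null
      if (match != null) {
        merge(match, child)   // same tag on both sides: descend and merge
      } else if (child instanceof Node) {
        into.append(child)    // unique subtree: attach it under the base tree
      }
    }
  }
  merge(base, overlay)
  println groovy.xml.XmlUtil.serialize(base)   // <pkg> now carries name, arch and extra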

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasicsWithRM.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasicsWithRM.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasicsWithRM.groovy
index 913f61b..7ef2cf1 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasicsWithRM.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesBasicsWithRM.groovy
@@ -28,7 +28,7 @@ class TestPackagesBasicsWithRM extends TestPackagesBasics {
     super(pkgName, pkgGolden);
   }
 
-  @RunStage(level=10)
+  @RunStage(level = 10)
   @Test
   void testPackageRemove() {
     checkRemoval();

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributed.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributed.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributed.groovy
index 32f59c2..f84ce1f 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributed.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributed.groovy
@@ -28,17 +28,17 @@ class TestPackagesPseudoDistributed extends TestPackagesBasics {
     super(pkgName, pkgGolden);
   }
 
-  @RunStage(level=-3)
+  @RunStage(level = -3)
   @Test
   synchronized void testRemoteMetadata() {
   }
 
-  @RunStage(level=-1)
+  @RunStage(level = -1)
   @Test
   void testPackageUpgrade() {
     if (isUpgrade()) {
       checkThat("upgrade sequence on a package $name failed to be executed",
-                BTUpgradeSequence.execute(name, System.getProperty("bigtop.prev.repo.version"), "0.5.0"), equalTo(0));
+        BTUpgradeSequence.execute(name, System.getProperty("bigtop.prev.repo.version"), "0.5.0"), equalTo(0));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedDependency.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedDependency.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedDependency.groovy
index 2df72fc..20147e9 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedDependency.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedDependency.groovy
@@ -28,17 +28,17 @@ class TestPackagesPseudoDistributedDependency extends TestPackagesBasics {
     super(pkgName, pkgGolden);
   }
 
-  @RunStage(level=-3)
+  @RunStage(level = -3)
   @Test
   synchronized void testRemoteMetadata() {
   }
 
-  @RunStage(level=-1)
+  @RunStage(level = -1)
   @Test
   void testPackageUpgrade() {
     if (isUpgrade()) {
       checkThat("upgrade sequence on a package $name failed to be executed",
-                BTUpgradeSequence.execute(name, System.getProperty("bigtop.prev.repo.version"), "0.5.0"), equalTo(0));
+        BTUpgradeSequence.execute(name, System.getProperty("bigtop.prev.repo.version"), "0.5.0"), equalTo(0));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedFileContents.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedFileContents.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedFileContents.groovy
index 47699d1..61085b5 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedFileContents.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedFileContents.groovy
@@ -28,17 +28,17 @@ class TestPackagesPseudoDistributedFileContents extends TestPackagesBasics {
     super(pkgName, pkgGolden);
   }
 
-  @RunStage(level=-3)
+  @RunStage(level = -3)
   @Test
   synchronized void testRemoteMetadata() {
   }
 
-  @RunStage(level=-1)
+  @RunStage(level = -1)
   @Test
   void testPackageUpgrade() {
     if (isUpgrade()) {
       checkThat("upgrade sequence on a package $name failed to be executed",
-                BTUpgradeSequence.execute(name, System.getProperty("bigtop.prev.repo.version"), "0.5.0"), equalTo(0));
+        BTUpgradeSequence.execute(name, System.getProperty("bigtop.prev.repo.version"), "0.5.0"), equalTo(0));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedServices.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedServices.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedServices.groovy
index 0647f93..fb72eec 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedServices.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedServices.groovy
@@ -28,17 +28,17 @@ class TestPackagesPseudoDistributedServices extends TestPackagesBasics {
     super(pkgName, pkgGolden);
   }
 
-  @RunStage(level=-3)
+  @RunStage(level = -3)
   @Test
   synchronized void testRemoteMetadata() {
   }
 
-  @RunStage(level=-1)
+  @RunStage(level = -1)
   @Test
   void testPackageUpgrade() {
     if (isUpgrade()) {
       checkThat("upgrade sequence on a package $name failed to be executed",
-                BTUpgradeSequence.execute(name, System.getProperty("bigtop.prev.repo.version"), "0.5.0"), equalTo(0));
+        BTUpgradeSequence.execute(name, System.getProperty("bigtop.prev.repo.version"), "0.5.0"), equalTo(0));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedState.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedState.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedState.groovy
index 55365aa..05ea260 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedState.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedState.groovy
@@ -22,8 +22,8 @@ import org.junit.runner.RunWith
 
 @RunWith(Suite.class)
 @Suite.SuiteClasses([
-  TestPackagesPseudoDistributed.class,
-  TestServices.class,
+TestPackagesPseudoDistributed.class,
+TestServices.class,
 ])
 class TestPackagesPseudoDistributedState {
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedUpgrade.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedUpgrade.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedUpgrade.groovy
index 944204d..7aa8ca6 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedUpgrade.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedUpgrade.groovy
@@ -23,11 +23,11 @@ import org.junit.runner.RunWith
 @RunWith(Suite.class)
 
 @Suite.SuiteClasses([
-  DeployCDH.class,
-  TestServicesCreateState.class,
-  TestPackagesPseudoDistributed.class,
-  TestServicesCreateStateMissing.class,
-  TestServicesVerifyState.class,
+DeployCDH.class,
+TestServicesCreateState.class,
+TestPackagesPseudoDistributed.class,
+TestServicesCreateStateMissing.class,
+TestServicesVerifyState.class,
 ])
 class TestPackagesPseudoDistributedUpgrade {
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedWithRM.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedWithRM.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedWithRM.groovy
index d61285d..d268c72 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedWithRM.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestPackagesPseudoDistributedWithRM.groovy
@@ -33,12 +33,13 @@ class TestPackagesPseudoDistributedWithRM extends TestPackagesPseudoDistributed
     super(pkgName, pkgGolden);
   }
 
-  @RunStage(level=1)
+  @RunStage(level = 1)
   @Test
   void testPackageRemove() {
     checkComplimentary32bitInstall();
     checkDaemonStart();
-    sleep(3001); // TODO FIXME: CDH-2816 should address the timing of daemons start.
+    sleep(3001);
+    // TODO FIXME: CDH-2816 should address the timing of daemons start.
     checkRemoval();
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServices.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServices.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServices.groovy
index 4f20524..9f32fe5 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServices.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServices.groovy
@@ -73,21 +73,21 @@ class TestServices {
     }
   }
 
-  @RunStage(level=-1)
+  @RunStage(level = -1)
   @Test
   void createState() {
     checkThat("failed to configure service ${svcs.get(0).getName()}",
-              verifier.config(), equalTo(true));
+      verifier.config(), equalTo(true));
 
     svcs.each {
       checkThat("service ${it.getName()} failed to start",
-                it.start(), equalTo(0));
+        it.start(), equalTo(0));
     }
 
     sleep(60001);
     verifier.createState();
     checkThat("initial state verification failed",
-              verifier.verifyState(), equalTo(true));
+      verifier.verifyState(), equalTo(true));
 
     svcs.reverseEach {
       // TODO: we're only trying the best we can here
@@ -103,21 +103,21 @@ class TestServices {
     }
   }
 
-  @RunStage(level=1)
+  @RunStage(level = 1)
   @Test
   void verifyState() {
     svcs.each {
       checkThat("failed to configure service ${it.getName()}",
-                verifier.config(), equalTo(true));
+        verifier.config(), equalTo(true));
     }
 
     svcs.each {
       checkThat("service ${it.getName()} failed to start",
-                it.start(), equalTo(0));
+        it.start(), equalTo(0));
     }
     sleep(60001);
     checkThat("state verification failed after daemons got restarted",
-              verifier.verifyState(), equalTo(true));
+      verifier.verifyState(), equalTo(true));
 
     svcs.reverseEach { it.stop(); }
     sleep(5001);
@@ -127,7 +127,7 @@ class TestServices {
       if (!shRoot.getRet()) {
         shRoot.exec("kill -9 `ps -U${it} -opid=`");
         checkThat("service running under the name of $it is supposed to be stopped, but it is not",
-                  true, equalTo(false));
+          true, equalTo(false));
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesCreateState.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesCreateState.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesCreateState.groovy
index f877411..866eda1 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesCreateState.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesCreateState.groovy
@@ -25,14 +25,14 @@ class TestServicesCreateState extends TestServices {
   @Parameters
   static Map<String, Object[]> generateTests() {
     return selectServices(System.getProperty("bigtop.prev.repo.version",
-                                      System.getProperty("bigtop.repo.version", "0.6.0")));
+      System.getProperty("bigtop.repo.version", "0.6.0")));
   }
 
   TestServicesCreateState(Map.Entry ent) {
     super(ent);
   }
 
-  @RunStage(level=1)
+  @RunStage(level = 1)
   @Test
   void verifyState() {
   }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesCreateStateMissing.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesCreateStateMissing.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesCreateStateMissing.groovy
index 7e714ea..35cd271 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesCreateStateMissing.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesCreateStateMissing.groovy
@@ -22,7 +22,7 @@ import org.junit.runners.Parameterized.Parameters
 class TestServicesCreateStateMissing extends TestServicesCreateState {
   @Parameters
   static Map<String, Object[]> generateTests() {
-    return selectServices("${System.getProperty('bigtop.prev.repo.version','0.5.0')}..${System.getProperty('bigtop.repo.version', '0.6.0')}");
+    return selectServices("${System.getProperty('bigtop.prev.repo.version', '0.5.0')}..${System.getProperty('bigtop.repo.version', '0.6.0')}");
   }
 
   TestServicesCreateStateMissing(Map.Entry ent) {

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesVerifyState.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesVerifyState.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesVerifyState.groovy
index b2b1076..36da0e5 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesVerifyState.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/TestServicesVerifyState.groovy
@@ -26,7 +26,7 @@ class TestServicesVerifyState extends TestServices {
     super(ent);
   }
 
-  @RunStage(level=-1)
+  @RunStage(level = -1)
   @Test
   void createState() {
   }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/phoenix/src/main/groovy/org/apache/bigtop/itest/phoenix/smoke/TestPhoenixSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/phoenix/src/main/groovy/org/apache/bigtop/itest/phoenix/smoke/TestPhoenixSmoke.groovy b/bigtop-tests/test-artifacts/phoenix/src/main/groovy/org/apache/bigtop/itest/phoenix/smoke/TestPhoenixSmoke.groovy
index df53e5e..68385e0 100644
--- a/bigtop-tests/test-artifacts/phoenix/src/main/groovy/org/apache/bigtop/itest/phoenix/smoke/TestPhoenixSmoke.groovy
+++ b/bigtop-tests/test-artifacts/phoenix/src/main/groovy/org/apache/bigtop/itest/phoenix/smoke/TestPhoenixSmoke.groovy
@@ -46,11 +46,11 @@ public class TestPhoenixSmoke {
   // running cluster as well.
 
   static void runTest(String testName) {
-    sh.exec("HBASE_CLASSPATH=" + phoenixClientJar + ":" + phoenixCoreJar + ":" + phoenixCoreTestsJar + 
+    sh.exec("HBASE_CLASSPATH=" + phoenixClientJar + ":" + phoenixCoreJar + ":" + phoenixCoreTestsJar +
       " hbase org.junit.runner.JUnitCore " + testName);
     assertTrue(testName + " failed", sh.getRet() == 0);
   }
- 
+
   @Test
   public void testAlterTable() {
     runTest("org.apache.phoenix.end2end.AlterTableIT")
@@ -60,7 +60,7 @@ public class TestPhoenixSmoke {
   public void testArithmeticQuery() {
     runTest("org.apache.phoenix.end2end.ArithmeticQueryIT")
   }
-  
+
   @Test
   public void testArray() {
     runTest("org.apache.phoenix.end2end.ArrayIT")
@@ -160,7 +160,7 @@ public class TestPhoenixSmoke {
   public void testHashJoin() {
     runTest("org.apache.phoenix.end2end.HashJoinIT")
   }
-  
+
   @Test
   public void testInMemoryOrderBy() {
     runTest("org.apache.phoenix.end2end.InMemoryOrderByIT")
@@ -215,6 +215,7 @@ public class TestPhoenixSmoke {
   public void testQueryExecWithoutSCN() {
     runTest("org.apache.phoenix.end2end.QueryExecWithoutSCNIT")
   }
+
   @Test
   public void testQuery() {
     runTest("org.apache.phoenix.end2end.QueryIT")
@@ -304,12 +305,12 @@ public class TestPhoenixSmoke {
   public void testTenantSpecificTablesDDL() {
     runTest("org.apache.phoenix.end2end.TenantSpecificTablesDDLIT")
   }
-  
+
   @Test
   public void testTenantSpecificTablesDML() {
     runTest("org.apache.phoenix.end2end.TenantSpecificTablesDMLIT")
   }
-  
+
   @Test
   public void testTenantSpecificViewIndex() {
     runTest("org.apache.phoenix.end2end.TenantSpecificViewIndexIT")

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/SolrTestBase.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/SolrTestBase.groovy b/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/SolrTestBase.groovy
index a41883f..ef82b76 100644
--- a/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/SolrTestBase.groovy
+++ b/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/SolrTestBase.groovy
@@ -62,7 +62,7 @@ public class SolrTestBase {
     String fullUrl = _baseURL + url + ((url.indexOf("?") >= 0) ? "&" : "?") + "wt=json"
     URLConnection conn = new URL(fullUrl).openConnection()
     BufferedReader res = new BufferedReader(new InputStreamReader(
-            conn.getInputStream()))
+      conn.getInputStream()))
     String inputLine;
     StringBuilder sb = new StringBuilder()
     while ((inputLine = res.readLine()) != null) {
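
For context, the doReq() helper whose continuation is re-indented above follows a simple pattern: force a JSON response by appending wt=json (respecting any query string already present) and slurp the body line by line. A self-contained sketch, where the Solr address is a placeholder rather than anything taken from the patch:

  String base = "http://localhost:8983/solr/collection1"   // assumed address
  String url = "/select?q=*:*"
  String fullUrl = base + url + ((url.indexOf("?") >= 0) ? "&" : "?") + "wt=json"
  StringBuilder sb = new StringBuilder()
  new URL(fullUrl).openConnection().inputStream.withReader("UTF-8") { r ->
    String line
    while ((line = r.readLine()) != null) sb.append(line)   // accumulate the JSON body
  }
  println sb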

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestIndexing.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestIndexing.groovy b/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestIndexing.groovy
index 97f3edb..756e3ff 100644
--- a/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestIndexing.groovy
+++ b/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestIndexing.groovy
@@ -26,7 +26,7 @@ class TestIndexing extends SolrTestBase {
     // Index a couple of documents
     def builder = new groovy.json.JsonBuilder()
     builder([["id": "doc1", "name": URLEncoder.encode("first test document")],
-            ["id": "doc2", "name": URLEncoder.encode("second test document")]])
+      ["id": "doc2", "name": URLEncoder.encode("second test document")]])
     doReq(_updatePathJSON + builder.toString() + "&commit=true")
   }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestPing.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestPing.groovy b/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestPing.groovy
index f03f1d9..696c661 100644
--- a/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestPing.groovy
+++ b/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestPing.groovy
@@ -17,6 +17,7 @@
 package org.apache.bigtop.itest.solr.smoke
 
 import org.junit.Test
+
 /**
  * Utterly trivial test to see if the server is running
  */

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestSimple.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestSimple.groovy b/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestSimple.groovy
index 4448e76..e012d7c 100644
--- a/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestSimple.groovy
+++ b/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestSimple.groovy
@@ -31,8 +31,8 @@ class TestSimple extends SolrTestBase {
     //doReq(_updatePathJSON + builder.toString())
     StringBuilder sb = new StringBuilder()
     sb.append("<add><doc><field name=\"id\">doc1</field><field name=\"name\">first test document").
-       append("</field></doc><doc><field name=\"id\">doc2</field><field name=\"name\">second test document").
-       append("</field></doc></add>")
+      append("</field></doc><doc><field name=\"id\">doc2</field><field name=\"name\">second test document").
+      append("</field></doc></add>")
     doReq(_updatePathXML + URLEncoder.encode(sb.toString()))
     testEquals(doReq("/select?q=*:*"), "response.numFound", "2")
     testEquals(doReq("/select?q=name:\"first+test+document\""), "response.numFound", "1")

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestStatistics.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestStatistics.groovy b/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestStatistics.groovy
index dff83dd..3a72f57 100644
--- a/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestStatistics.groovy
+++ b/bigtop-tests/test-artifacts/solr/src/main/groovy/org/apache/bigtop/itest/solr/smoke/TestStatistics.groovy
@@ -24,11 +24,12 @@ import org.junit.Test
  */
 class TestStatistics extends SolrTestBase {
   @Test
-  public void testCache()  {
+  public void testCache() {
     Object res = doReq(_adminPath + "/mbeans?stats=true")
     ArrayList<Object> beans = res."solr-mbeans"
     for (int idx = 0; idx < beans.size(); idx++) {
-      if (beans[idx] instanceof String && "CACHE".equals(beans[idx])) { // Next object is the stats data for caches.
+      if (beans[idx] instanceof String && "CACHE".equals(beans[idx])) {
+        // Next object is the stats data for caches.
         Object hits = beans[idx + 1].filterCache.stats.hits
         break;
       }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/spark/src/main/groovy/org/apache/bigtop/itest/spark/TestSparkSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/spark/src/main/groovy/org/apache/bigtop/itest/spark/TestSparkSmoke.groovy b/bigtop-tests/test-artifacts/spark/src/main/groovy/org/apache/bigtop/itest/spark/TestSparkSmoke.groovy
index 400cc92..4fcb67e 100644
--- a/bigtop-tests/test-artifacts/spark/src/main/groovy/org/apache/bigtop/itest/spark/TestSparkSmoke.groovy
+++ b/bigtop-tests/test-artifacts/spark/src/main/groovy/org/apache/bigtop/itest/spark/TestSparkSmoke.groovy
@@ -44,10 +44,10 @@ public class TestSparkSmoke implements Serializable {
 
   @BeforeClass
   static void setUp() {
-   sh.exec("pwd")
-   pwd = sh.out
-   int lastIndex = pwd.length() - 1
-   pwd = pwd.substring(1, lastIndex)
+    sh.exec("pwd")
+    pwd = sh.out
+    int lastIndex = pwd.length() - 1
+    pwd = pwd.substring(1, lastIndex)
   }
 
   @Test
@@ -79,7 +79,7 @@ public class TestSparkSmoke implements Serializable {
     String[] jars = [System.getProperty("sparkJar"), org.apache.bigtop.itest.JarContent.getJarURL("groovy.lang.GroovyObject")];
 
     JavaSparkContext sc = new JavaSparkContext("local", "Simple Job",
-        SPARK_HOME, jars);
+      SPARK_HOME, jars);
 
     JavaRDD<String> logData = sc.textFile(logFile).cache();
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/integration/sqoop/IntegrationTestSqoopHBase.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/integration/sqoop/IntegrationTestSqoopHBase.groovy b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/integration/sqoop/IntegrationTestSqoopHBase.groovy
index 9a277a5..9dbdc7a 100644
--- a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/integration/sqoop/IntegrationTestSqoopHBase.groovy
+++ b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/integration/sqoop/IntegrationTestSqoopHBase.groovy
@@ -59,6 +59,7 @@ public class IntegrationTestSqoopHBase {
     test.exec("hadoop fs -mkdir $path");
     return test.getRet();
   }
+
   public static int rmr(String path) {
     Shell rmr = new Shell("/bin/bash -s");
     rmr.exec("hadoop fs -rmr -skipTrash $path");
@@ -66,11 +67,11 @@ public class IntegrationTestSqoopHBase {
   }
 
   public static String mktemps() {
-     mkdir("IntegrationTestSqoopHBase-${(new Date().getTime())}");
+    mkdir("IntegrationTestSqoopHBase-${(new Date().getTime())}");
   }
 
   @Before
-  public void setUp () {
+  public void setUp() {
     JarContent.unpackJarContainer(IntegrationTestSqoopHBase.class, '.', DATA_DIR);
 
     rmr('test_table');
@@ -85,7 +86,7 @@ public class IntegrationTestSqoopHBase {
     def out = shell.out.join('\n');
 
     assertFalse("Unable to create HBase table by script create-table.hxt",
-        (out =~ /ERROR/).find());
+      (out =~ /ERROR/).find());
   }
 
   @After
@@ -95,7 +96,7 @@ public class IntegrationTestSqoopHBase {
   }
 
   @Test
-  public void hBaseSqoop () {
+  public void hBaseSqoop() {
     def hostname = shell.exec('hostname').out.get(0);
     def dbURL = "jdbc:mysql://$hostname/testhbase";
     def OUTFILE = 'outfile.txt';
@@ -108,6 +109,6 @@ public class IntegrationTestSqoopHBase {
     // TODO need to conver shell callouts to power tools with Java parsing of
     // ' 10                                    column=data:b, timestamp=1301075559859, value=ten'
     assertEquals("HBase scan output did not match expected output. File: $OUTFILE",
-        0, shell.exec("diff -u $OUTFILE expected-hbase-output.txt.resorted").getRet() );
+      0, shell.exec("diff -u $OUTFILE expected-hbase-output.txt.resorted").getRet());
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/integration/sqoop/IntegrationTestSqoopHive.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/integration/sqoop/IntegrationTestSqoopHive.groovy b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/integration/sqoop/IntegrationTestSqoopHive.groovy
index eb14fa5..30ffdb3 100644
--- a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/integration/sqoop/IntegrationTestSqoopHive.groovy
+++ b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/integration/sqoop/IntegrationTestSqoopHive.groovy
@@ -60,7 +60,7 @@ class IntegrationTestSqoopHive {
   }
 
   @Before
-  public void setUp () {
+  public void setUp() {
     JarContent.unpackJarContainer(IntegrationTestSqoopHive.class, '.', DATA_DIR);
 
     // MySQL preparations
@@ -85,7 +85,7 @@ class IntegrationTestSqoopHive {
   }
 
   @Test
-  public void hiveSqoop () {
+  public void hiveSqoop() {
     def hostname = shell.exec('hostname').out.get(0);
     def dbURL = "jdbc:mysql://$hostname/testhive";
     //Run Sqoop Hive import now
@@ -95,6 +95,6 @@ class IntegrationTestSqoopHive {
     shell.exec("${HIVE_HOME}/bin/hive -f $DATA_DIR/hive-select-table.hql > $OUTFILE");
     assertEquals("Unable to run hive-select-table.hql script", 0, shell.ret);
     assertEquals("Hive output did not match expected output. File: $OUTFILE",
-        0, shell.exec("diff -u $OUTFILE $DATA_DIR/expected-hive-output.txt").getRet() );
+      0, shell.exec("diff -u $OUTFILE $DATA_DIR/expected-hive-output.txt").getRet());
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopExport.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopExport.groovy b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopExport.groovy
index 145c1a6..d310a3b 100644
--- a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopExport.groovy
+++ b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopExport.groovy
@@ -50,15 +50,15 @@ class TestSqoopExport {
 
   private static final String MYSQL_COMMAND =
     "mysql -h $MYSQL_HOST --user=$MYSQL_USER" +
-    (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
+      (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
   private static final String MYSQL_DBNAME = System.getProperty("mysql.dbname", "mysqltestdb");
   private static final String SQOOP_CONNECTION_STRING =
     "jdbc:mysql://$MYSQL_HOST/$MYSQL_DBNAME";
   private static final String SQOOP_CONNECTION =
     "--connect jdbc:mysql://$MYSQL_HOST/$MYSQL_DBNAME --username=$MYSQL_USER" +
-    (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
+      (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
   static {
-    System.out.println("SQOOP_CONNECTION string is " + SQOOP_CONNECTION );
+    System.out.println("SQOOP_CONNECTION string is " + SQOOP_CONNECTION);
   }
   private static final String DATA_DIR = System.getProperty("data.dir", "mysql-files");
   private static final String INPUT = System.getProperty("input.dir", "/tmp/input-dir");
@@ -71,7 +71,7 @@ class TestSqoopExport {
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs -rmr -skipTrash $INPUT");
       assertTrue("Deletion of previous $INPUT from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
     sh.exec("hadoop fs -mkdir $INPUT");
     assertTrue("Could not create $INPUT directory", sh.getRet() == 0);
@@ -90,7 +90,7 @@ class TestSqoopExport {
     assertTrue("Could not create $INPUT/t_int directory", sh.getRet() == 0);
 
     // unpack resource
-    JarContent.unpackJarContainer(TestSqoopExport.class, '.' , null)
+    JarContent.unpackJarContainer(TestSqoopExport.class, '.', null)
 
     // upload data to HDFS 
     sh.exec("hadoop fs -put $DATA_DIR/sqoop-testtable.out $INPUT/testtable/part-m-00000");
@@ -98,8 +98,8 @@ class TestSqoopExport {
     sh.exec("hadoop fs -put $DATA_DIR/sqoop-t_date-export.out $INPUT/t_date/part-m-00000");
     sh.exec("hadoop fs -put $DATA_DIR/sqoop-t_string.out $INPUT/t_string/part-m-00000");
     sh.exec("hadoop fs -put $DATA_DIR/sqoop-t_fp.out $INPUT/t_fp/part-m-00000");
-    sh.exec("hadoop fs -put $DATA_DIR/sqoop-t_int.out $INPUT/t_int/part-m-00000"); 
-    
+    sh.exec("hadoop fs -put $DATA_DIR/sqoop-t_int.out $INPUT/t_int/part-m-00000");
+
     //create db
     sh.exec("cat $DATA_DIR/mysql-create-db.sql | $MYSQL_COMMAND");
     //create tables
@@ -108,12 +108,12 @@ class TestSqoopExport {
 
   @AfterClass
   static void tearDown() {
-    if ('YES'.equals(System.getProperty('delete.testdata','no').toUpperCase())) {
+    if ('YES'.equals(System.getProperty('delete.testdata', 'no').toUpperCase())) {
       sh.exec("hadoop fs -test -e $INPUT");
       if (sh.getRet() == 0) {
         sh.exec("hadoop fs -rmr -skipTrash $INPUT");
         assertTrue("Deletion of $INPUT from HDFS failed",
-            sh.getRet() == 0);
+          sh.getRet() == 0);
       }
     }
   }
@@ -212,7 +212,7 @@ class TestSqoopExport {
 
     sh.exec("echo 'use mysqltestdb;select * from t_bool' | $MYSQL_COMMAND --skip-column-names | sed 's/\t/,/g' > t_bool.out");
     assertEquals("sqoop export did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-t_bool-export.out t_bool.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-t_bool-export.out t_bool.out").getRet());
   }
 
   @Test
@@ -223,7 +223,7 @@ class TestSqoopExport {
 
     sh.exec("echo 'use mysqltestdb;select * from t_int' | $MYSQL_COMMAND --skip-column-names | sed 's/\t/,/g' > t_int.out");
     assertEquals("sqoop export did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-t_int.out t_int.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-t_int.out t_int.out").getRet());
   }
 
   @Test
@@ -234,7 +234,7 @@ class TestSqoopExport {
 
     sh.exec("echo 'use mysqltestdb;select * from t_fp' | $MYSQL_COMMAND --skip-column-names | sed 's/\t/,/g' > t_fp.out");
     assertEquals("sqoop export did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-t_fp.out t_fp.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-t_fp.out t_fp.out").getRet());
   }
 
   @Test
@@ -245,7 +245,7 @@ class TestSqoopExport {
 
     sh.exec("echo 'use mysqltestdb;select * from t_date' | $MYSQL_COMMAND --skip-column-names | sed 's/\t/,/g' > t_date.out");
     assertEquals("sqoop export did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-t_date.out t_date.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-t_date.out t_date.out").getRet());
   }
 
   @Test
@@ -256,7 +256,7 @@ class TestSqoopExport {
 
     sh.exec("echo 'use mysqltestdb;select * from t_string' | $MYSQL_COMMAND --skip-column-names | sed 's/\t/,/g' > t_string.out");
     assertEquals("sqoop export did not write expected data",
-            0, sh.exec("diff -u $DATA_DIR/sqoop-t_string.out t_string.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-t_string.out t_string.out").getRet());
   }
 
 }
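
The verification hunks above all share one pattern: dump a MySQL table as comma-separated rows and diff it against a golden file shipped with the test. A minimal sketch of that round trip; table, golden-file and connection values are placeholders, and the Shell import location is assumed from the iTest framework:

  import org.apache.bigtop.itest.shell.Shell   // assumed location of the iTest Shell

  def sh = new Shell("/bin/bash -s")
  def MYSQL_COMMAND = "mysql -h localhost --user=root"   // placeholder connection settings
  // dump the table as CSV (tab -> comma), then compare with the expected output
  sh.exec("echo 'use mysqltestdb;select * from t_demo' | $MYSQL_COMMAND --skip-column-names" +
    " | sed 's/\t/,/g' > t_demo.out")
  assert sh.exec("diff -u golden/sqoop-t_demo.out t_demo.out").getRet() == 0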

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopImport.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopImport.groovy b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopImport.groovy
index 00ef9fe..c2d7501 100644
--- a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopImport.groovy
+++ b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopImport.groovy
@@ -53,15 +53,15 @@ class TestSqoopImport {
 
   private static final String MYSQL_COMMAND =
     "mysql -h $MYSQL_HOST --user=$MYSQL_USER" +
-    (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
+      (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
   private static final String MYSQL_DBNAME = System.getProperty("mysql.dbname", "mysqltestdb");
   private static final String SQOOP_CONNECTION_STRING =
     "jdbc:mysql://$MYSQL_HOST/$MYSQL_DBNAME";
   private static final String SQOOP_CONNECTION =
     "--connect jdbc:mysql://$MYSQL_HOST/$MYSQL_DBNAME --username=$MYSQL_USER" +
-    (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
+      (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
   static {
-    System.out.println("SQOOP_CONNECTION string is " + SQOOP_CONNECTION );
+    System.out.println("SQOOP_CONNECTION string is " + SQOOP_CONNECTION);
   }
   private static final String DATA_DIR = System.getProperty("data.dir", "mysql-files");
   private static final String OUTPUT = System.getProperty("output.dir", "/tmp/output-dir");
@@ -74,10 +74,10 @@ class TestSqoopImport {
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs -rmr -skipTrash $OUTPUT");
       assertTrue("Deletion of previous $OUTPUT from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
     // unpack resource
-    JarContent.unpackJarContainer(TestSqoopImport.class, '.' , null)
+    JarContent.unpackJarContainer(TestSqoopImport.class, '.', null)
 
     // create the database
     sh.exec("cat $DATA_DIR/mysql-create-db.sql | $MYSQL_COMMAND");
@@ -89,12 +89,12 @@ class TestSqoopImport {
 
   @AfterClass
   static void tearDown() {
-    if ('YES'.equals(System.getProperty('delete.testdata','no').toUpperCase())) {
+    if ('YES'.equals(System.getProperty('delete.testdata', 'no').toUpperCase())) {
       sh.exec("hadoop fs -test -e $OUTPUT");
       if (sh.getRet() == 0) {
         sh.exec("hadoop fs -rmr -skipTrash $OUTPUT");
         assertTrue("Deletion of $OUTPUT from HDFS failed",
-            sh.getRet() == 0);
+          sh.getRet() == 0);
       }
     }
   }
@@ -156,7 +156,7 @@ class TestSqoopImport {
     assertNotSame(MPersistableEntity.PERSISTANCE_ID_DEFAULT, job.getPersistenceId());
   }
 
-  protected void runSqoopClient(String tableName=null, String partitionColumn=null, String tableColumns=null, String tableSQL=null, String outputSubdir=null, int extractors=0, int loaders=0) {
+  protected void runSqoopClient(String tableName = null, String partitionColumn = null, String tableColumns = null, String tableSQL = null, String outputSubdir = null, int extractors = 0, int loaders = 0) {
     // Connection creation
     MConnection connection = getClient().newConnection(1L);
     fillConnectionForm(connection);
@@ -168,35 +168,35 @@ class TestSqoopImport {
     // Connector values
     MFormList connectorForms = job.getConnectorPart();
 
-    if(tableName != null) {
+    if (tableName != null) {
       connectorForms.getStringInput("table.tableName").setValue(tableName);
     }
 
-    if(partitionColumn != null) {
+    if (partitionColumn != null) {
       connectorForms.getStringInput("table.partitionColumn").setValue(partitionColumn);
     }
 
-    if(tableColumns != null) {
+    if (tableColumns != null) {
       connectorForms.getStringInput("table.columns").setValue(tableColumns);
     }
 
-    if(tableSQL != null) {
+    if (tableSQL != null) {
       connectorForms.getStringInput("table.sql").setValue(tableSQL);
     }
 
     // Framework values
     MFormList frameworkForms = job.getFrameworkPart();
 
-    if(extractors > 0) {
+    if (extractors > 0) {
       frameworkForms.getIntegerInput("throttling.extractors").setValue(extractors);
     }
 
-    if(loaders > 0) {
+    if (loaders > 0) {
       frameworkForms.getIntegerInput("throttling.loaders").setValue(loaders);
     }
 
     String outSubdir;
-    if(outputSubdir == null) {
+    if (outputSubdir == null) {
       outSubdir = tableName;
     } else {
       outSubdir = outputSubdir;
@@ -227,7 +227,7 @@ class TestSqoopImport {
 
     sh.exec("hadoop fs -cat $OUTPUT/t_bool/part-* > t_bool.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-t_bool.out t_bool.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-t_bool.out t_bool.out").getRet());
   }
 
   @Test
@@ -239,7 +239,7 @@ class TestSqoopImport {
 
     sh.exec("hadoop fs -cat $OUTPUT/t_int/part-* > t_int.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-t_int.out t_int.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-t_int.out t_int.out").getRet());
   }
 
   @Test
@@ -251,7 +251,7 @@ class TestSqoopImport {
 
     sh.exec("hadoop fs -cat $OUTPUT/t_fp/part-* > t_fp.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-t_fp.out t_fp.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-t_fp.out t_fp.out").getRet());
   }
 
   @Test
@@ -263,7 +263,7 @@ class TestSqoopImport {
 
     sh.exec("hadoop fs -cat $OUTPUT/t_date/part-* > t_date.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-t_date.out t_date.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-t_date.out t_date.out").getRet());
   }
 
   @Test
@@ -275,7 +275,7 @@ class TestSqoopImport {
 
     sh.exec("hadoop fs -cat $OUTPUT/t_string/part-* > t_string.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-t_string.out t_string.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-t_string.out t_string.out").getRet());
   }
 
   @Test
@@ -288,7 +288,7 @@ class TestSqoopImport {
 
     sh.exec("hadoop fs -cat $OUTPUT/testtable/part-* > columns.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-columns.out columns.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-columns.out columns.out").getRet());
   }
 
   @Test
@@ -305,7 +305,7 @@ class TestSqoopImport {
 
     sh.exec("hadoop fs -cat $OUTPUT/$outputSubdir/part-*0 > num-mappers.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out num-mappers.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out num-mappers.out").getRet());
   }
 
   @Test
@@ -320,7 +320,7 @@ class TestSqoopImport {
 
     sh.exec("hadoop fs -cat $OUTPUT/$outputSubdir/part-* > query.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-query.out query.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-query.out query.out").getRet());
   }
 
   @Test
@@ -335,10 +335,9 @@ class TestSqoopImport {
 
     sh.exec("hadoop fs -cat $OUTPUT/$outputSubdir/part-* > split-by.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out split-by.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out split-by.out").getRet());
   }
 
-
   //----------------------------------------@Ignore("Backward Compatibility")------------------------------------------//
   // The functionality of the tests below is not currently supported by Sqoop 2.
 
@@ -347,14 +346,14 @@ class TestSqoopImport {
   @Test
   public void testImportAllTables() {
     String SQOOP_CONNECTION_IMPORT_ALL =
-    "--connect jdbc:mysql://$MYSQL_HOST/mysqltestdb2 --username=$MYSQL_USER" +
-    (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
+      "--connect jdbc:mysql://$MYSQL_HOST/mysqltestdb2 --username=$MYSQL_USER" +
+        (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
 
     sh.exec("sqoop import-all-tables $SQOOP_CONNECTION_IMPORT_ALL --warehouse-dir $OUTPUT/alltables");
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
     sh.exec("hadoop fs -cat $OUTPUT/alltables/testtable*/part-* > all-tables.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-all-tables.out all-tables.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-all-tables.out all-tables.out").getRet());
   }
 
   @Ignore("Backward Compatibility")
@@ -367,7 +366,7 @@ class TestSqoopImport {
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
     sh.exec("hadoop fs -cat $OUTPUT/append/part-* > append.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-append.out append.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-append.out append.out").getRet());
   }
 
   @Ignore("Backward Compatibility")
@@ -377,7 +376,7 @@ class TestSqoopImport {
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
     sh.exec("hadoop fs -cat $OUTPUT/direct/part-* > direct.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out direct.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out direct.out").getRet());
   }
 
   @Ignore("Backward Compatibility")
@@ -387,7 +386,7 @@ class TestSqoopImport {
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
     sh.exec("hadoop fs -cat $OUTPUT/warehouse-dir/testtable/part-* > warehouse-dir.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out warehouse-dir.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out warehouse-dir.out").getRet());
   }
 
   @Ignore("Backward Compatibility")
@@ -397,7 +396,7 @@ class TestSqoopImport {
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
     sh.exec("hadoop fs -cat $OUTPUT/where-clause/part-* > where-clause.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-where-clause.out where-clause.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-where-clause.out where-clause.out").getRet());
   }
 
   @Ignore("Backward Compatibility")
@@ -407,7 +406,7 @@ class TestSqoopImport {
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
     sh.exec("hadoop fs -cat $OUTPUT/null-string/part-* > null-string.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-null-string.out null-string.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-null-string.out null-string.out").getRet());
   }
 
   @Ignore("Backward Compatibility")
@@ -417,7 +416,7 @@ class TestSqoopImport {
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
     sh.exec("hadoop fs -cat $OUTPUT/non-null-string/part-* > non-null-string.out");
     assertEquals("sqoop import did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-null-non-string.out non-null-string.out").getRet());
+      0, sh.exec("diff -u $DATA_DIR/sqoop-null-non-string.out non-null-string.out").getRet());
   }
 
 }


[3/5] bigtop git commit: BIGTOP-1601. cleanup whitespaces across test-artifacts

Posted by db...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy
index 9700b08..6b6fe7d 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy
@@ -46,9 +46,9 @@ class TestHadoopExamples {
     JarContent.getJarName(HADOOP_MAPRED_HOME, 'hadoop.*examples.*.jar');
   static {
     assertNotNull("HADOOP_MAPRED_HOME has to be set to run this test",
-        HADOOP_MAPRED_HOME);
+      HADOOP_MAPRED_HOME);
     assertNotNull("HADOOP_CONF_DIR has to be set to run this test",
-        HADOOP_CONF_DIR);
+      HADOOP_CONF_DIR);
     assertNotNull("Can't find hadoop-examples.jar file", hadoopExamplesJar);
   }
   static final String HADOOP_EXAMPLES_JAR =
@@ -57,52 +57,52 @@ class TestHadoopExamples {
   static Shell sh = new Shell("/bin/bash -s");
 
   /**
-  * Public so that we can run these tests as scripts
-  * and the scripts can manually copy resoruces into DFS
-  * See BIGTOP-1222 for example.
-  */
-  public static final String SOURCE ="bigtop-tests/test-artifacts/hadoop/src/main/resources/"
+   * Public so that we can run these tests as scripts
+   * and the scripts can manually copy resources into DFS
+   * See BIGTOP-1222 for example.
+   */
+  public static final String SOURCE = "bigtop-tests/test-artifacts/hadoop/src/main/resources/"
   private static final String EXAMPLES = "examples";
   private static final String EXAMPLES_OUT = "examples-output";
   private static Configuration conf;
 
   private static String mr_version = System.getProperty("mr.version", "mr2");
-  
+
   static final String RANDOMTEXTWRITER_TOTALBYTES = (mr_version == "mr1") ?
-      "test.randomtextwrite.total_bytes" : "mapreduce.randomtextwriter.totalbytes";
+    "test.randomtextwrite.total_bytes" : "mapreduce.randomtextwriter.totalbytes";
 
   @AfterClass
   public static void tearDown() {
     sh.exec("hadoop fs -rmr -skipTrash ${EXAMPLES}",
-            "hadoop fs -rmr -skipTrash ${EXAMPLES_OUT}");
+      "hadoop fs -rmr -skipTrash ${EXAMPLES_OUT}");
   }
 
 
   @BeforeClass
   static void setUp() {
     conf = new Configuration();
-    try{
-       //copy examples/ int /user/root/ and
-       //then create examples-output directory
-       TestUtils.unpackTestResources(TestHadoopExamples.class, EXAMPLES, null, EXAMPLES_OUT);
+    try {
+      //copy examples/ into /user/root/ and
+      //then create examples-output directory
+      TestUtils.unpackTestResources(TestHadoopExamples.class, EXAMPLES, null, EXAMPLES_OUT);
+    }
+    catch (java.lang.Throwable t) {
+      LOG.info("Failed to unpack jar resources.  Attemting to use bigtop sources");
+      def source = System.getenv("BIGTOP_HOME") + "/" + SOURCE;
+
+      assertNotNull("Can't copy test input files from bigtop source dir," +
+        "and jar specific attempt failed also", examples);
+
+      LOG.info("MAKING DIRECTORIES ..................... ${EXAMPLES} ${EXAMPLES_OUT}");
+
+      //add the files in resources/
+      sh.exec("hadoop fs -put ${source}/*.* .");
+      //add the directories under resources (like examples/)
+      sh.exec("hadoop fs -put ${source}/${EXAMPLES} ${EXAMPLES}");
+      sh.exec("hadoop fs -mkdir -p ${EXAMPLES_OUT}");
     }
-    catch(java.lang.Throwable t){
-        LOG.info("Failed to unpack jar resources.  Attemting to use bigtop sources");
-        def source = System.getenv("BIGTOP_HOME")+"/"+SOURCE;
-
-        assertNotNull("Can't copy test input files from bigtop source dir,"+
-                      "and jar specific attempt failed also", examples);
-
-        LOG.info("MAKING DIRECTORIES ..................... ${EXAMPLES} ${EXAMPLES_OUT}");
-
-        //add the files in resources/
-        sh.exec("hadoop fs -put ${source}/*.* .");
-        //add the directories under resources (like examples/)
-        sh.exec("hadoop fs -put ${source}/${EXAMPLES} ${EXAMPLES}");
-        sh.exec("hadoop fs -mkdir -p ${EXAMPLES_OUT}");
-   }
-   sh.exec("hadoop fs -ls ${EXAMPLES}");
-   assertTrue("Failed asserting that 'examples' were created in the DFS", sh.getRet()==0);
+    sh.exec("hadoop fs -ls ${EXAMPLES}");
+    assertTrue("Failed asserting that 'examples' were created in the DFS", sh.getRet() == 0);
   }
 
   static long terasortid = System.currentTimeMillis();
@@ -115,17 +115,17 @@ class TestHadoopExamples {
   public static String pi_samples = System.getProperty("pi_samples", "1000");
   static LinkedHashMap examples =
     [
-        pi                :"${pi_maps} ${pi_samples}",
-        wordcount         :"$EXAMPLES/text $EXAMPLES_OUT/wordcount",
-        teragen           :"${terasort_rows} teragen${terasortid}",
-        terasort          :"teragen${terasortid} terasort${terasortid}",
-        teravalidate      :"terasort${terasortid} tervalidate${terasortid}",
-        multifilewc       :"$EXAMPLES/text $EXAMPLES_OUT/multifilewc",
-        aggregatewordcount:"$EXAMPLES/text $EXAMPLES_OUT/aggregatewordcount 2 textinputformat",
-        aggregatewordhist :"$EXAMPLES/text $EXAMPLES_OUT/aggregatewordhist 2 textinputformat",
-        grep              :"$EXAMPLES/text $EXAMPLES_OUT/grep '[Cc]uriouser'",
-        secondarysort     :"$EXAMPLES/ints $EXAMPLES_OUT/secondarysort",
-        randomtextwriter  :"-D $RANDOMTEXTWRITER_TOTALBYTES=1073741824 $EXAMPLES_OUT/randomtextwriter"
+      pi: "${pi_maps} ${pi_samples}",
+      wordcount: "$EXAMPLES/text $EXAMPLES_OUT/wordcount",
+      teragen: "${terasort_rows} teragen${terasortid}",
+      terasort: "teragen${terasortid} terasort${terasortid}",
+      teravalidate: "terasort${terasortid} teravalidate${terasortid}",
+      multifilewc: "$EXAMPLES/text $EXAMPLES_OUT/multifilewc",
+      aggregatewordcount: "$EXAMPLES/text $EXAMPLES_OUT/aggregatewordcount 2 textinputformat",
+      aggregatewordhist: "$EXAMPLES/text $EXAMPLES_OUT/aggregatewordhist 2 textinputformat",
+      grep: "$EXAMPLES/text $EXAMPLES_OUT/grep '[Cc]uriouser'",
+      secondarysort: "$EXAMPLES/ints $EXAMPLES_OUT/secondarysort",
+      randomtextwriter: "-D $RANDOMTEXTWRITER_TOTALBYTES=1073741824 $EXAMPLES_OUT/randomtextwriter"
     ];
 
   private String testName;
@@ -147,10 +147,10 @@ class TestHadoopExamples {
 
   @Test
   void testMRExample() {
-    if(FailureVars.instance.getRunFailures()
-        || FailureVars.instance.getServiceRestart()
-        || FailureVars.instance.getServiceKill()
-        || FailureVars.instance.getNetworkShutdown()) {
+    if (FailureVars.instance.getRunFailures()
+      || FailureVars.instance.getServiceRestart()
+      || FailureVars.instance.getServiceKill()
+      || FailureVars.instance.getNetworkShutdown()) {
       runFailureThread();
     }
     sh.exec("hadoop jar $testJar $testName $testArgs");

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopSmoke.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopSmoke.groovy
index 40ad04d..7294197 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopSmoke.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopSmoke.groovy
@@ -48,13 +48,13 @@ class TestHadoopSmoke {
   static String nn = (new Configuration()).get(DFSConfigKeys.FS_DEFAULT_NAME_KEY)
 
   String cmd = "hadoop jar ${STREAMING_JAR}" +
-      " -D mapred.map.tasks=1 -D mapred.reduce.tasks=1 -D mapred.job.name=Experiment"
+    " -D mapred.map.tasks=1 -D mapred.reduce.tasks=1 -D mapred.job.name=Experiment"
   String cmd2 = " -input ${testDir}/cachefile/input.txt -mapper map.sh -file map.sh -reducer cat" +
-      " -output ${testDir}/cachefile/out -verbose"
+    " -output ${testDir}/cachefile/out -verbose"
   String arg = "${nn}/user/${System.properties['user.name']}/${testDir}/cachefile/cachedir.jar#testlink"
 
   @BeforeClass
-  static void  setUp() throws IOException {
+  static void setUp() throws IOException {
     String[] inputFiles = ["cachedir.jar", "input.txt"];
     try {
       TestUtils.unpackTestResources(TestHadoopSmoke.class, "${testDir}/cachefile", inputFiles, null);
@@ -71,7 +71,7 @@ class TestHadoopSmoke {
   @Test
   void testCacheArchive() {
     sh.exec("hadoop fs -rmr ${testDir}/cachefile/out",
-             cmd + ' -cacheArchive ' + arg + cmd2)
+      cmd + ' -cacheArchive ' + arg + cmd2)
     logError(sh)
     sh.exec("hadoop fs -cat ${testDir}/cachefile/out/part-00000")
     logError(sh)
@@ -82,7 +82,7 @@ class TestHadoopSmoke {
   @Test
   void testArchives() {
     sh.exec("hadoop fs -rmr ${testDir}/cachefile/out",
-             cmd + ' -archives ' + arg + cmd2)
+      cmd + ' -archives ' + arg + cmd2)
     logError(sh)
     sh.exec("hadoop fs -cat ${testDir}/cachefile/out/part-00000")
     logError(sh)
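
Both tests here run the same streaming job; only the flag differs (-cacheArchive vs
-archives), and the jar is exposed to the tasks through the #testlink symlink. Fully
assembled, the command looks roughly like the sketch below; the streaming jar path
and the HDFS URI are assumptions:

    import org.apache.bigtop.itest.shell.Shell

    // Assembled streaming command; paths and the namenode URI are placeholders.
    Shell sh = new Shell("/bin/bash -s")
    String streamingJar = "/usr/lib/hadoop-mapreduce/hadoop-streaming.jar"
    String testDir = "test.hadoopsmoke.123"
    String arg = "hdfs://localhost:8020/user/jenkins/${testDir}/cachefile/cachedir.jar#testlink"

    sh.exec("hadoop jar ${streamingJar}" +
      " -D mapred.map.tasks=1 -D mapred.reduce.tasks=1 -D mapred.job.name=Experiment" +
      " -cacheArchive " + arg +
      " -input ${testDir}/cachefile/input.txt -mapper map.sh -file map.sh -reducer cat" +
      " -output ${testDir}/cachefile/out -verbose")
    assert sh.getRet() == 0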

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestNode.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestNode.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestNode.groovy
index 48bb1ec..19bbd8f 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestNode.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestNode.groovy
@@ -27,14 +27,14 @@ import org.apache.bigtop.itest.JarContent;
 import org.apache.bigtop.itest.shell.Shell;
 
 public class TestNode {
- 
+
   // set debugging variable to true if you want error messages sent to stdout
   private static Shell sh = new Shell("/bin/bash");
 
   @BeforeClass
   public static void setUp() {
     // unpack resource
-    JarContent.unpackJarContainer(TestNode.class, "." , null);
+    JarContent.unpackJarContainer(TestNode.class, ".", null);
     System.out.println("Running Node commands:");
   }
 
@@ -43,10 +43,10 @@ public class TestNode {
   }
 
   @Test
-  public void testNodeBasic() { 
+  public void testNodeBasic() {
     // list
-    System.out.println("-list"); 
-    sh.exec("YARN_ROOT_LOGGER=WARN,console yarn node -list");    
+    System.out.println("-list");
+    sh.exec("YARN_ROOT_LOGGER=WARN,console yarn node -list");
     assertTrue("-list failed", sh.getRet() == 0);
 
     // status

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestRmAdmin.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestRmAdmin.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestRmAdmin.groovy
index debbb16..d8fa74d 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestRmAdmin.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestRmAdmin.groovy
@@ -27,14 +27,14 @@ import org.apache.bigtop.itest.JarContent;
 import org.apache.bigtop.itest.shell.Shell;
 
 public class TestRmAdmin {
- 
+
   // set debugging variable to true if you want error messages sent to stdout
   private static Shell sh = new Shell("/bin/bash");
 
   @BeforeClass
   public static void setUp() {
     // unpack resource
-    JarContent.unpackJarContainer(TestRmAdmin.class, "." , null);
+    JarContent.unpackJarContainer(TestRmAdmin.class, ".", null);
     System.out.println("Running RmAdmin commands:");
   }
 
@@ -43,10 +43,10 @@ public class TestRmAdmin {
   }
 
   @Test
-  public void testRmAdminBasic() { 
+  public void testRmAdminBasic() {
     // help
-    System.out.println("-help"); 
-    sh.exec("YARN_ROOT_LOGGER=WARN,console yarn rmadmin -help");    
+    System.out.println("-help");
+    sh.exec("YARN_ROOT_LOGGER=WARN,console yarn rmadmin -help");
     assertTrue("-help failed", sh.getRet() == 0);
 
     // getGroups
@@ -54,7 +54,7 @@ public class TestRmAdmin {
     sh.exec("YARN_ROOT_LOGGER=WARN,console yarn rmadmin -getGroups");
     assertTrue("-getGroups failed", sh.getRet() == 0);
   }
-  
+
   @Test
   public void testRmAdminRefreshcommands() {
     // refreshQueues

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/IncrementalPELoad.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/IncrementalPELoad.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/IncrementalPELoad.java
index d1cb391..657de61 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/IncrementalPELoad.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/IncrementalPELoad.java
@@ -47,16 +47,16 @@ public class IncrementalPELoad extends Configured implements Tool {
 
     private static final int ROWSPERSPLIT = 1024;
     private static final byte[][] FAMILIES
-      = { Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-A")),
-	  Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-B"))};
+        = {Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-A")),
+        Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-B"))};
 
     private int keyLength;
-    private static final int KEYLEN_DEFAULT=10;
-    private static final String KEYLEN_CONF="randomkv.key.length";
+    private static final int KEYLEN_DEFAULT = 10;
+    private static final String KEYLEN_CONF = "randomkv.key.length";
 
     private int valLength;
-    private static final int VALLEN_DEFAULT=10;
-    private static final String VALLEN_CONF="randomkv.val.length";
+    private static final int VALLEN_DEFAULT = 10;
+    private static final String VALLEN_CONF = "randomkv.val.length";
 
     @Override
     protected void setup(Context context)
@@ -69,10 +69,9 @@ public class IncrementalPELoad extends Configured implements Tool {
     }
 
     protected void map(NullWritable n1, NullWritable n2,
-        Mapper<NullWritable, NullWritable,
-               ImmutableBytesWritable,KeyValue>.Context context)
-        throws java.io.IOException ,InterruptedException
-    {
+                       Mapper<NullWritable, NullWritable,
+                           ImmutableBytesWritable, KeyValue>.Context context)
+        throws java.io.IOException, InterruptedException {
 
       byte keyBytes[] = new byte[keyLength];
       byte valBytes[] = new byte[valLength];
@@ -85,7 +84,7 @@ public class IncrementalPELoad extends Configured implements Tool {
 
         random.nextBytes(keyBytes);
         // Ensure that unique tasks generate unique keys
-        keyBytes[keyLength - 1] = (byte)(taskId & 0xFF);
+        keyBytes[keyLength - 1] = (byte) (taskId & 0xFF);
         random.nextBytes(valBytes);
         ImmutableBytesWritable key = new ImmutableBytesWritable(keyBytes);
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestCopyTable.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestCopyTable.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestCopyTable.java
index 47ea810..8b859f1 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestCopyTable.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestCopyTable.java
@@ -31,9 +31,12 @@ import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.AfterClass;
 import org.junit.Test;
+
 import static org.junit.Assert.assertTrue;
+
 import org.apache.bigtop.itest.shell.Shell;
 import org.apache.bigtop.itest.hbase.util.HBaseTestUtil;
+
 import static org.apache.bigtop.itest.LogErrorsUtils.logError;
 
 public class TestCopyTable {
@@ -48,7 +51,7 @@ public class TestCopyTable {
   private static HTable origTable;
   private static HTable copyTable;
   private static String copyTableCmd =
-    "hbase org.apache.hadoop.hbase.mapreduce.CopyTable";
+      "hbase org.apache.hadoop.hbase.mapreduce.CopyTable";
 
   private static int NUM_ROWS = 5000;
   private static Configuration conf;
@@ -60,12 +63,12 @@ public class TestCopyTable {
     admin = new HBaseAdmin(conf);
 
     HTableDescriptor htd_orig =
-      HBaseTestUtil.createTestTableDescriptor("orig", TEST_FAMILY);
+        HBaseTestUtil.createTestTableDescriptor("orig", TEST_FAMILY);
     admin.createTable(htd_orig);
     orig = htd_orig.getName();
 
     HTableDescriptor htd_copy =
-      HBaseTestUtil.createTestTableDescriptor("copy", TEST_FAMILY);
+        HBaseTestUtil.createTestTableDescriptor("copy", TEST_FAMILY);
     admin.createTable(htd_copy);
     copy = htd_copy.getName();
 
@@ -97,13 +100,13 @@ public class TestCopyTable {
   @Test
   public void testCopyTable() throws Exception {
     sh.exec(copyTableCmd + " --new.name=" + new String(copy) +
-      " " + new String(orig));
+        " " + new String(orig));
     logError(sh);
     assertTrue(sh.getRet() == 0);
 
     String origDigest = HBaseTestUtil.checksumRows(origTable);
     String copyDigest = HBaseTestUtil.checksumRows(copyTable);
     assertTrue("Original and copy tables contain different data",
-               origDigest.equals(copyDigest));
+        origDigest.equals(copyDigest));
   }
 }
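
The verification idea in testCopyTable is simply: run the CopyTable MR job from the
shell, then require both tables to hash to the same digest. A sketch of the command
half, with placeholder table names:

    import org.apache.bigtop.itest.shell.Shell

    // Run the CopyTable MR job; 'orig_table' and 'copy_table' are placeholders.
    Shell sh = new Shell("/bin/bash -s")
    sh.exec("hbase org.apache.hadoop.hbase.mapreduce.CopyTable" +
      " --new.name=copy_table orig_table")
    assert sh.getRet() == 0 : "CopyTable job failed"
    // The test then asserts HBaseTestUtil.checksumRows(...) matches for both tables.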

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseBalancer.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseBalancer.groovy b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseBalancer.groovy
index c8391ac..23d7c3b 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseBalancer.groovy
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseBalancer.groovy
@@ -56,7 +56,7 @@ class TestHBaseBalancer {
     logError(sh)
     assertTrue(sh.getRet() == 0)
     assertTrue("balance_switch failed switching to true",
-               sh.getOut().toString().indexOf("true") != -1)
+      sh.getOut().toString().indexOf("true") != -1)
 
     // Return balancer switch to original state, and verify its
     // previous state to be false.
@@ -64,6 +64,6 @@ class TestHBaseBalancer {
     logError(sh)
     assertTrue(sh.getRet() == 0)
     assertTrue("balance_switch failed switching to false",
-               sh.getOut().toString().indexOf("false") != -1)
+      sh.getOut().toString().indexOf("false") != -1)
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseCompression.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseCompression.groovy b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseCompression.groovy
index 6b63d3c..fba1d77 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseCompression.groovy
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseCompression.groovy
@@ -42,7 +42,7 @@ class TestHBaseCompression {
   static void setUp() {
     conf = new Configuration();
     conf.addResource('mapred-site.xml');
-    HADOOP_OPTIONS = 
+    HADOOP_OPTIONS =
       "-fs ${conf.get('fs.default.name')} -jt ${conf.get('mapred.job.tracker')}";
     sh.exec("whoami");
     String user = sh.out[0];
@@ -51,7 +51,7 @@ class TestHBaseCompression {
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs $HADOOP_OPTIONS -rmr -skipTrash $OUTPUT");
       assertTrue("Deletion of previous $OUTPUT from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
     sh.exec("hadoop fs $HADOOP_OPTIONS -mkdir $OUTPUT");
     assertTrue("Could not create $OUTPUT directory", sh.getRet() == 0);
@@ -63,14 +63,14 @@ class TestHBaseCompression {
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs $HADOOP_OPTIONS -rmr -skipTrash $OUTPUT");
       assertTrue("Deletion of $OUTPUT from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
   }
 
   void _testCompression(String codec) {
     // workaround for hbase; set HBASE_LIBRARY_PATH
     sh.exec("export HBASE_LIBRARY_PATH=$JAVA_LIBRARY_PATH",
-            "hbase $TEST $HDFS_PATH/testfile.$codec $codec");
+      "hbase $TEST $HDFS_PATH/testfile.$codec $codec");
     assertTrue("test failed with codec: $codec", sh.getRet() == 0);
   }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseImportExport.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseImportExport.groovy b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseImportExport.groovy
index 2ea01db..14c20f2 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseImportExport.groovy
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseImportExport.groovy
@@ -113,7 +113,7 @@ class TestHBaseImportExport {
     String origDigest = HBaseTestUtil.checksumRows(origTable)
     String exportDigest = HBaseTestUtil.checksumRows(exportTable)
     assertTrue("Original and exported tables contain different data",
-               origDigest.equals(exportDigest))
+      origDigest.equals(exportDigest))
   }
 
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBasePigSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBasePigSmoke.groovy b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBasePigSmoke.groovy
index c542b26..3c42937 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBasePigSmoke.groovy
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBasePigSmoke.groovy
@@ -30,12 +30,12 @@ class TestHBasePigSmoke {
 
   private static String extra_jars =
     System.getProperty("org.apache.bigtop.itest.hbase.smoke.TestHBasePigSmoke.extra_jars",
-                       "");
+      "");
   private static String register_clause = "";
   private static String tmp = "TestHBasePigSmoke-${(new Date().getTime())}";
-  private static String TABLE="smoke-${tmp}";
-  private static String FAM1='family1';
-  private static String FAM2='family2';
+  private static String TABLE = "smoke-${tmp}";
+  private static String FAM1 = 'family1';
+  private static String FAM2 = 'family2';
 
   private static Shell shHBase = new Shell('hbase shell');
   private static Shell shPig = new Shell('pig');
@@ -50,32 +50,34 @@ class TestHBasePigSmoke {
   @BeforeClass
   static void setUp() {
     shHBase.exec("create '$TABLE', '$FAM1', '$FAM2'",
-                 "describe '$TABLE'",
-                 "quit\n");
+      "describe '$TABLE'",
+      "quit\n");
     assertEquals("Creating of the ${TABLE} failed",
-                 0, shHBase.ret);
+      0, shHBase.ret);
   }
 
   @AfterClass
   static void tearDown() {
     shHBase.exec("disable '$TABLE'",
-                 "drop '$TABLE'",
-                 "quit\n");
+      "drop '$TABLE'",
+      "quit\n");
 
     sh.exec("hadoop fs -rmr $TABLE");
   }
 
   @Ignore("BIGTOP-219")
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void Pig2HBase() {
     def script = "\n";
 
-    (1..ROW_CNT).each { script <<= String.format('%020d %d %s\n', it, it, 'localhost') }
+    (1..ROW_CNT).each {
+      script <<= String.format('%020d %d %s\n', it, it, 'localhost')
+    }
 
     sh.exec("hadoop dfs -mkdir $TABLE",
-            "hadoop dfs -put <(cat << __EOT__${script}__EOT__) ${TABLE}/data");
+      "hadoop dfs -put <(cat << __EOT__${script}__EOT__) ${TABLE}/data");
     assertEquals("Can't copy data to HDFS",
-                 0, sh.ret);
+      0, sh.ret);
 
     shPig.exec("""
       ${register_clause}
@@ -85,15 +87,15 @@ class TestHBasePigSmoke {
       quit
       """);
     assertEquals("Failed loading data via PIG",
-                 0, shPig.ret);
+      0, shPig.ret);
 
     shHBase.exec("scan '$TABLE'",
-                 "quit\n");
+      "quit\n");
     assertTrue("Scanning the table returned wrong # of rows",
-               (shHBase.out.get(shHBase.out.size() - 3) =~ "^$ROW_CNT row.s. in .* seconds").find());
+      (shHBase.out.get(shHBase.out.size() - 3) =~ "^$ROW_CNT row.s. in .* seconds").find());
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   @Ignore("BIGTOP-219")
   public void HBase2Pig() {
     def script = "\n";
@@ -116,6 +118,6 @@ class TestHBasePigSmoke {
 
     sh.exec("hadoop fs -cat $TABLE/pig/part* | wc -l");
     assertEquals("Scanning the PIG output returned wrong # of rows",
-                 ROW_CNT, sh.out.get(0).toInteger());
+      ROW_CNT, sh.out.get(0).toInteger());
   }
 }
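
Worth noting in this file: Shell can wrap an interactive interpreter ('hbase shell',
'pig'), feeding each exec() argument to it as one line of input. A minimal sketch,
with placeholder table and family names:

    import org.apache.bigtop.itest.shell.Shell

    // Drive the interactive HBase shell; each argument is one input line.
    Shell shHBase = new Shell('hbase shell')
    shHBase.exec("create 'smoke-demo', 'family1'",
      "describe 'smoke-demo'",
      "quit\n")
    assert shHBase.ret == 0 : "creating the demo table failed"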

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseSmoke.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseSmoke.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseSmoke.java
index 50bcf42..b32705c 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseSmoke.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseSmoke.java
@@ -49,7 +49,7 @@ public class TestHBaseSmoke {
     HBaseAdmin admin = new HBaseAdmin(conf);
 
     HTableDescriptor htd =
-      HBaseTestUtil.createTestTableDescriptor("testSimplePutGet", TEST_FAMILY);
+        HBaseTestUtil.createTestTableDescriptor("testSimplePutGet", TEST_FAMILY);
     admin.createTable(htd);
 
     byte[] tableName = htd.getName();

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHFileOutputFormat.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHFileOutputFormat.java
index 1a0ed46..32a7d0c 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHFileOutputFormat.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHFileOutputFormat.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+
 import org.junit.AfterClass;
 import org.junit.Test;
 import org.junit.Ignore;
@@ -53,28 +54,32 @@ public class TestHFileOutputFormat {
   private static final int ROWSPERSPLIT = 1024;
 
   private static final byte[][] FAMILIES =
-    { Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-A")),
-      Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-B"))};
+      {Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-A")),
+          Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-B"))};
 
   private static final String HBASE_HOME = System.getenv("HBASE_HOME");
   private static final String HBASE_CONF_DIR = System.getenv("HBASE_CONF_DIR");
+
   static {
     assertNotNull("HBASE_HOME has to be set to run this test",
         HBASE_HOME);
     assertNotNull("HBASE_CONF_DIR has to be set to run this test",
         HBASE_CONF_DIR);
   }
+
   private static String hbase_jar =
-    JarContent.getJarName(HBASE_HOME, "hbase-.*(?<!tests).jar");
+      JarContent.getJarName(HBASE_HOME, "hbase-.*(?<!tests).jar");
   private static String hbase_tests_jar =
-    JarContent.getJarName(HBASE_HOME, "hbase-.*tests.jar");
+      JarContent.getJarName(HBASE_HOME, "hbase-.*tests.jar");
   private static URL incrload_jar_url =
-    JarContent.getJarURL(org.apache.bigtop.itest.hbase.smoke.IncrementalPELoad.class);
+      JarContent.getJarURL(org.apache.bigtop.itest.hbase.smoke.IncrementalPELoad.class);
+
   static {
     assertNotNull("Can't find hbase.jar", hbase_jar);
     assertNotNull("Can't find hbase-tests.jar", hbase_tests_jar);
     assertNotNull("Can't find jar containing IncrementalPELoad class", incrload_jar_url);
   }
+
   private static final String HBASE_JAR = HBASE_HOME + "/" + hbase_jar;
   private static final String HBASE_TESTS_JAR = HBASE_HOME + "/" + hbase_tests_jar;
   private static final String ZOOKEEPER_JAR = HBASE_HOME + "/lib/zookeeper.jar";
@@ -102,7 +107,7 @@ public class TestHFileOutputFormat {
     doIncrementalLoadTest("testMRIncrementalLoadWithSplit", true);
   }
 
-  private byte [][] generateRandomSplitKeys(int numKeys) {
+  private byte[][] generateRandomSplitKeys(int numKeys) {
     Random random = new Random();
     byte[][] ret = new byte[numKeys][];
     for (int i = 0; i < numKeys; i++) {
@@ -114,7 +119,7 @@ public class TestHFileOutputFormat {
   private void doIncrementalLoadTest(String testName, boolean shouldChangeRegions)
       throws Exception {
     FileSystem fs = HBaseTestUtil.getClusterFileSystem();
-    Path testDir =  HBaseTestUtil.getMROutputDir(testName);
+    Path testDir = HBaseTestUtil.getMROutputDir(testName);
     byte[][] splitKeys = generateRandomSplitKeys(4);
 
     Configuration conf = HBaseConfiguration.create();
@@ -165,7 +170,7 @@ public class TestHFileOutputFormat {
     // Ensure data shows up
     int expectedRows = NMapInputFormat.getNumMapTasks(conf) * ROWSPERSPLIT;
     assertEquals("LoadIncrementalHFiles should put expected data in table",
-                 expectedRows, HBaseTestUtil.countRows(table));
+        expectedRows, HBaseTestUtil.countRows(table));
     Scan scan = new Scan();
     ResultScanner results = table.getScanner(scan);
     int count = 0;
@@ -180,12 +185,12 @@ public class TestHFileOutputFormat {
     }
     results.close();
     String tableDigestBefore = HBaseTestUtil.checksumRows(table);
-            
+
     // Cause regions to reopen
     admin.disableTable(TABLE_NAME);
     admin.enableTable(TABLE_NAME);
     assertEquals("Data should remain after reopening of regions",
-                 tableDigestBefore, HBaseTestUtil.checksumRows(table));
+        tableDigestBefore, HBaseTestUtil.checksumRows(table));
 
     // cleanup
     // - disable and drop table
@@ -202,7 +207,7 @@ public class TestHFileOutputFormat {
 
   private void runIncrementalPELoad(String table, String outDir) {
     sh.exec("export HADOOP_CLASSPATH=" + HBASE_CONF_DIR + ":" + HBASE_JAR + ":" + HBASE_TESTS_JAR + ":" + ZOOKEEPER_JAR,
-            "hadoop jar " + INCRLOAD_JAR + " " + INCRLOAD +
+        "hadoop jar " + INCRLOAD_JAR + " " + INCRLOAD +
             " -libjars " + HBASE_JAR + "," + HBASE_TESTS_JAR +
             " " + table + " " + outDir);
     assertEquals("MR job failed", 0, sh.getRet());

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestImportTsv.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestImportTsv.groovy b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestImportTsv.groovy
index 09cf4b8..d34ab8f 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestImportTsv.groovy
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestImportTsv.groovy
@@ -53,7 +53,7 @@ public class TestImportTsv {
   private static final String HBASE_HOME = System.getenv("HBASE_HOME");
   static {
     assertNotNull("HBASE_HOME has to be set to run this test",
-        HBASE_HOME);
+      HBASE_HOME);
   }
   private static String hbase_jar =
     JarContent.getJarName(HBASE_HOME, "hbase-.*(?<!tests).jar");
@@ -70,17 +70,17 @@ public class TestImportTsv {
     if (sh.getRet() != 0) {
       sh.exec("hadoop fs -mkdir $DATADIR1");
       assertTrue("Unable to create directory $DATADIR1",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
     sh.exec("hadoop fs -test -e $DATADIR2");
     if (sh.getRet() != 0) {
       sh.exec("hadoop fs -mkdir $DATADIR2");
       assertTrue("Unable to create directory $DATADIR2",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
     // load data into HDFS
     sh.exec("hadoop fs -put movies.tsv $DATADIR1/items",
-            "hadoop fs -put movies.psv $DATADIR2/items");
+      "hadoop fs -put movies.psv $DATADIR2/items");
     assertTrue("setup failed", sh.getRet() == 0);
   }
 
@@ -88,8 +88,8 @@ public class TestImportTsv {
   public static void cleanUp() {
     // delete data and junk from HDFS
     sh.exec("hadoop fs -rmr -skipTrash $DATADIR1",
-            "hadoop fs -rmr -skipTrash $DATADIR2",
-            "hadoop fs -rmr -skipTrash /user/$USER/partitions_*");
+      "hadoop fs -rmr -skipTrash $DATADIR2",
+      "hadoop fs -rmr -skipTrash /user/$USER/partitions_*");
     assertTrue("teardown failed", sh.getRet() == 0);
   }
 
@@ -126,7 +126,7 @@ public class TestImportTsv {
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs -rmr -skipTrash $OUTDIR");
       assertTrue("Deletion of $OUTDIR from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
   }
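
The setUp here uses an idempotent-creation idiom: probe with 'hadoop fs -test -e' and
mkdir only on a miss. Extracted as a sketch, with a placeholder directory:

    import org.apache.bigtop.itest.shell.Shell

    // Create an HDFS directory only if the existence probe fails.
    Shell sh = new Shell("/bin/bash -s")
    String dataDir = "/tmp/importtsv-data"  // placeholder
    sh.exec("hadoop fs -test -e $dataDir")
    if (sh.getRet() != 0) {
      sh.exec("hadoop fs -mkdir $dataDir")
      assert sh.getRet() == 0 : "Unable to create directory $dataDir"
    }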
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestLoadIncrementalHFiles.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestLoadIncrementalHFiles.java
index 4192095..1adfebd 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestLoadIncrementalHFiles.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestLoadIncrementalHFiles.java
@@ -28,16 +28,18 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
+
 import static org.junit.Assert.assertEquals;
+
 import org.apache.bigtop.itest.hbase.util.HBaseTestUtil;
 import org.apache.bigtop.itest.shell.Shell;
 
 public class TestLoadIncrementalHFiles {
   private static final byte[] FAMILY = Bytes.toBytes("f1");
   private static final byte[] QUALIFIER = Bytes.toBytes("q1");
-  private static final byte[][] SPLIT_KEYS = new byte[][] {
-    Bytes.toBytes("ddd"),
-    Bytes.toBytes("ppp")
+  private static final byte[][] SPLIT_KEYS = new byte[][]{
+      Bytes.toBytes("ddd"),
+      Bytes.toBytes("ppp")
   };
   private static Shell sh = new Shell("/bin/bash -s");
 
@@ -48,10 +50,10 @@ public class TestLoadIncrementalHFiles {
   @Test
   public void testSimpleLoad() throws Exception {
     runTest("testSimpleLoad",
-        new byte[][][] {
-          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") },
-          new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") },
-    });
+        new byte[][][]{
+            new byte[][]{Bytes.toBytes("aaaa"), Bytes.toBytes("cccc")},
+            new byte[][]{Bytes.toBytes("ddd"), Bytes.toBytes("ooo")},
+        });
   }
 
   /**
@@ -61,10 +63,10 @@ public class TestLoadIncrementalHFiles {
   @Test
   public void testRegionCrossingLoad() throws Exception {
     runTest("testRegionCrossingLoad",
-        new byte[][][] {
-          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
-          new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
-    });
+        new byte[][][]{
+            new byte[][]{Bytes.toBytes("aaaa"), Bytes.toBytes("eee")},
+            new byte[][]{Bytes.toBytes("fff"), Bytes.toBytes("zzz")},
+        });
   }
 
   private void chmod(String uri) {

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Putter.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Putter.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Putter.java
index 0aaffb3..eeaab24 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Putter.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Putter.java
@@ -41,7 +41,7 @@ public class Putter {
     Put put = null;
     if (result != null) {
       NavigableMap<byte[], NavigableMap<byte[], byte[]>> cfmap =
-        result.getNoVersionMap();
+          result.getNoVersionMap();
 
       if (result.getRow() != null && cfmap != null) {
         put = new Put(result.getRow());
@@ -70,7 +70,7 @@ public class Putter {
   }
 
   public static int doScanAndPut(HTable table, int val, boolean autoflush)
-    throws IOException {
+      throws IOException {
     Scan s = new Scan();
     byte[] start = {};
     byte[] stop = {};
@@ -78,7 +78,7 @@ public class Putter {
     s.setStartRow(start);
     s.setStopRow(stop);
     SingleColumnValueFilter filter = new SingleColumnValueFilter(
-      Bytes.toBytes("f1"), Bytes.toBytes("qual"), CompareOp.EQUAL, value);
+        Bytes.toBytes("f1"), Bytes.toBytes("qual"), CompareOp.EQUAL, value);
     s.setFilter(filter);
 
     table.setAutoFlush(autoflush);
@@ -95,7 +95,7 @@ public class Putter {
   public static void main(String argv[]) throws IOException {
     if (argv.length < 2) {
       System.err.println("usage: " + Putter.class.getSimpleName() +
-                         " <table> <value>");
+          " <table> <value>");
       System.err.println(" <value>: a numeric value [0,500)");
       System.exit(1);
     }
@@ -112,7 +112,7 @@ public class Putter {
     }
     Configuration conf = HBaseConfiguration.create();
 
-    byte [] tableName = Bytes.toBytes(argv[0]);
+    byte[] tableName = Bytes.toBytes(argv[0]);
     int val = Integer.parseInt(argv[1]);
     HTable table = new HTable(conf, tableName);
     for (int i = 0; i < loops; i++) {

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Scanner.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Scanner.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Scanner.java
index b6a7e2d..cfe5cb5 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Scanner.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Scanner.java
@@ -39,14 +39,14 @@ import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.Multimap;
 
 /**
- * This program scans a table a configurable number of times. Uses 
+ * This program scans a table a configurable number of times. Uses
  * the table record reader.
  */
 public class Scanner {
   public static final Log LOG = LogFactory.getLog(Scanner.class);
 
   public static int doScan(HTable table, int val) throws IOException,
-    InterruptedException {
+      InterruptedException {
     Scan s = new Scan();
     byte[] start = {};
     byte[] stop = {};
@@ -54,7 +54,7 @@ public class Scanner {
     s.setStartRow(start);
     s.setStopRow(stop);
     SingleColumnValueFilter filter = new SingleColumnValueFilter(
-      Bytes.toBytes("f1"), Bytes.toBytes("qual"), CompareOp.EQUAL, value);
+        Bytes.toBytes("f1"), Bytes.toBytes("qual"), CompareOp.EQUAL, value);
     s.setFilter(filter);
 
     // Keep track of gathered elements.
@@ -70,7 +70,7 @@ public class Scanner {
       }
 
       NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long,
-        byte[]>>> columnFamilyMap = r.getMap();
+          byte[]>>> columnFamilyMap = r.getMap();
 
       // Output time to show if flush related.
       String k = Bytes.toStringBinary(r.getRow());
@@ -85,7 +85,7 @@ public class Scanner {
     }
 
     System.out.println("scan items counted: " + cnt + " for scan " +
-      s.toString() + " with filter f1:qual == " + Bytes.toString(value));
+        s.toString() + " with filter f1:qual == " + Bytes.toString(value));
 
     // Print out dupes.
     int dupes = 0;
@@ -104,13 +104,13 @@ public class Scanner {
   public static void main(String argv[]) throws IOException {
     if (argv.length < 2) {
       System.err.println("usage: " + Scanner.class.getSimpleName() +
-        " <table> <value>");
+          " <table> <value>");
       System.err.println(" <value>: a numeric value [0,500)");
       System.exit(1);
     }
     Configuration conf = HBaseConfiguration.create();
 
-    byte [] tableName = Bytes.toBytes(argv[0]);
+    byte[] tableName = Bytes.toBytes(argv[0]);
     int val = Integer.parseInt(argv[1]);
     int loops = 1;
     for (int i = 1; i < argv.length; i++) {
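
Scanner and Putter build the same filtered full-table scan: match rows whose f1:qual
cell equals a fixed value. Condensed, the API usage is as below; the value is a
placeholder, and the imports follow the HBase-0.9x-era API used throughout these tests:

    import org.apache.hadoop.hbase.client.Scan
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter
    import org.apache.hadoop.hbase.util.Bytes

    // Full-table scan filtered on f1:qual == value; the value is a placeholder.
    byte[] value = Bytes.toBytes(String.format("%010d", 13))
    Scan s = new Scan()
    s.setFilter(new SingleColumnValueFilter(
        Bytes.toBytes("f1"), Bytes.toBytes("qual"), CompareOp.EQUAL, value))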

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestConcurrentScanAndPut.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestConcurrentScanAndPut.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestConcurrentScanAndPut.java
index fec28be..468b702 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestConcurrentScanAndPut.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestConcurrentScanAndPut.java
@@ -49,11 +49,11 @@ import org.apache.bigtop.itest.hbase.util.HBaseTestUtil;
  * multiple column families sometimes get split into two rows.
  */
 public class TestConcurrentScanAndPut {
-  public static Shell scanSh = new Shell( "/bin/bash -s" );
-  public static Shell putSh = new Shell( "/bin/bash -s" );
+  public static Shell scanSh = new Shell("/bin/bash -s");
+  public static Shell putSh = new Shell("/bin/bash -s");
 
   public static HBaseAdmin admin;
-  public static byte [] tableName;
+  public static byte[] tableName;
   public static String putter_pid;
 
   public static int scannerLoops;
@@ -61,10 +61,10 @@ public class TestConcurrentScanAndPut {
 
   @BeforeClass
   public static void setUp() throws ClassNotFoundException,
-                                    InterruptedException, IOException {
+      InterruptedException, IOException {
     System.out.println("Unpacking resources");
-    JarContent.unpackJarContainer(Scanner.class, "." , null);
-    JarContent.unpackJarContainer(Putter.class, "." , null);
+    JarContent.unpackJarContainer(Scanner.class, ".", null);
+    JarContent.unpackJarContainer(Putter.class, ".", null);
 
     Configuration conf = HBaseConfiguration.create();
     try {
@@ -73,9 +73,9 @@ public class TestConcurrentScanAndPut {
       System.err.println("Hbase is not up. Bailing out.");
       System.exit(1);
     }
-    
+
     tableName =
-      Bytes.toBytes(new String(HBaseTestUtil.getTestTableName("concurrentScanAndPut")));
+        Bytes.toBytes(new String(HBaseTestUtil.getTestTableName("concurrentScanAndPut")));
     HTableDescriptor htd = new HTableDescriptor(tableName);
     for (int i = 0; i < 10; i++) {
       htd.addFamily(new HColumnDescriptor("f" + i));
@@ -97,14 +97,14 @@ public class TestConcurrentScanAndPut {
       for (int j = 0; j < 10; j++) {
         String value = String.format("%010d", rnd.nextInt(500));
         p.add(Bytes.toBytes("f" + j),
-              Bytes.toBytes("qual"),
-              Bytes.toBytes(value));
+            Bytes.toBytes("qual"),
+            Bytes.toBytes(value));
         String bigvalue = String.format("%0100d%0100d%0100d%0100d%0100d" +
-                                        "%0100d%0100d%0100d%0100d%0100d",
-                                        i, i, i, i, i, i, i, i, i, i);
+            "%0100d%0100d%0100d%0100d%0100d",
+            i, i, i, i, i, i, i, i, i, i);
         p.add(Bytes.toBytes("f" + j),
-              Bytes.toBytes("data"),
-              Bytes.toBytes(bigvalue));
+            Bytes.toBytes("data"),
+            Bytes.toBytes(bigvalue));
       }
       puts.add(p);
       if (i % batch == (batch - 1)) {
@@ -119,14 +119,14 @@ public class TestConcurrentScanAndPut {
 
     try {
       scannerLoops = Integer.parseInt(System.getProperty(
-                                      "concurrentScanAndPut.scanner.loops"));
+          "concurrentScanAndPut.scanner.loops"));
     } catch (NumberFormatException e) {
       scannerLoops = 100;
     }
 
     try {
       putterLoops = Integer.parseInt(System.getProperty(
-                                     "concurrentScanAndPut.putter.loops"));
+          "concurrentScanAndPut.putter.loops"));
     } catch (NumberFormatException e) {
       putterLoops = 100;
     }
@@ -147,20 +147,20 @@ public class TestConcurrentScanAndPut {
     String tableNameStr = Bytes.toString(tableName);
     System.out.println("Starting puts to test table " + tableNameStr);
     putSh.exec("(HBASE_CLASSPATH=. " +
-               "hbase org.apache.bigtop.itest.hbase.system.Putter " +
-               tableNameStr + " 13 -l " + putterLoops +
-               " > /dev/null 2>&1 & echo $! ) 2> /dev/null");
+        "hbase org.apache.bigtop.itest.hbase.system.Putter " +
+        tableNameStr + " 13 -l " + putterLoops +
+        " > /dev/null 2>&1 & echo $! ) 2> /dev/null");
     putter_pid = putSh.getOut().get(0);
 
     System.out.println("Starting concurrent scans of test table " +
-                       tableNameStr);
+        tableNameStr);
     scanSh.exec("HBASE_CLASSPATH=. hbase " +
-                "org.apache.bigtop.itest.hbase.system.Scanner " +
-                tableNameStr + " 13 -l " + scannerLoops + " 2>/dev/null");
+        "org.apache.bigtop.itest.hbase.system.Scanner " +
+        tableNameStr + " 13 -l " + scannerLoops + " 2>/dev/null");
 
     int splitRows = scanSh.getRet();
     System.out.println("Split rows: " + splitRows);
     assertTrue("Rows were split when scanning table with concurrent writes",
-               splitRows == 0);
+        splitRows == 0);
   }
 }
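
The concurrency setup above leans on one shell trick: start the writer in a
backgrounded subshell and echo $! so the test can record (and later kill) its pid
before scanning in the foreground. Isolated below, with 'sleep' standing in for the
real Putter invocation:

    import org.apache.bigtop.itest.shell.Shell

    // Background a process and capture its pid from the echoed $!.
    Shell putSh = new Shell("/bin/bash -s")
    putSh.exec("(sleep 60 > /dev/null 2>&1 & echo \$! ) 2> /dev/null")
    String putterPid = putSh.getOut().get(0)
    assert putterPid.isInteger() : "expected a pid on stdout"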

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
index b15b20d..947f59e 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
@@ -60,14 +60,14 @@ import org.junit.Test;
 
 import com.google.common.collect.Lists;
 
-public class TestLoadAndVerify  extends Configured implements Tool {
+public class TestLoadAndVerify extends Configured implements Tool {
   private static final String TEST_NAME = "TestLoadAndVerify";
   private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
   private static final byte[] TEST_QUALIFIER = Bytes.toBytes("q1");
 
   private static final String NUM_TO_WRITE_KEY =
-    "loadmapper.num_to_write";
-  private static final long NUM_TO_WRITE_DEFAULT = 100*1000;
+      "loadmapper.num_to_write";
+  private static final long NUM_TO_WRITE_DEFAULT = 100 * 1000;
 
   private static final String TABLE_NAME_KEY = "loadmapper.table";
   private static final String TABLE_NAME_DEFAULT = "table";
@@ -89,25 +89,24 @@ public class TestLoadAndVerify  extends Configured implements Tool {
   /**
    * Converts a "long" value between endian systems.
    * Borrowed from Apache Commons IO
+   *
    * @param value value to convert
    * @return the converted value
    */
-  public static long swapLong(long value)
-  {
+  public static long swapLong(long value) {
     return
-      ( ( ( value >> 0 ) & 0xff ) << 56 ) +
-      ( ( ( value >> 8 ) & 0xff ) << 48 ) +
-      ( ( ( value >> 16 ) & 0xff ) << 40 ) +
-      ( ( ( value >> 24 ) & 0xff ) << 32 ) +
-      ( ( ( value >> 32 ) & 0xff ) << 24 ) +
-      ( ( ( value >> 40 ) & 0xff ) << 16 ) +
-      ( ( ( value >> 48 ) & 0xff ) << 8 ) +
-      ( ( ( value >> 56 ) & 0xff ) << 0 );
+        (((value >> 0) & 0xff) << 56) +
+            (((value >> 8) & 0xff) << 48) +
+            (((value >> 16) & 0xff) << 40) +
+            (((value >> 24) & 0xff) << 32) +
+            (((value >> 32) & 0xff) << 24) +
+            (((value >> 40) & 0xff) << 16) +
+            (((value >> 48) & 0xff) << 8) +
+            (((value >> 56) & 0xff) << 0);
   }
 
   public static class LoadMapper
-      extends Mapper<NullWritable, NullWritable, NullWritable, NullWritable>
-  {
+      extends Mapper<NullWritable, NullWritable, NullWritable, NullWritable> {
     private long recordsToWrite;
     private HTable table;
     private Configuration conf;
@@ -122,10 +121,10 @@ public class TestLoadAndVerify  extends Configured implements Tool {
     public void setup(Context context) throws IOException {
       conf = context.getConfiguration();
       recordsToWrite = conf.getLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT);
-      byte [] tableName = Bytes.toBytes(conf.get(TABLE_NAME_KEY, TABLE_NAME_DEFAULT));
+      byte[] tableName = Bytes.toBytes(conf.get(TABLE_NAME_KEY, TABLE_NAME_DEFAULT));
       numBackReferencesPerRow = conf.getInt(NUM_BACKREFS_KEY, NUM_BACKREFS_DEFAULT);
       table = new HTable(conf, tableName);
-      table.setWriteBufferSize(4*1024*1024);
+      table.setWriteBufferSize(4 * 1024 * 1024);
       table.setAutoFlush(false);
 
       String taskId = conf.get("mapred.task.id");
@@ -146,15 +145,15 @@ public class TestLoadAndVerify  extends Configured implements Tool {
     }
 
     @Override
-    protected void map(NullWritable key, NullWritable value, 
-        Context context) throws IOException, InterruptedException {
+    protected void map(NullWritable key, NullWritable value,
+                       Context context) throws IOException, InterruptedException {
 
       String suffix = "/" + shortTaskId;
       byte[] row = Bytes.add(new byte[8], Bytes.toBytes(suffix));
 
-      int BLOCK_SIZE = (int)(recordsToWrite / 100);
+      int BLOCK_SIZE = (int) (recordsToWrite / 100);
 
-      for (long i = 0; i < recordsToWrite;) {
+      for (long i = 0; i < recordsToWrite; ) {
         long blockStart = i;
         for (long idxInBlock = 0;
              idxInBlock < BLOCK_SIZE && i < recordsToWrite;
@@ -198,8 +197,8 @@ public class TestLoadAndVerify  extends Configured implements Tool {
       BytesWritable bwVal = new BytesWritable();
       for (KeyValue kv : value.list()) {
         if (Bytes.compareTo(TEST_QUALIFIER, 0, TEST_QUALIFIER.length,
-                            kv.getBuffer(), kv.getQualifierOffset(), kv.getQualifierLength()) == 0) {
-          context.write(bwKey, EMPTY);          
+            kv.getBuffer(), kv.getQualifierOffset(), kv.getQualifierLength()) == 0) {
+          context.write(bwKey, EMPTY);
         } else {
           bwVal.set(kv.getBuffer(), kv.getQualifierOffset(), kv.getQualifierLength());
           context.write(bwVal, bwKey);
@@ -218,7 +217,7 @@ public class TestLoadAndVerify  extends Configured implements Tool {
 
     @Override
     protected void reduce(BytesWritable referredRow, Iterable<BytesWritable> referrers,
-        VerifyReducer.Context ctx) throws IOException, InterruptedException {
+                          VerifyReducer.Context ctx) throws IOException, InterruptedException {
       boolean gotOriginalRow = false;
       int refCount = 0;
 
@@ -234,7 +233,7 @@ public class TestLoadAndVerify  extends Configured implements Tool {
 
       if (!gotOriginalRow) {
         String parsedRow = makeRowReadable(referredRow.getBytes(), referredRow.getLength());
-        String binRow = Bytes.toStringBinary(referredRow.getBytes(), 0, referredRow.getLength()); 
+        String binRow = Bytes.toStringBinary(referredRow.getBytes(), 0, referredRow.getLength());
         ctx.write(new Text(binRow), new Text(parsedRow));
       }
     }
@@ -248,9 +247,9 @@ public class TestLoadAndVerify  extends Configured implements Tool {
   }
 
   private void doLoad(Configuration conf, HTableDescriptor htd) throws Exception {
-    Path outputDir = 
-      new Path(HBaseTestUtil.getMROutputDir(TEST_NAME),
-          "load-output");
+    Path outputDir =
+        new Path(HBaseTestUtil.getMROutputDir(TEST_NAME),
+            "load-output");
 
     NMapInputFormat.setNumMapTasks(conf, NUM_TASKS);
     conf.set(TABLE_NAME_KEY, htd.getNameAsString());
@@ -271,9 +270,9 @@ public class TestLoadAndVerify  extends Configured implements Tool {
   }
 
   private void doVerify(Configuration conf, HTableDescriptor htd) throws Exception {
-    Path outputDir = 
-      new Path(HBaseTestUtil.getMROutputDir(TEST_NAME),
-          "verify-output");
+    Path outputDir =
+        new Path(HBaseTestUtil.getMROutputDir(TEST_NAME),
+            "verify-output");
 
     Job job = new Job(conf);
     job.setJarByClass(this.getClass());
@@ -299,7 +298,7 @@ public class TestLoadAndVerify  extends Configured implements Tool {
   @Test
   public void testLoadAndVerify() throws Exception {
     HTableDescriptor htd =
-      HBaseTestUtil.createTestTableDescriptor(TEST_NAME, TEST_FAMILY);
+        HBaseTestUtil.createTestTableDescriptor(TEST_NAME, TEST_FAMILY);
     HBaseAdmin admin = HBaseTestUtil.getAdmin();
     assertNotNull("HBaseAdmin shouldn't be null", admin);
     int numPreCreate = 40;
@@ -315,12 +314,12 @@ public class TestLoadAndVerify  extends Configured implements Tool {
     deleteTable(admin, htd);
   }
 
-  private void deleteTable(HBaseAdmin admin, HTableDescriptor htd) 
-    throws IOException, InterruptedException {
+  private void deleteTable(HBaseAdmin admin, HTableDescriptor htd)
+      throws IOException, InterruptedException {
     // Use disableTableAsync because disable can take a long time to complete
-    System.out.print("Disabling table " + htd.getNameAsString() +" ");
+    System.out.print("Disabling table " + htd.getNameAsString() + " ");
     admin.disableTableAsync(htd.getName());
-    
+
     long start = System.currentTimeMillis();
     // NOTE: tables can be both admin.isTableEnabled=false and
     // isTableDisabled=false; when disabling, isTableDisabled must be used!
@@ -329,11 +328,11 @@ public class TestLoadAndVerify  extends Configured implements Tool {
       Thread.sleep(1000);
     }
     long delta = System.currentTimeMillis() - start;
-    System.out.println(" " + delta +" ms");
-    System.out.println("Deleting table " + htd.getNameAsString() +" ");
+    System.out.println(" " + delta + " ms");
+    System.out.println("Deleting table " + htd.getNameAsString() + " ");
     admin.deleteTable(htd.getName());
   }
-  
+
   public void usage() {
     System.err.println(this.getClass().getSimpleName() + " [-Doptions] <load|verify|loadAndVerify>");
     System.err.println("  Loads a table with row dependencies and verifies the dependency chains");
@@ -345,7 +344,7 @@ public class TestLoadAndVerify  extends Configured implements Tool {
     System.err.println("  -Dloadmapper.numPresplits=<n>    Number of presplit regions to start with (default 40)");
     System.err.println("  -Dverify.scannercaching=<n>      Number of hbase scanner caching rows to read (default 50)");
   }
-  
+
   public int run(String argv[]) throws Exception {
     if (argv.length < 1 || argv.length > 1) {
       usage();
@@ -354,16 +353,16 @@ public class TestLoadAndVerify  extends Configured implements Tool {
 
     boolean doLoad = false;
     boolean doVerify = false;
-    boolean doDelete = getConf().getBoolean("loadmapper.deleteAfter",true);
+    boolean doDelete = getConf().getBoolean("loadmapper.deleteAfter", true);
     int numPresplits = getConf().getInt("loadmapper.numPresplits", 40);
 
     if (argv[0].equals("load")) {
       doLoad = true;
     } else if (argv[0].equals("verify")) {
-      doVerify= true;
+      doVerify = true;
     } else if (argv[0].equals("loadAndVerify")) {
-      doLoad=true;
-      doVerify= true;
+      doLoad = true;
+      doVerify = true;
     } else {
       System.err.println("Invalid argument " + argv[0]);
       usage();
@@ -372,7 +371,7 @@ public class TestLoadAndVerify  extends Configured implements Tool {
 
     // create HTableDescriptor for specified table
     String table = getConf().get(TABLE_NAME_KEY, "");
-    HTableDescriptor htd ;
+    HTableDescriptor htd;
     if ("".equals(table)) {
       // Just like the unit test.
       htd = HBaseTestUtil.createTestTableDescriptor(TEST_NAME, TEST_FAMILY);

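Since TestLoadAndVerify extends Configured and implements Tool, the load/verify/loadAndVerify modes listed in usage() can also be driven through Hadoop's ToolRunner. A minimal sketch, assuming the test jar and HBase client configuration are on the classpath (the launcher class itself is hypothetical, not part of this commit):

  import org.apache.bigtop.itest.hbase.system.TestLoadAndVerify;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.util.ToolRunner;

  public class LoadAndVerifyLauncher {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      // Same effect as passing -Dloadmapper.deleteAfter=false on the command line.
      conf.setBoolean("loadmapper.deleteAfter", false);
      int rc = ToolRunner.run(conf, new TestLoadAndVerify(),
          new String[]{"loadAndVerify"});
      System.exit(rc);
    }
  }
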
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestMoveRootMetaRegions.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestMoveRootMetaRegions.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestMoveRootMetaRegions.java
index dcad25d..2132a43 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestMoveRootMetaRegions.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestMoveRootMetaRegions.java
@@ -35,39 +35,40 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 /**
-  This test contains 3 tests:
-  
-  1) Unload the region server hosting -ROOT-. Check that -ROOT- region
-  is accessible after a configurable period of time.
-
-  2) The test above for the .META. region.
-
-  3) Stop the region server(s) hosting the -ROOT- and .META. tables
-  and verify the regions are moved to other region server(s).
-
-  This test does not restart the stopped region server, so users will
-  have to manually restart the region server.
-
-  The third test is designed for clusters with more than two region servers.
-
-  Optional arguments:
-  -Droot.timeout.ms=<milliseconds to wait while trying to find -ROOT->
-  -Dmeta.timeout.ms=<milliseconds to wait while trying to find .META.>
-  -Dwait.after.move.ms=<milliseconds to wait after moving -ROOT- or .META.>
-*/
+ * This test contains 3 tests:
+ * <p/>
+ * 1) Unload the region server hosting -ROOT-. Check that -ROOT- region
+ * is accessible after a configurable period of time.
+ * <p/>
+ * 2) The test above for the .META. region.
+ * <p/>
+ * 3) Stop the region server(s) hosting the -ROOT- and .META. tables
+ * and verify the regions are moved to other region server(s).
+ * <p/>
+ * This test does not restart the stopped region server, so users will
+ * have to manually restart the region server.
+ * <p/>
+ * The third test is designed for clusters with more than two region servers.
+ * <p/>
+ * Optional arguments:
+ * -Droot.timeout.ms=<milliseconds to wait while trying to find -ROOT->
+ * -Dmeta.timeout.ms=<milliseconds to wait while trying to find .META.>
+ * -Dwait.after.move.ms=<milliseconds to wait after moving -ROOT- or .META.>
+ */
 
 public class TestMoveRootMetaRegions {
   private static final String HBASE_HOME = System.getenv("HBASE_HOME");
+
   static {
     Assert.assertNotNull("HBASE_HOME has to be set to run this test", HBASE_HOME);
   }
 
-  private static Shell sh = new Shell( "/bin/bash -s" );
+  private static Shell sh = new Shell("/bin/bash -s");
 
   private static String load_regionserver =
-    "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb load ";
+      "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb load ";
   private static String unload_regionserver =
-    "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb unload ";
+      "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb unload ";
 
   private static long meta_timeout_ms;
   private static long root_timeout_ms;
@@ -78,18 +79,18 @@ public class TestMoveRootMetaRegions {
   private static CatalogTracker ct;
 
   private static String meta_table =
-    Bytes.toStringBinary(TableName.META_TABLE_NAME.getName());
+      Bytes.toStringBinary(TableName.META_TABLE_NAME.getName());
 
   @BeforeClass
   public static void setUp() throws Exception {
     // Default timeout is 3 minutes.
     root_timeout_ms =
-      Integer.parseInt(System.getProperty("root.timeout.ms", "180000"));
+        Integer.parseInt(System.getProperty("root.timeout.ms", "180000"));
     meta_timeout_ms =
-      Integer.parseInt(System.getProperty("meta.timeout.ms", "180000"));
+        Integer.parseInt(System.getProperty("meta.timeout.ms", "180000"));
     // Default to 20 seconds.
     wait_after_move_ms =
-      Integer.parseInt(System.getProperty("wait.after.move.ms", "20000"));
+        Integer.parseInt(System.getProperty("wait.after.move.ms", "20000"));
 
     conf = HBaseConfiguration.create();
     admin = new HBaseAdmin(conf);
@@ -107,12 +108,12 @@ public class TestMoveRootMetaRegions {
     ct.stop();
   }
 
-  public static ServerName getMetaAddress() throws Exception{
+  public static ServerName getMetaAddress() throws Exception {
     return ct.waitForMeta(meta_timeout_ms);
   }
 
   @Test
-  public void unloadMetaRegionServer() throws Exception{
+  public void unloadMetaRegionServer() throws Exception {
     ServerName meta_address = getMetaAddress();
     String cmd = unload_regionserver + meta_address.getHostname();
     System.out.println("Unloading the region server hosting " + meta_table);
@@ -143,7 +144,7 @@ public class TestMoveRootMetaRegions {
 
     ServerName new_meta_address = getMetaAddress();
 
-    System.out.println(meta_table + " server address: " +  new_meta_address);
+    System.out.println(meta_table + " server address: " + new_meta_address);
     Assert.assertThat(meta_address, not(equalTo(new_meta_address)));
   }
 }

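The unload/load commands above are plain shell invocations of HBase's region_mover.rb script. A standalone sketch of the same pattern through the Bigtop Shell helper the test uses (HBASE_HOME being exported and args[0] naming a region server host are assumptions):

  import org.apache.bigtop.itest.shell.Shell;

  public class RegionMoverSketch {
    public static void main(String[] args) {
      Shell sh = new Shell("/bin/bash -s");
      // Unload all regions from the given host, as unload_regionserver does above.
      sh.exec("$HBASE_HOME/bin/hbase org.jruby.Main "
          + "$HBASE_HOME/bin/region_mover.rb unload " + args[0]);
      System.out.println(sh.getOut());
    }
  }
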
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestRegionMover.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestRegionMover.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestRegionMover.java
index 427b198..c8a7617 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestRegionMover.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestRegionMover.java
@@ -33,25 +33,25 @@ import org.apache.bigtop.itest.shell.Shell;
  * This program unloads and reloads region servers and checks that
  * regions do not get stuck in transition for too long. The region
  * servers are specified by hostname.
- * 
+ * <p/>
  * Required arguments:
  * -Dregionservers=<regionserver1>,<regionserver2>,...
- *
+ * <p/>
  * Optional arguments:
  * -Dload.iterations=<number of times to unload and load the region servers>
  * -Dtimeout.intervals=<number of times to wait for no regions in transition>
  * -Dtimeout.ms=<milliseconds to wait before checking for regions in transition>
  */
 public class TestRegionMover {
-  private static Shell sh = new Shell( "/bin/bash -s" );
+  private static Shell sh = new Shell("/bin/bash -s");
 
   // Commands to execute the region mover and get the detailed HBase status.
   private static String load_regionserver =
-    "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb load ";
+      "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb load ";
   private static String unload_regionserver =
-    "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb unload ";
+      "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb unload ";
   private static String hbase_status_detailed =
-    "echo \"status \'detailed\'\" | $HBASE_HOME/bin/hbase shell";
+      "echo \"status \'detailed\'\" | $HBASE_HOME/bin/hbase shell";
 
   // Number of times we unload/load the region servers.
   private static int load_iterations;
@@ -63,6 +63,7 @@ public class TestRegionMover {
   private static ArrayList<String> regionservers = new ArrayList<String>();
 
   private static final String HBASE_HOME = System.getenv("HBASE_HOME");
+
   static {
     assertNotNull("HBASE_HOME has to be set to run this test", HBASE_HOME);
   }
@@ -71,7 +72,7 @@ public class TestRegionMover {
   public static void setUp() throws InterruptedException {
     String region_servers = System.getProperty("regionservers", null);
     assertNotNull("Region server(s) must be specified to run this test",
-                  region_servers);
+        region_servers);
     StringTokenizer st = new StringTokenizer(region_servers, ",");
     while (st.hasMoreTokens()) {
       regionservers.add(st.nextToken());
@@ -79,8 +80,8 @@ public class TestRegionMover {
     System.out.println("Region servers to load/unload:\n" + regionservers);
 
     load_iterations = Integer.parseInt(System.getProperty("load.iterations", "10"));
-    timeout_intervals = Integer.parseInt(System.getProperty("timeout.intervals","20"));
-    timeout_ms = Integer.parseInt(System.getProperty("timeout.ms","20000"));
+    timeout_intervals = Integer.parseInt(System.getProperty("timeout.intervals", "20"));
+    timeout_ms = Integer.parseInt(System.getProperty("timeout.ms", "20000"));
   }
 
   @AfterClass
@@ -94,7 +95,7 @@ public class TestRegionMover {
       System.out.println("Wait interval: " + i);
       sh.exec(hbase_status_detailed);
       String status = sh.getOut().toString();
-      if(status.indexOf(" 0 regionsInTransition") != -1) {
+      if (status.indexOf(" 0 regionsInTransition") != -1) {
         System.out.println(" 0 regionsInTransition.");
         return;
       } else {
@@ -107,7 +108,7 @@ public class TestRegionMover {
   @Test
   public void testRegionMover() throws InterruptedException {
     System.out.println("Beginning unloading and loading of region servers " +
-                       load_iterations + " times each");
+        load_iterations + " times each");
     String cmd;
     for (int i = 0; i < load_iterations; i++) {
       for (String rs : regionservers) {

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/util/HBaseTestUtil.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/util/HBaseTestUtil.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/util/HBaseTestUtil.java
index fd2f398..4d718f6 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/util/HBaseTestUtil.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/util/HBaseTestUtil.java
@@ -43,9 +43,9 @@ import org.apache.hadoop.hbase.util.ChecksumType;
 
 public class HBaseTestUtil {
 
-  public static int BLOCKSIZE = 64*1024;
+  public static int BLOCKSIZE = 64 * 1024;
   public static String COMPRESSION =
-    Compression.Algorithm.NONE.getName();
+      Compression.Algorithm.NONE.getName();
 
   private static String getTestPrefix() {
     return String.valueOf(System.currentTimeMillis());
@@ -56,7 +56,7 @@ public class HBaseTestUtil {
   }
 
   public static HTableDescriptor createTestTableDescriptor(String testName,
-      byte[] familyName) {
+                                                           byte[] familyName) {
     byte[] tableName = getTestTableName(testName);
     HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor(familyName));
@@ -91,18 +91,17 @@ public class HBaseTestUtil {
       Configuration conf,
       FileSystem fs, Path path,
       byte[] family, byte[] qualifier,
-      byte[] startKey, byte[] endKey, int numRows) throws IOException
-  {
-      HFile.WriterFactory wf = HFile.getWriterFactory(conf, new CacheConfig(conf));
-      HFileContext hFileContext = new HFileContext();
-      wf.withFileContext(hFileContext);
-      wf.withComparator(KeyValue.COMPARATOR);
-      wf.withPath(fs, path);
+      byte[] startKey, byte[] endKey, int numRows) throws IOException {
+    HFile.WriterFactory wf = HFile.getWriterFactory(conf, new CacheConfig(conf));
+    HFileContext hFileContext = new HFileContext();
+    wf.withFileContext(hFileContext);
+    wf.withComparator(KeyValue.COMPARATOR);
+    wf.withPath(fs, path);
     HFile.Writer writer = wf.create();
     long now = System.currentTimeMillis();
     try {
       // subtract 2 since iterateOnSplits doesn't include boundary keys
-      for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, numRows-2)) {
+      for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, numRows - 2)) {
         KeyValue kv = new KeyValue(key, family, qualifier, now, key);
         writer.append(kv);
       }

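The descriptor helper above is how the system tests obtain uniquely named tables; getTestPrefix() derives the name from the current time so reruns do not collide. A minimal sketch of that flow (getAdmin() appears in TestLoadAndVerify earlier in this commit; the createTable call and table names here are assumptions for illustration):

  import org.apache.bigtop.itest.hbase.util.HBaseTestUtil;
  import org.apache.hadoop.hbase.HTableDescriptor;
  import org.apache.hadoop.hbase.client.HBaseAdmin;
  import org.apache.hadoop.hbase.util.Bytes;

  public class TableSetupSketch {
    public static void main(String[] args) throws Exception {
      // "scratch" is time-prefixed by the helper, yielding a unique table name.
      HTableDescriptor htd =
          HBaseTestUtil.createTestTableDescriptor("scratch", Bytes.toBytes("f1"));
      HBaseAdmin admin = HBaseTestUtil.getAdmin();
      admin.createTable(htd);
    }
  }
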
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hcatalog/src/main/groovy/org/apache/bigtop/itest/hcatalogsmoke/TestHcatalogBasic.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hcatalog/src/main/groovy/org/apache/bigtop/itest/hcatalogsmoke/TestHcatalogBasic.groovy b/bigtop-tests/test-artifacts/hcatalog/src/main/groovy/org/apache/bigtop/itest/hcatalogsmoke/TestHcatalogBasic.groovy
index ca609c7..2a143cb 100644
--- a/bigtop-tests/test-artifacts/hcatalog/src/main/groovy/org/apache/bigtop/itest/hcatalogsmoke/TestHcatalogBasic.groovy
+++ b/bigtop-tests/test-artifacts/hcatalog/src/main/groovy/org/apache/bigtop/itest/hcatalogsmoke/TestHcatalogBasic.groovy
@@ -58,7 +58,6 @@ public class TestHcatalogBasic {
     sh.exec("hadoop fs -rmr -skipTrash /user/hive/warehouse")
   }
 
-
   /**
    * Validate that the table created via hcat exists from Hive's world view
    */
@@ -76,14 +75,14 @@ public class TestHcatalogBasic {
     diff -u hcat_basic_describe.expected hive_hcat_basic_verify.actual
     """)
     assertEquals("hive couldn't detect the table created via hcat, return code: " + sh.ret,
-        0, sh.ret);
+      0, sh.ret);
 
     sh.exec("""
     hcat -e "DESCRIBE hcat_basic" > hcat_hcat_basic_verify.actual
     diff -u hcat_basic_describe.expected hcat_hcat_basic_verify.actual
     """)
     assertEquals("hcat couldn't detect the table created via hcat, return code: " + sh.ret,
-        0, sh.ret);
+      0, sh.ret);
 
     // Add a partition via hive
     sh.exec("hive -e \"ALTER TABLE hcat_basic ADD PARTITION (dt='2013-01-01')\"")

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy b/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
index 9cd40eb..78d7a7a 100644
--- a/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
+++ b/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
@@ -29,125 +29,125 @@ import org.apache.commons.logging.Log
 import org.apache.commons.logging.LogFactory
 
 public class TestHttpFs {
-    private static Log LOG = LogFactory.getLog(TestHttpFs.class)
- 
-    private static final String USERNAME = System.getProperty("user.name");
-    private static final String HTTPFS_PROXY = System.getenv('HTTPFS_PROXY');
-    static {
-        assertNotNull("HTTPFS_PROXY has to be set to run this test",
-            HTTPFS_PROXY);
-    }
-  
-    private static final String HTTPFS_PREFIX = "http://$HTTPFS_PROXY/webhdfs/v1";
-    private static final String HTTPFS_SUCCESS = "{\"boolean\":true}";
-    
-    private static final String DATA_DIR = System.getProperty("data.dir", "text-files");
-        
-    private static String testHttpFsFolder =  "/tmp/httpfssmoke-" + (new Date().getTime());
-    private static String testHttpFsFolderRenamed = "$testHttpFsFolder-renamed";
-    
-    private static Shell sh = new Shell("/bin/bash");
-    // it will used to cleanup directories, as they are created with via curl with user.name=$USERNAME
-    private static Shell shUSERNAME = new Shell("/bin/bash", USERNAME);
-
-    @BeforeClass
-    public static void setUp() {
-    }
+  private static Log LOG = LogFactory.getLog(TestHttpFs.class)
 
-    @AfterClass
-    public static void tearDown() {
-        // clean up of existing folders using USERNAME of user who created them via curl
-        shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolder");
-        if (shUSERNAME.getRet() == 0) {
-            shUSERNAME.exec("hadoop fs -rmr -skipTrash $testHttpFsFolder");
-            assertTrue("Deletion of previous testHttpFsFolder from HDFS failed",
-                shUSERNAME.getRet() == 0);
-        }
-        shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolderRenamed");
-        if (shUSERNAME.getRet() == 0) {
-            shUSERNAME.exec("hadoop fs -rmr -skipTrash $testHttpFsFolderRenamed");
-            assertTrue("Deletion of previous testHttpFsFolderRenamed from HDFS failed",
-                shUSERNAME.getRet() == 0);
-        }
-    }
+  private static final String USERNAME = System.getProperty("user.name");
+  private static final String HTTPFS_PROXY = System.getenv('HTTPFS_PROXY');
+  static {
+    assertNotNull("HTTPFS_PROXY has to be set to run this test",
+      HTTPFS_PROXY);
+  }
+
+  private static final String HTTPFS_PREFIX = "http://$HTTPFS_PROXY/webhdfs/v1";
+  private static final String HTTPFS_SUCCESS = "{\"boolean\":true}";
+
+  private static final String DATA_DIR = System.getProperty("data.dir", "text-files");
+
+  private static String testHttpFsFolder = "/tmp/httpfssmoke-" + (new Date().getTime());
+  private static String testHttpFsFolderRenamed = "$testHttpFsFolder-renamed";
+
+  private static Shell sh = new Shell("/bin/bash");
+  // it will be used to clean up directories, as they are created via curl with user.name=$USERNAME
+  private static Shell shUSERNAME = new Shell("/bin/bash", USERNAME);
+
+  @BeforeClass
+  public static void setUp() {
+  }
 
-    public void assertValueExists(List<String> values, String expected) {
-        boolean exists = false;
-        for (String value: values) {
-            if (expected.startsWith(value)) {
-                exists = true;
-            }
-        }
-        assertTrue(expected + " NOT found!", exists == true);
+  @AfterClass
+  public static void tearDown() {
+    // clean up of existing folders using USERNAME of user who created them via curl
+    shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolder");
+    if (shUSERNAME.getRet() == 0) {
+      shUSERNAME.exec("hadoop fs -rmr -skipTrash $testHttpFsFolder");
+      assertTrue("Deletion of previous testHttpFsFolder from HDFS failed",
+        shUSERNAME.getRet() == 0);
     }
-    
-    private void createDir(String dirname) {
-        sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$dirname?user.name=$USERNAME&op=MKDIRS'");
+    shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolderRenamed");
+    if (shUSERNAME.getRet() == 0) {
+      shUSERNAME.exec("hadoop fs -rmr -skipTrash $testHttpFsFolderRenamed");
+      assertTrue("Deletion of previous testHttpFsFolderRenamed from HDFS failed",
+        shUSERNAME.getRet() == 0);
     }
+  }
 
-    @Test
-    public void testCreateDir() {
-        createDir(testHttpFsFolder)
-        assertTrue("curl command to create a dir failed", sh.getRet() == 0);
-        assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+  public void assertValueExists(List<String> values, String expected) {
+    boolean exists = false;
+    for (String value : values) {
+      if (expected.startsWith(value)) {
+        exists = true;
+      }
     }
+    assertTrue(expected + " NOT found!", exists == true);
+  }
 
-    @Test
-    public void testRenameDir() { 
-        createDir(testHttpFsFolder);
-        assertTrue("curl command to create a dir failed", sh.getRet() == 0);
-        assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
-        sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=RENAME&destination=$testHttpFsFolderRenamed'");
-        assertTrue("curl command to rename a dir failed", sh.getRet() == 0);
-        assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
-    }
+  private void createDir(String dirname) {
+    sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$dirname?user.name=$USERNAME&op=MKDIRS'");
+  }
 
-    @Test
-    public void testDeleteDir() {
-        createDir(testHttpFsFolder);
-        assertTrue("curl command to create a dir failed", sh.getRet() == 0);
-        assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
-        sh.exec("curl -i -X DELETE '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=DELETE'");
-        assertTrue("curl command to delete a dir failed", sh.getRet() == 0);
-        assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
-    }
-    
-    @Test
-    public void testStatusDir() { 
-        createDir(testHttpFsFolder);
-        assertTrue("curl command to create a dir failed", sh.getRet() == 0);
-        assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
-        sh.exec("curl -i '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=GETFILESTATUS'");
-        assertTrue("curl command to create a dir failed", sh.getRet() == 0);
-        assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
-        assertValueExists(sh.getOut(), "DIRECTORY");
-    }
+  @Test
+  public void testCreateDir() {
+    createDir(testHttpFsFolder)
+    assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+    assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+  }
+
+  @Test
+  public void testRenameDir() {
+    createDir(testHttpFsFolder);
+    assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+    assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+    sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=RENAME&destination=$testHttpFsFolderRenamed'");
+    assertTrue("curl command to rename a dir failed", sh.getRet() == 0);
+    assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+  }
+
+  @Test
+  public void testDeleteDir() {
+    createDir(testHttpFsFolder);
+    assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+    assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+    sh.exec("curl -i -X DELETE '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=DELETE'");
+    assertTrue("curl command to delete a dir failed", sh.getRet() == 0);
+    assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+  }
+
+  @Test
+  public void testStatusDir() {
+    createDir(testHttpFsFolder);
+    assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+    assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+    sh.exec("curl -i '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=GETFILESTATUS'");
+    assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+    assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+    assertValueExists(sh.getOut(), "DIRECTORY");
+  }
+
+  @Test
+  public void testCreateFile() {
+    String filename = "helloworld.txt";
+    String filenameContent = 'Hello World!';
 
-    @Test
-    public void testCreateFile() {
-        String filename = "helloworld.txt";
-        String filenameContent = 'Hello World!';
-        
-        createDir(testHttpFsFolder);
-        assertTrue("curl command to create a dir failed", sh.getRet() == 0);
-        sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=CREATE'");
-        assertTrue("curl command to create a file failed", sh.getRet() == 0);
-        String datanodeLocation = null;
-        sh.getOut().each {
-            if (it.startsWith("Location:")) {
-                datanodeLocation = it.split(' ')[1];
-                return true;
-            }
-        }
-        LOG.debug("Datanode location: $datanodeLocation");
-        assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
-        sh.exec("curl -i -T $DATA_DIR/$filename '$datanodeLocation' --header 'Content-Type:application/octet-stream'");
-        assertTrue("curl command to create a file failed", sh.getRet() == 0);
-        assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
-        sh.exec("curl -i -L '$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=OPEN'");
-        assertTrue("curl command to create a file failed", sh.getRet() == 0);
-        assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
-        assertValueExists(sh.getOut(), filenameContent);
+    createDir(testHttpFsFolder);
+    assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+    sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=CREATE'");
+    assertTrue("curl command to create a file failed", sh.getRet() == 0);
+    String datanodeLocation = null;
+    sh.getOut().each {
+      if (it.startsWith("Location:")) {
+        datanodeLocation = it.split(' ')[1];
+        return true;
+      }
     }
+    LOG.debug("Datanode location: $datanodeLocation");
+    assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+    sh.exec("curl -i -T $DATA_DIR/$filename '$datanodeLocation' --header 'Content-Type:application/octet-stream'");
+    assertTrue("curl command to create a file failed", sh.getRet() == 0);
+    assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+    sh.exec("curl -i -L '$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=OPEN'");
+    assertTrue("curl command to create a file failed", sh.getRet() == 0);
+    assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+    assertValueExists(sh.getOut(), filenameContent);
+  }
 }
 

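The Groovy test above exercises the HttpFS REST endpoints purely through curl. The same MKDIRS request as a minimal Java sketch (the target path and the absence of error handling are assumptions; HTTPFS_PROXY and user.name mirror the test's setup):

  import java.net.HttpURLConnection;
  import java.net.URL;

  public class HttpFsMkdirsSketch {
    public static void main(String[] args) throws Exception {
      String proxy = System.getenv("HTTPFS_PROXY");
      String user = System.getProperty("user.name");
      URL url = new URL("http://" + proxy + "/webhdfs/v1/tmp/httpfssmoke"
          + "?user.name=" + user + "&op=MKDIRS");
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      conn.setRequestMethod("PUT");
      // A successful call answers with {"boolean":true}, the HTTPFS_SUCCESS marker above.
      System.out.println("HTTP " + conn.getResponseCode());
      conn.disconnect();
    }
  }
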
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hue/src/main/groovy/org/apache/bigtop/itest/huesmoke/TestHueSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hue/src/main/groovy/org/apache/bigtop/itest/huesmoke/TestHueSmoke.groovy b/bigtop-tests/test-artifacts/hue/src/main/groovy/org/apache/bigtop/itest/huesmoke/TestHueSmoke.groovy
index 50c0a46..4b5a46d 100644
--- a/bigtop-tests/test-artifacts/hue/src/main/groovy/org/apache/bigtop/itest/huesmoke/TestHueSmoke.groovy
+++ b/bigtop-tests/test-artifacts/hue/src/main/groovy/org/apache/bigtop/itest/huesmoke/TestHueSmoke.groovy
@@ -29,14 +29,14 @@ public class TestHueSmoke {
   final static String loginURL = "${hueServer}/accounts/login/";
   final static String checkURL = "${hueServer}/debug/check_config";
   final static String creds = "username=admin&password=admin";
-  final static Map checkApps = [ "about"            : "<title>About Hue</title>",
-                                 "filebrowser/view" : "<title>File Browser</title>",
-                                 "help"             : "<title>Hue Help</title>",
-                                 // FIXME: HUE-10 "jobbrowser"       : "<title>Error</title>",
-                                 "jobsub"           : "<title>Job Designer</title>",
-                                 "useradmin"        : "<title>Hue Users</title>",
-                                 "beeswax"          : "<title>Hive Query</title>",
-                                 "oozie"            : "<title>Oozie App</title>" ];
+  final static Map checkApps = ["about": "<title>About Hue</title>",
+    "filebrowser/view": "<title>File Browser</title>",
+    "help": "<title>Hue Help</title>",
+    // FIXME: HUE-10 "jobbrowser"       : "<title>Error</title>",
+    "jobsub": "<title>Job Designer</title>",
+    "useradmin": "<title>Hue Users</title>",
+    "beeswax": "<title>Hive Query</title>",
+    "oozie": "<title>Oozie App</title>"];
 
   Shell sh = new Shell();
 
@@ -53,7 +53,7 @@ public class TestHueSmoke {
 
     sh.exec("curl -m 60 -b '${sessionId}' ${checkURL}");
     assertTrue("Global configuration check failed",
-               sh.getOut().grep( ~/.*All ok. Configuration check passed.*/ ).size() > 0);
+      sh.getOut().grep(~/.*All ok. Configuration check passed.*/).size() > 0);
     checkApps.each { app, expected ->
       sh.exec("curl -m 60 -b '${sessionId}' ${hueServer}/${app}/");
       if (sh.getOut().join(' ').indexOf(expected) == -1) {
@@ -61,6 +61,6 @@ public class TestHueSmoke {
       }
     }
     assertEquals("Application(s) ${failedApps} failed to respond",
-                 failedApps.size(), 0);
+      failedApps.size(), 0);
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestDFSIO.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestDFSIO.groovy b/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestDFSIO.groovy
index caf2dbc..0772a76 100644
--- a/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestDFSIO.groovy
+++ b/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestDFSIO.groovy
@@ -72,10 +72,10 @@ public class TestDFSIO {
 
   @Test
   public void testDFSIO() {
-    if(FailureVars.instance.getRunFailures()
-    || FailureVars.instance.getServiceRestart()
-    || FailureVars.instance.getServiceKill()
-    || FailureVars.instance.getNetworkShutdown()) {
+    if (FailureVars.instance.getRunFailures()
+      || FailureVars.instance.getServiceRestart()
+      || FailureVars.instance.getServiceKill()
+      || FailureVars.instance.getNetworkShutdown()) {
       runFailureThread();
     }
 
@@ -95,7 +95,7 @@ public class TestDFSIO {
     }
   }
 
-  private void executeCmd(String cmd, String expectedFile){
+  private void executeCmd(String cmd, String expectedFile) {
     sh.exec(cmd);
     logError(sh);
     assertTrue("Command " + cmd + " is unsuccessful", sh.getRet() == 0);


[5/5] bigtop git commit: BIGTOP-1601. cleanup whitespaces across test-artifacts

Posted by db...@apache.org.
BIGTOP-1601. cleanup whitespaces across test-artifacts


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/3e17db89
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/3e17db89
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/3e17db89

Branch: refs/heads/master
Commit: 3e17db895a0a4bdeacec6c0097e3be0cd93d49e0
Parents: bb86afb
Author: Dasha <da...@wandisco.com>
Authored: Wed Feb 11 22:13:35 2015 -0800
Committer: Dasha <da...@wandisco.com>
Committed: Wed Feb 11 22:13:35 2015 -0800

----------------------------------------------------------------------
 .../org/apache/bigtop/itest/Contract.java       |   1 +
 .../org/apache/bigtop/itest/JUnitUtils.groovy   |  32 +-
 .../org/apache/bigtop/itest/JarContent.groovy   |  26 +-
 .../apache/bigtop/itest/LogErrorsUtils.groovy   |  10 +-
 .../apache/bigtop/itest/ParameterSetter.java    |  60 +--
 .../org/apache/bigtop/itest/Property.java       |  19 +-
 .../apache/bigtop/itest/TestListUtils.groovy    |   2 +-
 .../org/apache/bigtop/itest/TestUtils.groovy    |   6 +-
 .../org/apache/bigtop/itest/Variable.java       |  11 +-
 .../itest/failures/AbstractFailure.groovy       |   7 +-
 .../itest/failures/FailureExecutor.groovy       |   9 +-
 .../bigtop/itest/failures/FailureVars.groovy    |  17 +-
 .../failures/NetworkShutdownFailure.groovy      |   7 +-
 .../itest/failures/ServiceKilledFailure.groovy  |   3 +-
 .../itest/failures/ServiceRestartFailure.groovy |   3 +-
 .../itest/junit/OrderedParameterized.java       | 211 ++++----
 .../pmanager/AptCmdLinePackageManager.groovy    |  16 +-
 .../bigtop/itest/pmanager/DEBPackage.groovy     |   2 +-
 .../itest/pmanager/PackageInstance.groovy       |   4 +-
 .../bigtop/itest/pmanager/PackageManager.groovy |  60 +--
 .../bigtop/itest/pmanager/RPMPackage.groovy     |   8 +-
 .../pmanager/UrpmiCmdLinePackageManager.groovy  |   6 +-
 .../pmanager/YumCmdLinePackageManager.groovy    |   8 +-
 .../pmanager/ZypperCmdLinePackageManager.groovy |   6 +-
 .../bigtop/itest/posix/Alternative.groovy       |  75 +--
 .../org/apache/bigtop/itest/posix/UGI.groovy    |  22 +-
 .../apache/bigtop/itest/shell/JUnitShell.groovy |   4 +-
 .../org/apache/bigtop/itest/shell/OS.groovy     |   6 +-
 .../org/apache/bigtop/itest/shell/Shell.groovy  |  27 +-
 .../apache/bigtop/itest/JUnitUtilsTest.groovy   |  12 +-
 .../apache/bigtop/itest/JarContentTest.groovy   |   9 +-
 .../bigtop/itest/TestContractGroovy.groovy      |  32 +-
 .../apache/bigtop/itest/TestContractJava.java   |  99 ++--
 .../bigtop/itest/TestContractJavaProc.java      | 103 ++--
 .../bigtop/itest/TestListUtilsTest.groovy       |   2 +-
 .../IntegrationTestClusterFailures.groovy       |   6 +-
 .../itest/junit/OrderedParameterizedTest.groovy |   6 +-
 .../bigtop/itest/junit/OrderedTest.groovy       |   6 +-
 .../itest/pmanager/PackageManagerTest.groovy    |  22 +-
 .../bigtop/itest/posix/AlternativeTest.groovy   |   6 +-
 .../bigtop/itest/posix/ServiceTest.groovy       |   4 +-
 .../apache/bigtop/itest/posix/UGITest.groovy    |   4 +-
 .../itest/crunchsmoke/TestCrunchSmoke.groovy    |  22 +-
 .../itest/flumesmoke/TestFlumeSmoke.groovy      |  14 +-
 .../resources/FlumeSmokeBzip2/flume-site.xml    |   2 +-
 .../resources/FlumeSmokeDeflate/flume-site.xml  |   2 +-
 .../resources/FlumeSmokeGzip/flume-site.xml     |   2 +-
 .../itest/giraphsmoke/TestGiraphSmoke.groovy    |  40 +-
 .../bigtop/itest/hadoop/hcfs/TestCLI.java       |   4 +-
 .../itest/hadoop/hcfs/TestFuseHCFS.groovy       | 216 ++++----
 .../itest/hadoop/hdfs/TestBlockRecovery.groovy  |  10 +-
 .../itest/hadoop/hdfs/TestDFSAdmin.groovy       |  44 +-
 .../bigtop/itest/hadoop/hdfs/TestDFSCLI.java    |   2 +-
 .../itest/hadoop/hdfs/TestDistCpIntra.groovy    |  28 +-
 .../itest/hadoop/hdfs/TestFileAppend.groovy     |  38 +-
 .../bigtop/itest/hadoop/hdfs/TestFsck.groovy    |   2 +-
 .../itest/hadoop/hdfs/TestHDFSBalancer.groovy   |  14 +-
 .../itest/hadoop/hdfs/TestHDFSQuota.groovy      |  86 ++--
 .../itest/hadoop/hdfs/TestTextSnappy.groovy     |   6 +-
 .../hadoop/mapreduce/TestHadoopExamples.groovy  |  92 ++--
 .../hadoop/mapreduce/TestHadoopSmoke.groovy     |  10 +-
 .../bigtop/itest/hadoop/yarn/TestNode.groovy    |  10 +-
 .../bigtop/itest/hadoop/yarn/TestRmAdmin.groovy |  12 +-
 .../itest/hbase/smoke/IncrementalPELoad.java    |  21 +-
 .../bigtop/itest/hbase/smoke/TestCopyTable.java |  13 +-
 .../itest/hbase/smoke/TestHBaseBalancer.groovy  |   4 +-
 .../hbase/smoke/TestHBaseCompression.groovy     |   8 +-
 .../hbase/smoke/TestHBaseImportExport.groovy    |   2 +-
 .../itest/hbase/smoke/TestHBasePigSmoke.groovy  |  38 +-
 .../itest/hbase/smoke/TestHBaseSmoke.java       |   2 +-
 .../hbase/smoke/TestHFileOutputFormat.java      |  27 +-
 .../itest/hbase/smoke/TestImportTsv.groovy      |  14 +-
 .../hbase/smoke/TestLoadIncrementalHFiles.java  |  24 +-
 .../bigtop/itest/hbase/system/Putter.java       |  10 +-
 .../bigtop/itest/hbase/system/Scanner.java      |  14 +-
 .../hbase/system/TestConcurrentScanAndPut.java  |  46 +-
 .../itest/hbase/system/TestLoadAndVerify.java   |  89 ++--
 .../hbase/system/TestMoveRootMetaRegions.java   |  61 +--
 .../itest/hbase/system/TestRegionMover.java     |  23 +-
 .../bigtop/itest/hbase/util/HBaseTestUtil.java  |  21 +-
 .../hcatalogsmoke/TestHcatalogBasic.groovy      |   5 +-
 .../bigtop/itest/httpfs/TestHttpFs.groovy       | 220 ++++----
 .../bigtop/itest/huesmoke/TestHueSmoke.groovy   |  20 +-
 .../bigtop/itest/iolongevity/TestDFSIO.groovy   |  10 +-
 .../bigtop/itest/iolongevity/TestSLive.groovy   |  48 +-
 .../mahout/smoke/TestMahoutExamples.groovy      | 496 +++++++++----------
 .../itest/ooziesmoke/TestOozieSmoke.groovy      |  48 +-
 .../bigtop/itest/packagesmoke/BTServices.groovy | 132 ++---
 .../packagesmoke/CDHUpgradeSequence.groovy      |   6 +-
 .../bigtop/itest/packagesmoke/DeployCDH.groovy  | 118 ++---
 .../itest/packagesmoke/PackageTestCommon.groovy | 219 ++++----
 .../packagesmoke/PackageTestErrorProxy.java     |  10 +-
 .../packagesmoke/PackageTestRepoMgr.groovy      |  18 +-
 .../itest/packagesmoke/StateVerifier.groovy     |   1 +
 .../packagesmoke/StateVerifierFlume.groovy      |  14 +-
 .../packagesmoke/StateVerifierHBase.groovy      |   8 +-
 .../itest/packagesmoke/StateVerifierHue.groovy  |   6 +-
 .../packagesmoke/StateVerifierOozie.groovy      |   2 +-
 .../packagesmoke/StateVerifierSqoop.groovy      |   2 +-
 .../packagesmoke/StateVerifierZookeeper.groovy  |   4 +-
 .../packagesmoke/TestPackagesBasics.groovy      |  79 +--
 .../TestPackagesBasicsWithRM.groovy             |   2 +-
 .../TestPackagesPseudoDistributed.groovy        |   6 +-
 ...stPackagesPseudoDistributedDependency.groovy |   6 +-
 ...PackagesPseudoDistributedFileContents.groovy |   6 +-
 ...TestPackagesPseudoDistributedServices.groovy |   6 +-
 .../TestPackagesPseudoDistributedState.groovy   |   4 +-
 .../TestPackagesPseudoDistributedUpgrade.groovy |  10 +-
 .../TestPackagesPseudoDistributedWithRM.groovy  |   5 +-
 .../itest/packagesmoke/TestServices.groovy      |  18 +-
 .../packagesmoke/TestServicesCreateState.groovy |   4 +-
 .../TestServicesCreateStateMissing.groovy       |   2 +-
 .../packagesmoke/TestServicesVerifyState.groovy |   2 +-
 .../itest/phoenix/smoke/TestPhoenixSmoke.groovy |  13 +-
 .../bigtop/itest/solr/smoke/SolrTestBase.groovy |   2 +-
 .../bigtop/itest/solr/smoke/TestIndexing.groovy |   2 +-
 .../bigtop/itest/solr/smoke/TestPing.groovy     |   1 +
 .../bigtop/itest/solr/smoke/TestSimple.groovy   |   4 +-
 .../itest/solr/smoke/TestStatistics.groovy      |   5 +-
 .../bigtop/itest/spark/TestSparkSmoke.groovy    |  10 +-
 .../sqoop/IntegrationTestSqoopHBase.groovy      |  11 +-
 .../sqoop/IntegrationTestSqoopHive.groovy       |   6 +-
 .../bigtop/itest/sqoop/TestSqoopExport.groovy   |  28 +-
 .../bigtop/itest/sqoop/TestSqoopImport.groovy   |  67 ++-
 124 files changed, 1901 insertions(+), 1837 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Contract.java
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Contract.java b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Contract.java
index 1a79df2..31be813 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Contract.java
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Contract.java
@@ -35,5 +35,6 @@ import java.lang.annotation.Target;
 @Target(ElementType.TYPE)
 public @interface Contract {
   Variable[] env();
+
   Property[] properties();
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/JUnitUtils.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/JUnitUtils.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/JUnitUtils.groovy
index 4433d7d..c7bcd0f 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/JUnitUtils.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/JUnitUtils.groovy
@@ -32,23 +32,23 @@ public class JUnitUtils {
    */
   static boolean executeTests(Class... testClasses) {
     def ant = new AntBuilder()
-    def res = ant.junit (printsummary:'yes', fork:'yes', forkmode:'once', 
-                         errorproperty: DETECT_ERRORS, failureproperty: DETECT_FAILS) {
-          System.getProperties().each { k, v ->
-            sysproperty(key: k, value: v)
-          }
-          classpath {
-             System.getProperty('java.class.path').
-                    split(System.getProperty('path.separator',':')).each {
-               pathelement(location: it);
-             }
-          }
-          testClasses.each {
-            test(name: it.getName(), todir: System.getProperty(RESULTS_DIR, '.'));
-          }
-          formatter(type:'xml');
+    def res = ant.junit(printsummary: 'yes', fork: 'yes', forkmode: 'once',
+      errorproperty: DETECT_ERRORS, failureproperty: DETECT_FAILS) {
+      System.getProperties().each { k, v ->
+        sysproperty(key: k, value: v)
+      }
+      classpath {
+        System.getProperty('java.class.path').
+          split(System.getProperty('path.separator', ':')).each {
+          pathelement(location: it);
+        }
+      }
+      testClasses.each {
+        test(name: it.getName(), todir: System.getProperty(RESULTS_DIR, '.'));
+      }
+      formatter(type: 'xml');
     }
     return !(ant.project.getProperty(DETECT_FAILS) == "true" ||
-             ant.project.getProperty(DETECT_ERRORS) == "true");
+      ant.project.getProperty(DETECT_ERRORS) == "true");
   }
 }

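Driving tests through the runner above is a single static call. A sketch with hypothetical test classes (MyHdfsTest and MyYarnTest do not exist in this tree):

  import org.apache.bigtop.itest.JUnitUtils;

  public class SuiteRunnerSketch {
    public static void main(String[] args) {
      // Forks one JVM for all classes and writes XML results to RESULTS_DIR.
      boolean ok = JUnitUtils.executeTests(MyHdfsTest.class, MyYarnTest.class);
      System.exit(ok ? 0 : 1);
    }
  }
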
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/JarContent.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/JarContent.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/JarContent.groovy
index d376482..59edb40 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/JarContent.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/JarContent.groovy
@@ -96,7 +96,8 @@ public abstract class JarContent {
    * @param filters list of patterns
    * @return filtered-out list of entries
    */
-  def static List<String> applyExcludeFilter(final List<String> list, final List<String> filters) {
+  def static List<String> applyExcludeFilter(
+    final List<String> list, final List<String> filters) {
     List<String> filtered = list.asList();
     ArrayList<String> toRemove = new ArrayList<String>();
 
@@ -155,10 +156,10 @@ public abstract class JarContent {
   public static String getJarName(String baseDir, String namePattern) {
     try {
       return new File(baseDir).list(
-          [accept: {d, f -> f ==~ /$namePattern/ }] as FilenameFilter
+        [accept: { d, f -> f ==~ /$namePattern/ }] as FilenameFilter
       ).toList().get(0);
     } catch (java.lang.IndexOutOfBoundsException ioob) {
-      LOG.error ("No $namePattern has been found under $baseDir. Check your installation.");
+      LOG.error("No $namePattern has been found under $baseDir. Check your installation.");
     } catch (java.lang.NullPointerException npe) {
       LOG.error("No $baseDir exists. Check your installation.");
     }
@@ -173,23 +174,23 @@ public abstract class JarContent {
    * @param includes
    * @throws IOException if can't find class' jar file in the classpath
    */
-  public static void unpackJarContainer (Class ref,
-    String destination, String includes) throws IOException {
+  public static void unpackJarContainer(Class ref,
+                                        String destination, String includes) throws IOException {
     URL connection = JarContent.getJarURL(ref);
     if (connection == null) {
       throw new IOException("Class " + ref.getSimpleName() +
-          " doesn't belong to any jar file in the classpath");
+        " doesn't belong to any jar file in the classpath");
     }
     ZipInputStream fis =
       new ZipInputStream(connection.openConnection().getInputStream());
-    fis.unzip (destination, includes);
+    fis.unzip(destination, includes);
 
   }
 
-  public static unpackJarContainer (String className,
-    String destination, String includes) throws IOException {
+  public static unpackJarContainer(String className,
+                                   String destination, String includes) throws IOException {
     Class cl = Class.forName(className)
-    unpackJarContainer (cl, destination, includes)
+    unpackJarContainer(cl, destination, includes)
   }
 
   private static void bootstrapPlugins() {
@@ -217,7 +218,7 @@ public abstract class JarContent {
           if (!entry.isDirectory()) {
             new File(dest + File.separator + entry.name).parentFile?.mkdirs()
             def output = new FileOutputStream(dest + File.separator
-                + entry.name)
+              + entry.name)
             output.withStream {
               int len;
               byte[] buffer = new byte[4096]
@@ -225,8 +226,7 @@ public abstract class JarContent {
                 output.write(buffer, 0, len);
               }
             }
-          }
-          else {
+          } else {
             new File(dest + File.separator + entry.name).mkdir()
           }
         }

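unpackJarContainer locates the jar that carries a given class and unzips it; a minimal sketch of the call, mirroring how TestUtils.unpackTestResources uses it further down in this commit (destination ".", no include filter):

  import java.io.IOException;
  import org.apache.bigtop.itest.JarContent;

  public class UnpackSketch {
    public static void main(String[] args) throws IOException {
      // Unpacks the jar containing this class into the working directory.
      JarContent.unpackJarContainer(UnpackSketch.class, ".", null);
    }
  }
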
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/LogErrorsUtils.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/LogErrorsUtils.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/LogErrorsUtils.groovy
index 6f46935..480c4ff 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/LogErrorsUtils.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/LogErrorsUtils.groovy
@@ -22,12 +22,12 @@ import org.apache.bigtop.itest.shell.Shell
 
 public class LogErrorsUtils {
 
-  static void logError (final Shell sh) {
+  static void logError(final Shell sh) {
     if (sh.getRet()) {
-      println ('Failed command: ' + sh.script);
-      println ('\terror code: ' + sh.getRet());
-      println ('\tstdout: ' + sh.getOut());
-      println ('\tstderr: ' + sh.getErr());
+      println('Failed command: ' + sh.script);
+      println('\terror code: ' + sh.getRet());
+      println('\tstdout: ' + sh.getOut());
+      println('\tstderr: ' + sh.getErr());
     }
   }
 

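A sketch of the intended call pattern for logError (the failing command is an arbitrary assumption):

  import org.apache.bigtop.itest.LogErrorsUtils;
  import org.apache.bigtop.itest.shell.Shell;

  public class LogErrorsSketch {
    public static void main(String[] args) {
      Shell sh = new Shell("/bin/bash");
      sh.exec("hadoop fs -ls /no/such/path");
      // Prints the script, return code, stdout and stderr only if the command failed.
      LogErrorsUtils.logError(sh);
    }
  }
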
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/ParameterSetter.java
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/ParameterSetter.java b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/ParameterSetter.java
index 2930f19..7fa03f2 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/ParameterSetter.java
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/ParameterSetter.java
@@ -41,6 +41,7 @@ public class ParameterSetter {
    * with a digit.)
    * If an environment variable is required and it is not set, an
    * AssertionError is thrown.
+   *
    * @param target the test class
    * @throws NoSuchFieldException
    * @throws IllegalAccessException
@@ -68,10 +69,11 @@ public class ParameterSetter {
    * names.
    * If an environment variable is required and it is not set, an
    * AssertionError is thrown.
-   * @param target the test class
+   *
+   * @param target     the test class
    * @param fieldNames the names of the static fields corresponding to the
-   * environment variables; the number of names must match the number of
-   * environment variables
+   *                   environment variables; the number of names must match the number of
+   *                   environment variables
    * @throws NoSuchFieldException
    * @throws IllegalAccessException
    */
@@ -103,6 +105,7 @@ public class ParameterSetter {
    * If a system property is not set, the parameter is set to a default value.
    * Therefore usable default values must be provided in the annotation or else
    * test logic must be written to handle the lack thereof.
+   *
    * @param target the test class
    * @throws NoSuchFieldException
    * @throws IllegalAccessException
@@ -117,17 +120,17 @@ public class ParameterSetter {
       Field field = target.getDeclaredField(name.replace('.', '_'));
       Object value = null;
       switch (prop.type()) {
-      case STRING:
-        value = System.getProperty(name, prop.defaultValue());
-        break;
-      case INT:
-        value = Integer.getInteger(name, prop.intValue());
-        break;
-      case LONG:
-        value = Long.getLong(name, prop.longValue());
-        break;
-      case BOOLEAN:
-        value = Boolean.getBoolean(name);
+        case STRING:
+          value = System.getProperty(name, prop.defaultValue());
+          break;
+        case INT:
+          value = Integer.getInteger(name, prop.intValue());
+          break;
+        case LONG:
+          value = Long.getLong(name, prop.longValue());
+          break;
+        case BOOLEAN:
+          value = Boolean.getBoolean(name);
       }
       field.setAccessible(true);
       field.set(target, value);
@@ -141,10 +144,11 @@ public class ParameterSetter {
    * If a system property is not set, the parameter is set to a default value.
    * Therefore usable default values must be provided in the annotation or else
    * test logic must be written to handle the lack thereof.
-   * @param target the test class
+   *
+   * @param target     the test class
    * @param fieldNames the names of the static fields corresponding to the
-   * system properties; the number of names must match the number of
-   * system properties
+   *                   system properties; the number of names must match the number of
+   *                   system properties
    * @throws NoSuchFieldException
    * @throws IllegalAccessException
    */
@@ -159,17 +163,17 @@ public class ParameterSetter {
       Field field = target.getDeclaredField(fieldNames[i]);
       Object value = null;
       switch (prop.type()) {
-      case STRING:
-        value = System.getProperty(name, prop.defaultValue());
-        break;
-      case INT:
-        value = Integer.getInteger(name, prop.intValue());
-        break;
-      case LONG:
-        value = Long.getLong(name, prop.longValue());
-        break;
-      case BOOLEAN:
-        value = Boolean.getBoolean(name);
+        case STRING:
+          value = System.getProperty(name, prop.defaultValue());
+          break;
+        case INT:
+          value = Integer.getInteger(name, prop.intValue());
+          break;
+        case LONG:
+          value = Long.getLong(name, prop.longValue());
+          break;
+        case BOOLEAN:
+          value = Boolean.getBoolean(name);
       }
       field.setAccessible(true);
       field.set(target, value);

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Property.java
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Property.java b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Property.java
index 19cd657..f10a0d3 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Property.java
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Property.java
@@ -21,21 +21,26 @@ package org.apache.bigtop.itest;
 import java.lang.annotation.Documented;
 
 /**
-   Specifies a parameter to be passed into a test via a system property.
-   The parameter may be a String, an int, a long, or a boolean. If the type
-   of the parameter is not specified, it defaults to String.
-   A default value (String value, int value, long value) may be specified
-   for the parameter if its type is not boolean; the default value of a
-   boolean parameter is false.
-*/
+ * Specifies a parameter to be passed into a test via a system property.
+ * The parameter may be a String, an int, a long, or a boolean. If the type
+ * of the parameter is not specified, it defaults to String.
+ * A default value (String value, int value, long value) may be specified
+ * for the parameter if its type is not boolean; the default value of a
+ * boolean parameter is false.
+ */
 @Documented
 public @interface Property {
   public static enum Type {
     STRING, INT, LONG, BOOLEAN;
   }
+
   String name();
+
   Type type() default Type.STRING;
+
   String defaultValue() default "";
+
   int intValue() default 0;
+
   long longValue() default 0L;
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestListUtils.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestListUtils.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestListUtils.groovy
index 7b762d1..c716486 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestListUtils.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestListUtils.groovy
@@ -33,7 +33,7 @@ public class TestListUtils {
     }
 
     List<String> pathArray = pathName.split(FS).toList();
-    def prefix =  "";
+    def prefix = "";
     if (dirPrefix != null)
       prefix = dirPrefix;
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestUtils.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestUtils.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestUtils.groovy
index f4bc247..b513681 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestUtils.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestUtils.groovy
@@ -40,7 +40,7 @@ public class TestUtils {
    */
   public static void unpackTestResources(Class ref, String inputDir, String[] inputFiles, String outputDir) {
     // Unpack resource
-    JarContent.unpackJarContainer(ref, '.' , null);
+    JarContent.unpackJarContainer(ref, '.', null);
 
     // create input dir in HDFS
     if (inputDir != null) {
@@ -48,7 +48,7 @@ public class TestUtils {
       if (sh.getRet() == 0) {
         sh.exec("hadoop fs -rmr -skipTrash ${inputDir}");
         assertTrue("Deletion of previous $inputDir from the DFS failed",
-            sh.getRet() == 0);
+          sh.getRet() == 0);
       }
       if (inputFiles != null) {
         sh.exec("hadoop fs -mkdir -p ${inputDir}");
@@ -71,7 +71,7 @@ public class TestUtils {
       if (sh.getRet() == 0) {
         sh.exec("hadoop fs -rmr -skipTrash ${outputDir}");
         assertTrue("Deletion of previous examples output from the DFS failed",
-            sh.getRet() == 0);
+          sh.getRet() == 0);
       }
       sh.exec("hadoop fs -mkdir -p ${outputDir}");
       assertTrue("Could not create output directory in DFS", sh.getRet() == 0);

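As a usage sketch for unpackTestResources(): the helper unpacks the jar containing the test's resources, then recreates the given input and output directories in HDFS, wiping stale copies first. The class name and paths below are illustrative only:

  import org.apache.bigtop.itest.TestUtils

  // Illustrative invocation; WordCountTest and the paths are examples.
  TestUtils.unpackTestResources(WordCountTest.class,
    "examples/input",                        // HDFS input dir, recreated
    ["part1.txt", "part2.txt"] as String[],  // resources copied into it
    "examples/output")                       // HDFS output dir, wiped first
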
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Variable.java
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Variable.java b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Variable.java
index 5753a7c..ff5a084 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Variable.java
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/Variable.java
@@ -21,13 +21,14 @@ package org.apache.bigtop.itest;
 import java.lang.annotation.Documented;
 
 /**
-   Specifies a parameter to be passed into a test via an environment variable.
-   The parameter is a String.
-   By default, the parameter is required. If it is required and a non-null value
-   cannot be found for it, an error may be thrown.
-*/
+ * Specifies a parameter to be passed into a test via an environment variable.
+ * The parameter is a String.
+ * By default, the parameter is required. If it is required and a non-null value
+ * cannot be found for it, an error may be thrown.
+ */
 @Documented
 public @interface Variable {
   String name();
+
   boolean required() default true;
 }

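A short, illustrative sketch of the environment-variable side (variable names are examples; the field-per-variable mapping matches TestContractGroovy later in this commit):

  import org.junit.BeforeClass
  import org.apache.bigtop.itest.Contract
  import org.apache.bigtop.itest.ParameterSetter
  import org.apache.bigtop.itest.Variable

  @Contract(
    properties = [],
    env = [
      @Variable(name = "HADOOP_CONF_DIR"),                    // required: error if unset
      @Variable(name = "BIGTOP_EXTRA_OPTS", required = false) // stays null if unset
    ]
  )
  class EnvDrivenTest {
    static String HADOOP_CONF_DIR
    static String BIGTOP_EXTRA_OPTS

    @BeforeClass
    static void setUp() {
      ParameterSetter.setEnv(EnvDrivenTest.class)
    }
  }
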
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/AbstractFailure.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/AbstractFailure.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/AbstractFailure.groovy
index f88358f..0505d7f 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/AbstractFailure.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/AbstractFailure.groovy
@@ -101,20 +101,19 @@ public abstract class AbstractFailure implements Runnable {
   @Override
   public void run() {
     try {
-      if(failureDelay > 0) {
+      if (failureDelay > 0) {
         try {
           Thread.sleep(failureDelay)
         } catch (InterruptedException e) {
           return
         }
       }
-      if(FailureVars.instance.getServiceRestart().equals("true")
+      if (FailureVars.instance.getServiceRestart().equals("true")
         || FailureVars.instance.getServiceKill().equals("true")
         || FailureVars.instance.getNetworkShutdown().equals("true")) {
         runFailCommands()
         Thread.sleep(FailureVars.instance.getKillDuration())
-      }
-      else {
+      } else {
         if (failureDelay > 0) {
           try {
             Thread.sleep(failureDelay)

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/FailureExecutor.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/FailureExecutor.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/FailureExecutor.groovy
index daad092..7471186 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/FailureExecutor.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/FailureExecutor.groovy
@@ -31,15 +31,16 @@ public class FailureExecutor implements Runnable {
     if (startDelay > 0) {
       try {
         Thread.sleep(startDelay)
-      } catch (InterruptedException e) {}
+      } catch (InterruptedException e) {
+      }
     }
-    if(restart != null && restart.equals("true")) {
+    if (restart != null && restart.equals("true")) {
       serviceRestartExec();
     }
-    if(kill != null && kill.equals("true")) {
+    if (kill != null && kill.equals("true")) {
       serviceKillExec();
     }
-    if(shutdown != null && shutdown.equals("true")) {
+    if (shutdown != null && shutdown.equals("true")) {
       networkShutdownExec();
     }
   }

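FailureExecutor keys off the same FailureVars flags checked above, so a harness can drive it from a side thread while the functional tests run. A sketch, assuming the zero-argument construction implied by this diff (the constructor itself is not shown):

  import org.apache.bigtop.itest.failures.FailureExecutor

  // Illustrative: run the configured failures alongside the test workload.
  Thread failures = new Thread(new FailureExecutor())
  failures.start()
  // ... execute the actual test workload here ...
  failures.join()   // wait for the failure scenario to finish
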
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/FailureVars.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/FailureVars.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/FailureVars.groovy
index 6851337..9e9528c 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/FailureVars.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/FailureVars.groovy
@@ -50,13 +50,13 @@ public class FailureVars {
   }
 
   protected FailureVars() {
-    if(propertyFile != null) {
+    if (propertyFile != null) {
       loadProps();
     }
   }
 
   public static FailureVars getInstance() {
-    if(instance == null) {
+    if (instance == null) {
       instance = new FailureVars();
     }
     return instance;
@@ -65,8 +65,8 @@ public class FailureVars {
   private void loadProps() {
     try {
       File pFile = new File(propertyFile);
-      assert(pFile.exists()) : "Failure properties file cannot be read";
-      BufferedReader is = new BufferedReader (new InputStreamReader(getClass(pFile)));
+      assert (pFile.exists()): "Failure properties file cannot be read";
+      BufferedReader is = new BufferedReader(new InputStreamReader(new FileInputStream(pFile)));
       System.out.println("Input Stream Location: " + is);
       Properties props = new Properties();
       props.load(is);
@@ -82,7 +82,7 @@ public class FailureVars {
       startDelay = Long.parseLong(props.getProperty("startdelay"));
       killDuration = Long.parseLong(props.getProperty("killduration"));
     }
-    catch(IOException ioe) {
+    catch (IOException ioe) {
       System.out.println(ioe.getMessage());
     }
   }
@@ -124,8 +124,9 @@ public class FailureVars {
   }
 
   public long getKillDuration() {
-    return killDuration*1000;
+    return killDuration * 1000;
   }
+
   public String getTestHost() {
     return testHost;
   }
@@ -139,7 +140,7 @@ public class FailureVars {
   }
 
   public long getStartDelay() {
-    return startDelay*1000;
+    return startDelay * 1000;
   }
 
 
@@ -160,7 +161,7 @@ public class FailureVars {
   }
 
   public long getFailureDelay() {
-    return failureDelay*1000;
+    return failureDelay * 1000;
   }
 
   int getSleepTime() {

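loadProps() above fills FailureVars from a flat java.util.Properties file. Only the startdelay and killduration keys are visible in this hunk, and the getters multiply by 1000, so the values are in seconds; any service/host keys would be assumptions. An illustrative file:

  # Illustrative failure properties file; values are in seconds
  # (getStartDelay()/getKillDuration() above multiply by 1000).
  startdelay=30
  killduration=60
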
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/NetworkShutdownFailure.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/NetworkShutdownFailure.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/NetworkShutdownFailure.groovy
index 15bf797..c3483c2 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/NetworkShutdownFailure.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/NetworkShutdownFailure.groovy
@@ -48,8 +48,8 @@ public class NetworkShutdownFailure extends AbstractFailure {
   * @param startDelay time (in milliseconds) the failures will wait before start
    */
   public NetworkShutdownFailure(String srcHost,
-                               List<String> dstHosts,
-                               long startDelay) {
+                                List<String> dstHosts,
+                                long startDelay) {
 
     super(new ArrayList<String>(), startDelay)
     populateCommandsList(srcHost, dstHosts)
@@ -58,7 +58,8 @@ public class NetworkShutdownFailure extends AbstractFailure {
   /*
   * Populate the commands list, choosing between local and remote execution.
    */
-  private void populateCommandsList(String host, List<String> dstHosts){
+
+  private void populateCommandsList(String host, List<String> dstHosts) {
     if ("localhost".equalsIgnoreCase(host)) {
       dstHosts.each { dstHost ->
         failCommands.add(String.format(DROP_INPUT_CONNECTIONS, dstHost))

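The three-argument constructor reformatted above takes a source host, the destination hosts to cut off, and a start delay in milliseconds, so a usage sketch (host names are illustrative) reads:

  import org.apache.bigtop.itest.failures.NetworkShutdownFailure

  // Drop traffic between localhost and two datanodes after a 5s delay.
  def outage = new NetworkShutdownFailure("localhost",
      ["datanode1.example.org", "datanode2.example.org"], 5000)
  new Thread(outage).start()   // AbstractFailure.run() honors the delay
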
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/ServiceKilledFailure.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/ServiceKilledFailure.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/ServiceKilledFailure.groovy
index 413f171..2e31448 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/ServiceKilledFailure.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/ServiceKilledFailure.groovy
@@ -55,7 +55,8 @@ public class ServiceKilledFailure extends AbstractFailure {
   /*
   * Populate the commands list, choosing between local and remote execution.
    */
-  private void populateCommandsList(List<String> hosts, String serviceName){
+
+  private void populateCommandsList(List<String> hosts, String serviceName) {
     if (hosts.size() == 1 && "localhost".equalsIgnoreCase(hosts[0])) {
       failCommands.add(String.format(KILL_SERVICE_TEMPLATE, serviceName))
       restoreCommands.add(String.format(START_SERVICE_TEMPLATE, serviceName))

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/ServiceRestartFailure.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/ServiceRestartFailure.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/ServiceRestartFailure.groovy
index 6dd1005..213a067 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/ServiceRestartFailure.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/failures/ServiceRestartFailure.groovy
@@ -55,7 +55,8 @@ public class ServiceRestartFailure extends AbstractFailure {
   /*
   * Populate the commands list, choosing between local and remote execution.
    */
-  private void populateCommandsList(List<String> hosts, String serviceName){
+
+  private void populateCommandsList(List<String> hosts, String serviceName) {
     if (hosts.size() == 1 && "localhost".equalsIgnoreCase(hosts[0])) {
       failCommands.add(String.format(STOP_SERVICE_TEMPLATE, serviceName))
       restoreCommands.add(String.format(START_SERVICE_TEMPLATE, serviceName))

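ServiceKilledFailure and ServiceRestartFailure follow the same shape. A sketch, assuming a (hosts, serviceName) constructor matching the populateCommandsList() signature above (the constructors themselves are not shown in this diff; the service name is illustrative):

  import org.apache.bigtop.itest.failures.ServiceRestartFailure

  // Illustrative: bounce the namenode on the local box while tests run.
  def bounce = new ServiceRestartFailure(["localhost"], "hadoop-hdfs-namenode")
  new Thread(bounce).start()
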
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/junit/OrderedParameterized.java
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/junit/OrderedParameterized.java b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/junit/OrderedParameterized.java
index f86b22d..8df776f 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/junit/OrderedParameterized.java
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/junit/OrderedParameterized.java
@@ -36,15 +36,15 @@ import java.lang.reflect.Modifier;
 import java.util.*;
 
 /**
- * This is a modification of a Parameterized JUnit runner (which has been relicensed 
+ * This is a modification of a Parameterized JUnit runner (which has been relicensed
  * under APL for this very hack BIGTOP-18) that takes care of two things:
- *   1. it lets arrange individual tests into ordered sequence of run stages via adding a
- *      a &#064;RunStage(level=X) annotation to the desired testcases (default run stage
- *      is 0). Later on run stages are executed according to the order of their levels
- *      and testcases within the same run stage have no guaranteed order of execution.
- *   2. it lets give names to the parameterized testcases via making the factory method
- *      &#064;Parameters return a Map (mapping names to testcases) instead of a List.
- *
+ * 1. it lets you arrange individual tests into an ordered sequence of run stages by
+ * adding a &#064;RunStage(level=X) annotation to the desired testcases (the default
+ * run stage is 0). Run stages are then executed in the order of their levels, and
+ * testcases within the same run stage have no guaranteed order of execution.
+ * 2. it lets you give names to the parameterized testcases by making the factory method
+ * &#064;Parameters return a Map (mapping names to testcases) instead of a List.
+ * <p/>
  * Here's how to use it:
  * <pre>
  * public class Example {
@@ -67,120 +67,121 @@ import java.util.*;
  *    public static Map<String, Object[]> generateTests() {
  *      HashMap<String, Object[]> res = new HashMap();
  *      res.put("test name", new Object[] {1, 2});
-        return res;
+ *      return res;
  *    }
  * }
  * </pre>
- *
  */
 public class OrderedParameterized extends Suite {
-	/**
-	 * Annotation for a method which provides parameters to be injected into the
-	 * test class constructor by <code>Parameterized</code>
-	 */
-    @Retention(RetentionPolicy.RUNTIME)
-	@Target(ElementType.METHOD)
-    public @interface RunStage {
-      int level() default 0;
-    };
-
-    private class TestClassRunnerForParameters extends
-            BlockJUnit4ClassRunner {
-        private final String fParameterSetNumber;
-
-        private final Map<String, Object[]> fParameterList;
-
-        TestClassRunnerForParameters(Class<?> type,
-                Map<String, Object[]> parameterList, String i) throws InitializationError {
-            super(type);
-            fParameterList= parameterList;
-            fParameterSetNumber= i;
-        }
-
-        @Override
-        public Object createTest() throws Exception {
-            return getTestClass().getOnlyConstructor().newInstance(
-                    computeParams());
-        }
-
-        private Object[] computeParams() throws Exception {
-            try {
-                return fParameterList.get(fParameterSetNumber);
-            } catch (ClassCastException e) {
-                throw new Exception(String.format(
-                        "%s.%s() must return a Map from Strings to arrays.",
-                        getTestClass().getName(), getParametersMethod(
-                                getTestClass()).getName()));
-            }
-        }
-
-        @Override
-        protected List<FrameworkMethod> getChildren() {
-            List<FrameworkMethod> c = super.getChildren();
-            Collections.sort(c, new Comparator<FrameworkMethod>() {
-              public int compare(FrameworkMethod m1, FrameworkMethod m2) {
-                RunStage r1 = m1.getAnnotation(RunStage.class);
-                RunStage r2 = m2.getAnnotation(RunStage.class);
-                return ((r1 != null) ? r1.level() : 0) -
-                       ((r2 != null) ? r2.level() : 0);
-              }
-            });
-            return c;
-        }
-
-        @Override
-        protected String getName() {
-            return String.format("[%s]", fParameterSetNumber);
-        }
-
-        @Override
-        protected String testName(final FrameworkMethod method) {
-            return String.format("%s[%s]", method.getName(),
-                    fParameterSetNumber);
-        }
+  /**
+   * Annotation for a method which provides parameters to be injected into the
+   * test class constructor by <code>Parameterized</code>
+   */
+  @Retention(RetentionPolicy.RUNTIME)
+  @Target(ElementType.METHOD)
+  public @interface RunStage {
+    int level() default 0;
+  }
+
+  ;
+
+  private class TestClassRunnerForParameters extends
+      BlockJUnit4ClassRunner {
+    private final String fParameterSetNumber;
+
+    private final Map<String, Object[]> fParameterList;
+
+    TestClassRunnerForParameters(Class<?> type,
+                                 Map<String, Object[]> parameterList, String i) throws InitializationError {
+      super(type);
+      fParameterList = parameterList;
+      fParameterSetNumber = i;
+    }
 
-        @Override
-        protected void validateConstructor(List<Throwable> errors) {
-            validateOnlyOneConstructor(errors);
-        }
+    @Override
+    public Object createTest() throws Exception {
+      return getTestClass().getOnlyConstructor().newInstance(
+          computeParams());
+    }
 
-        @Override
-        protected Statement classBlock(RunNotifier notifier) {
-            return childrenInvoker(notifier);
-        }
+    private Object[] computeParams() throws Exception {
+      try {
+        return fParameterList.get(fParameterSetNumber);
+      } catch (ClassCastException e) {
+        throw new Exception(String.format(
+            "%s.%s() must return a Map from Strings to arrays.",
+            getTestClass().getName(), getParametersMethod(
+            getTestClass()).getName()));
+      }
     }
 
-    private FrameworkMethod getParametersMethod(TestClass testClass)
-            throws Exception {
-        List<FrameworkMethod> methods= testClass
-                .getAnnotatedMethods(Parameterized.Parameters.class);
-        for (FrameworkMethod each : methods) {
-            int modifiers= each.getMethod().getModifiers();
-            if (Modifier.isStatic(modifiers) && Modifier.isPublic(modifiers))
-                return each;
+    @Override
+    protected List<FrameworkMethod> getChildren() {
+      List<FrameworkMethod> c = super.getChildren();
+      Collections.sort(c, new Comparator<FrameworkMethod>() {
+        public int compare(FrameworkMethod m1, FrameworkMethod m2) {
+          RunStage r1 = m1.getAnnotation(RunStage.class);
+          RunStage r2 = m2.getAnnotation(RunStage.class);
+          return ((r1 != null) ? r1.level() : 0) -
+              ((r2 != null) ? r2.level() : 0);
         }
-
-        throw new Exception("No public static parameters method on class "
-                + testClass.getName());
+      });
+      return c;
     }
 
-    private final ArrayList<Runner> runners= new ArrayList<Runner>();
+    @Override
+    protected String getName() {
+      return String.format("[%s]", fParameterSetNumber);
+    }
 
     @Override
-    protected List<Runner> getChildren() {
-        return runners;
+    protected String testName(final FrameworkMethod method) {
+      return String.format("%s[%s]", method.getName(),
+          fParameterSetNumber);
     }
 
-    @SuppressWarnings("unchecked")
-    private Map<String, Object[]> getParametersList(TestClass klass) throws Throwable {
-        return (Map<String, Object[]>) getParametersMethod(klass).invokeExplosively(null);
+    @Override
+    protected void validateConstructor(List<Throwable> errors) {
+      validateOnlyOneConstructor(errors);
     }
 
-    public OrderedParameterized(Class<?> klass) throws Throwable {
-        super(klass, Collections.<Runner>emptyList());
-        Map<String, Object[]> parametersMap= getParametersList(getTestClass());
-        for (Map.Entry<String, Object[]> entry : parametersMap.entrySet())
-            runners.add(new TestClassRunnerForParameters(getTestClass().getJavaClass(),
-                    parametersMap, entry.getKey()));
+    @Override
+    protected Statement classBlock(RunNotifier notifier) {
+      return childrenInvoker(notifier);
+    }
+  }
+
+  private FrameworkMethod getParametersMethod(TestClass testClass)
+      throws Exception {
+    List<FrameworkMethod> methods = testClass
+        .getAnnotatedMethods(Parameterized.Parameters.class);
+    for (FrameworkMethod each : methods) {
+      int modifiers = each.getMethod().getModifiers();
+      if (Modifier.isStatic(modifiers) && Modifier.isPublic(modifiers))
+        return each;
     }
+
+    throw new Exception("No public static parameters method on class "
+        + testClass.getName());
+  }
+
+  private final ArrayList<Runner> runners = new ArrayList<Runner>();
+
+  @Override
+  protected List<Runner> getChildren() {
+    return runners;
+  }
+
+  @SuppressWarnings("unchecked")
+  private Map<String, Object[]> getParametersList(TestClass klass) throws Throwable {
+    return (Map<String, Object[]>) getParametersMethod(klass).invokeExplosively(null);
+  }
+
+  public OrderedParameterized(Class<?> klass) throws Throwable {
+    super(klass, Collections.<Runner>emptyList());
+    Map<String, Object[]> parametersMap = getParametersList(getTestClass());
+    for (Map.Entry<String, Object[]> entry : parametersMap.entrySet())
+      runners.add(new TestClassRunnerForParameters(getTestClass().getJavaClass(),
+          parametersMap, entry.getKey()));
+  }
 }

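To make the @RunStage contract concrete, a minimal illustrative client of this runner (one parameter set, two stages; class and values are examples) might read:

  import org.junit.Test
  import org.junit.runner.RunWith
  import org.junit.runners.Parameterized.Parameters
  import org.apache.bigtop.itest.junit.OrderedParameterized
  import org.apache.bigtop.itest.junit.OrderedParameterized.RunStage

  @RunWith(OrderedParameterized.class)
  class StagedExample {
    private int a

    StagedExample(int a) { this.a = a }   // single public constructor required

    @Parameters
    static Map<String, Object[]> generateTests() {
      return ["case one": [1] as Object[]]   // map keys become test names
    }

    @RunStage(level = -1)
    @Test void runsFirst() { assert a == 1 }

    @Test void runsSecond() { }   // default level 0 runs after level -1
  }
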
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/AptCmdLinePackageManager.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/AptCmdLinePackageManager.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/AptCmdLinePackageManager.groovy
index 49d2145..26fd7fd 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/AptCmdLinePackageManager.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/AptCmdLinePackageManager.groovy
@@ -20,13 +20,13 @@ package org.apache.bigtop.itest.pmanager
 
 class AptCmdLinePackageManager extends PackageManager {
   // FIXME: NB export DEBIAN_FRONTEND=noninteractive
-  String type  = "apt";
+  String type = "apt";
   String repository_registry = "/etc/apt/sources.list.d/%s.list";
 
   private static final ROOT_URL = "http://us.archive.ubuntu.com/ubuntu/";
 
   public void setDefaults(String defaults) {
-     shRoot.exec("debconf-set-selections <<__EOT__\n${defaults}\n__EOT__");
+    shRoot.exec("debconf-set-selections <<__EOT__\n${defaults}\n__EOT__");
   }
 
  // FIXME: Debian doesn't have a way of passing a full description
@@ -47,11 +47,11 @@ class AptCmdLinePackageManager extends PackageManager {
       url = ROOT_URL;
 
     if (key) {
-        def text = key.toURL().text;
-        shRoot.exec("apt-key add - <<__EOT__\n${text}\n__EOT__");
-        if (shRoot.getRet()) {
-          return shRoot.getRet();
-        }
+      def text = key.toURL().text;
+      shRoot.exec("apt-key add - <<__EOT__\n${text}\n__EOT__");
+      if (shRoot.getRet()) {
+        return shRoot.getRet();
+      }
     }
 
     return addBinRepo(record, "deb ${url} ${cookie}\ndeb-src ${url} ${cookie}");
@@ -70,7 +70,7 @@ class AptCmdLinePackageManager extends PackageManager {
   public List<PackageInstance> search(String name) {
     def packages = new ArrayList<PackageInstance>();
     shUser.exec("apt-cache search --names-only $name").out.each {
-      packages.add(PackageInstance.getPackageInstance (this, ((it =~ /^(.*)( - .*)$/)[0][1])))
+      packages.add(PackageInstance.getPackageInstance(this, ((it =~ /^(.*)( - .*)$/)[0][1])))
     }
     return packages
   }

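As a usage sketch for the reformatted search() (the package name is illustrative):

  import org.apache.bigtop.itest.pmanager.AptCmdLinePackageManager
  import org.apache.bigtop.itest.pmanager.PackageInstance

  def pm = new AptCmdLinePackageManager()
  List<PackageInstance> hits = pm.search("hadoop")   // apt-cache search --names-only
  hits.each { println it.name }
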
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/DEBPackage.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/DEBPackage.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/DEBPackage.groovy
index 6ddd0c4..bdda7cd 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/DEBPackage.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/DEBPackage.groovy
@@ -89,7 +89,7 @@ class DEBPackage extends ManagedPackage {
 
   public List<String> getFiles() {
     shUser.exec("env DEBIAN_FRONTEND=noninteractive dpkg -L $name");
-    return shUser.out.collect({"$it"});
+    return shUser.out.collect({ "$it" });
   }
 
   public List<String> getConfigs() {

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/PackageInstance.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/PackageInstance.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/PackageInstance.groovy
index 1cfc7d9..226b238 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/PackageInstance.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/PackageInstance.groovy
@@ -108,8 +108,8 @@ abstract class PackageInstance {
    * @param name package manager dependent name of a package
    */
   static public PackageInstance getPackageInstance(PackageManager mgr, String name) {
-    PackageInstance pkg = (mgr.type == "apt" ) ? new DEBPackage() :
-                                                 new RPMPackage();
+    PackageInstance pkg = (mgr.type == "apt") ? new DEBPackage() :
+      new RPMPackage();
     pkg.mgr = mgr;
     pkg.name = name;
     return pkg;

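The factory above is the only branch point between the two package flavors: apt managers get a DEBPackage, everything else an RPMPackage. Illustrative use (package name is an example):

  import org.apache.bigtop.itest.pmanager.PackageInstance
  import org.apache.bigtop.itest.pmanager.YumCmdLinePackageManager

  def pm = new YumCmdLinePackageManager()   // any concrete manager works
  PackageInstance pkg = PackageInstance.getPackageInstance(pm, "hadoop")
  assert pkg.name == "hadoop"               // yum manager -> RPMPackage instance
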
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/PackageManager.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/PackageManager.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/PackageManager.groovy
index c8a6178..9e58ed6 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/PackageManager.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/PackageManager.groovy
@@ -40,17 +40,18 @@ public abstract class PackageManager {
    * @param cookie an optional, package manager specific opaque string
   * @return int return code of the operation: 0 in case of success, non-zero otherwise
    */
-  abstract public int addBinRepo(String record, String url, String key, String cookie)
-  /**
-  * Register a binary package repository so that packages can be accessed from it.
-  * NOTE: repository management is assumed to follow a KVP API with unique implementation
-  * specific keys (records) referencing tuples of information describing a repository
-  *
-  * @param record a package manager specific KEY portion of the repository registration (null is default)
-  * @param url a URL containing the packages constituting the repository (null is default)
-  * @param key an optional (can be null) cryptographic key for authenticating the content of the repository
-  * @return int return code of the operation: o in case of success, non-zero otherwise
-  */
+  abstract
+  public int addBinRepo(String record, String url, String key, String cookie)
+  /**
+   * Register a binary package repository so that packages can be accessed from it.
+   * NOTE: repository management is assumed to follow a KVP API with unique implementation
+   * specific keys (records) referencing tuples of information describing a repository
+   *
+   * @param record a package manager specific KEY portion of the repository registration (null is default)
+   * @param url a URL containing the packages constituting the repository (null is default)
+   * @param key an optional (can be null) cryptographic key for authenticating the content of the repository
+   * @return int return code of the operation: 0 in case of success, non-zero otherwise
+   */
   public int addBinRepo(String record, String url, String key) {
     addBinRepo(record, url);
   }
@@ -80,7 +81,6 @@ public abstract class PackageManager {
    */
   abstract public int refresh()
 
-
   /**
    * De-register a binary package repository.
    *
@@ -133,9 +133,9 @@ public abstract class PackageManager {
    * @param pkg a package that is expected to provide 0, 1 or multiple services
    * @return list of Service instances
    */
-   public Map<String, Service> getServices(PackageInstance pkg) {
-     return pkg.getServices();
-   }
+  public Map<String, Service> getServices(PackageInstance pkg) {
+    return pkg.getServices();
+  }
 
   /**
    * List a content of a given package
@@ -143,9 +143,9 @@ public abstract class PackageManager {
    * @param pkg a package that is expected to provide >1 entry in its content
   * @return list of file and directory names that belong to the package.
    */
-   public List<String> getContentList(PackageInstance pkg) {
-     return pkg.getFiles();
-   }
+  public List<String> getContentList(PackageInstance pkg) {
+    return pkg.getFiles();
+  }
 
   /**
    * List config files in a given package
@@ -153,9 +153,9 @@ public abstract class PackageManager {
    * @param pkg a package in question
   * @return list of config file names that belong to the package.
    */
-   public List<String> getConfigs(PackageInstance pkg) {
-     return pkg.getConfigs();
-   }
+  public List<String> getConfigs(PackageInstance pkg) {
+    return pkg.getConfigs();
+  }
 
   /**
    * List documentation files in a given package
@@ -163,15 +163,15 @@ public abstract class PackageManager {
    * @param pkg a package in question
   * @return list of documentation file names that belong to the package.
    */
-   public List<String> getDocs(PackageInstance pkg) {
-     return pkg.getDocs();
-   }
+  public List<String> getDocs(PackageInstance pkg) {
+    return pkg.getDocs();
+  }
 
   /**
   * Type of a package manager. Expected to be overridden by concrete subclasses implementing
    * particular package managers (yum, apt, zypper, etc.)
    */
-  String type  = "abstract"
+  String type = "abstract"
 
   /**
    * A registry location for repositories to be added to. Currently all the package managers
@@ -204,11 +204,11 @@ public abstract class PackageManager {
    * @deprecated it is now recommended to use getServices() instead
    */
   @Deprecated
-   public void svc_do(PackageInstance pkg, String action) {
-     pkg.getServices().each {
-       it."$action"()
-     }
-   }
+  public void svc_do(PackageInstance pkg, String action) {
+    pkg.getServices().each {
+      it."$action"()
+    }
+  }
 
   /**
    * Returns a concrete implementation of PackageManager specific for a given linux

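The delegating helpers reformatted above (getServices, getContentList, getConfigs, getDocs) make the manager a façade over PackageInstance, so smoke tests can stay flavor-agnostic. A sketch (package name is illustrative):

  import org.apache.bigtop.itest.pmanager.PackageInstance
  import org.apache.bigtop.itest.pmanager.YumCmdLinePackageManager
  import org.apache.bigtop.itest.posix.Service

  def pm = new YumCmdLinePackageManager()
  def pkg = PackageInstance.getPackageInstance(pm, "hadoop-hdfs-namenode")
  Map<String, Service> svcs = pm.getServices(pkg)   // delegates to pkg.getServices()
  List<String> files = pm.getContentList(pkg)       // delegates to pkg.getFiles()
  svcs.each { name, svc -> println name }
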
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/RPMPackage.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/RPMPackage.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/RPMPackage.groovy
index 080cf67..e61668a 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/RPMPackage.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/RPMPackage.groovy
@@ -42,7 +42,7 @@ class RPMPackage extends ManagedPackage {
     text.each {
       // theoretically RPM can generate multiline output for any field, but we are only allowing description & summary
       if (curMetaKey == "description" || ((it =~ /^\s+: /).find() && curMetaKey == "summary")) {
-        pkg.meta[curMetaKey] <<= "\n${it.replaceAll(/^\s+:/,'')}";
+        pkg.meta[curMetaKey] <<= "\n${it.replaceAll(/^\s+:/, '')}";
       } else {
         def m = (it =~ /(\S+)\s*:\s*(.*)/);
         if (m.size()) {
@@ -115,17 +115,17 @@ Description: %{DESCRIPTION}
 
   List<String> getFiles() {
     shUser.exec("rpm -ql $name | grep -v '^(contains no files)\$'");
-    return shUser.out.collect({"$it"});
+    return shUser.out.collect({ "$it" });
   }
 
   List<String> getConfigs() {
     shUser.exec("rpm -qc $name | grep -v '^(contains no files)\$'");
-    return shUser.out.collect({"$it"});
+    return shUser.out.collect({ "$it" });
   }
 
   List<String> getDocs() {
     shUser.exec("rpm -qd $name | grep -v '^(contains no files)\$'");
-    return shUser.out.collect({"$it"});
+    return shUser.out.collect({ "$it" });
   }
 
   Map<String, String> getDeps() {

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/UrpmiCmdLinePackageManager.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/UrpmiCmdLinePackageManager.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/UrpmiCmdLinePackageManager.groovy
index 544131e..7006df2 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/UrpmiCmdLinePackageManager.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/UrpmiCmdLinePackageManager.groovy
@@ -21,8 +21,8 @@ package org.apache.bigtop.itest.pmanager
 import org.apache.bigtop.itest.posix.Service
 
 class UrpmiCmdLinePackageManager extends PackageManager {
-  String type  = "urpmi";
-  
+  String type = "urpmi";
+
   public void setDefaults(String defaults) {}
 
   public int addBinRepo(String record, String url, String key, String cookie) {
@@ -48,7 +48,7 @@ class UrpmiCmdLinePackageManager extends PackageManager {
   public List<PackageInstance> search(String name) {
     def packages = new ArrayList<PackageInstance>();
     shUser.exec("urpmq ${name} | sed -e 's/|/\\n/g' | uniq").out.each {
-      packages.add(PackageInstance.getPackageInstance (this, it))
+      packages.add(PackageInstance.getPackageInstance(this, it))
     }
     return packages
   }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/YumCmdLinePackageManager.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/YumCmdLinePackageManager.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/YumCmdLinePackageManager.groovy
index 32eae69..d97824b 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/YumCmdLinePackageManager.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/YumCmdLinePackageManager.groovy
@@ -21,7 +21,7 @@ package org.apache.bigtop.itest.pmanager
 import org.apache.bigtop.itest.posix.Service
 
 class YumCmdLinePackageManager extends PackageManager {
-  String type  = "yum";
+  String type = "yum";
   String repository_registry = "/etc/yum.repos.d/%s.repo";
 
   public void setDefaults(String defaults) {}
@@ -31,7 +31,7 @@ class YumCmdLinePackageManager extends PackageManager {
 name="${cookie}"
 baseurl=${url}
 gpgkey=${key}
-gpgcheck=${(key!=null)?1:0}""";
+gpgcheck=${(key != null) ? 1 : 0}""";
 
     return addBinRepo(record, descr);
   }
@@ -41,7 +41,7 @@ gpgcheck=${(key!=null)?1:0}""";
     return 0;
   }
 
-   public int cleanup() {
+  public int cleanup() {
     shRoot.exec("yum clean all");
     return shRoot.getRet();
   }
@@ -50,7 +50,7 @@ gpgcheck=${(key!=null)?1:0}""";
     def packages = new ArrayList<PackageInstance>();
     shUser.exec("yum --color=never -d 0 search $name").out.each {
       if (!(it =~ /^(===================| +: )/)) {
-        packages.add(PackageInstance.getPackageInstance (this, it.replaceAll(/\.(noarch|i386|x86_64).*$/, '')))
+        packages.add(PackageInstance.getPackageInstance(this, it.replaceAll(/\.(noarch|i386|x86_64).*$/, '')))
       }
     }
     return packages

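For reference, the here-doc template above renders into a stanza under /etc/yum.repos.d/%s.repo roughly like the following (the [section] header line sits above this hunk and is assumed; URLs are illustrative):

  [bigtop]
  name="bigtop"
  baseurl=http://example.org/bigtop/repo
  gpgkey=http://example.org/bigtop/RPM-GPG-KEY
  gpgcheck=1
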
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/ZypperCmdLinePackageManager.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/ZypperCmdLinePackageManager.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/ZypperCmdLinePackageManager.groovy
index 22d02db..326ea8c 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/ZypperCmdLinePackageManager.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/pmanager/ZypperCmdLinePackageManager.groovy
@@ -21,9 +21,9 @@ package org.apache.bigtop.itest.pmanager
 import org.apache.bigtop.itest.posix.Service
 
 class ZypperCmdLinePackageManager extends PackageManager {
-  String type  = "zypper";
+  String type = "zypper";
   String repository_registry = "/etc/zypp/repos.d/%s.repo";
-  
+
   private String key_opts = "--gpg-auto-import-keys";
 
   public void setDefaults(String defaults) {}
@@ -51,7 +51,7 @@ class ZypperCmdLinePackageManager extends PackageManager {
   public List<PackageInstance> search(String name) {
     def packages = new ArrayList<PackageInstance>();
     shUser.exec("zypper search $name").out.each {
-      packages.add(PackageInstance.getPackageInstance (this, ((it =~ /^(.*|)(.*)(|.*)$/)[0][2])))
+      packages.add(PackageInstance.getPackageInstance(this, ((it =~ /^(.*|)(.*)(|.*)$/)[0][2])))
     }
     return packages
   }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/posix/Alternative.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/posix/Alternative.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/posix/Alternative.groovy
index b5f2c27..25b9e9a 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/posix/Alternative.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/posix/Alternative.groovy
@@ -26,7 +26,7 @@ class Alternative {
   String value;
   String status;
 
-  Map    alts;
+  Map alts;
 
   Shell shUser = new Shell();
   Shell shRoot = new Shell("/bin/bash -s", "root");
@@ -47,23 +47,23 @@ class Alternative {
       def m = (it =~ /([^:]+):(.*)/);
       if (m.size()) {
         String val = (m[0][2]).trim();
-        switch(m[0][1]) {
-          case "Best"  :   best = val;
-                           break;
-          case "Value" :   value = val;
-                           break;
-          case "Status":   status = val;
-                           break;
+        switch (m[0][1]) {
+          case "Best": best = val;
+            break;
+          case "Value": value = val;
+            break;
+          case "Status": status = val;
+            break;
           case "Priority": curAlt["priority"] = val;
-                           break;
-          case "Slaves":   curAlt["slaves"] = [:];
-                           break;
+            break;
+          case "Slaves": curAlt["slaves"] = [:];
+            break;
           case "Alternative": alts[val] = [:];
-                              curAlt = alts[val];
-                              break;
+            curAlt = alts[val];
+            break;
         }
       } else if ((it =~ /^ /).find()) {
-        curAlt["slaves"][it.trim().replaceAll(/ .*$/,"")] = it.replaceAll(/ \S+ /, "");
+        curAlt["slaves"][it.trim().replaceAll(/ .*$/, "")] = it.replaceAll(/ \S+ /, "");
       }
     }
   }
@@ -79,33 +79,34 @@ class Alternative {
    *   Current `best' version is /bin/ksh93.
    * #
    */
+
   private void parse_display(List<String> metadata) {
     Map curAlt = [:];
     String val;
     metadata.each {
-      switch(it) {
-        case ~/^Current.*version is.*/ :
-                         best = it.replaceAll(/^Current.*version is\s+/, "").
-                                   replaceAll(/\s*\.$/, "");
-                         break;
-        case ~/.*link currently points to.*/ :
-                         value = it.replaceAll(/^.*link currently points to\s+/, "");
-                         break;
-        case ~/.* status is .*/ :
-                         status = it.replaceAll(/^.* status is\s+/, "").
-                                     replaceAll(/\s*\.$/, "");
-                         break;
-        case ~/^ slave .*/ :
-                         val = it.replaceAll(/^ slave /, "").replaceAll(/:.*$/, "");
-                         curAlt["slaves"][val] = it.replaceAll(/^.*: /, "").trim();
-                         break;
-        case ~/.*priority.*/ :
-                         val = it.replaceAll(/ - priority .*$/,"");
-                         alts[val] = [:];
-                         curAlt = alts[val];
-                         curAlt["priority"] = it.replaceAll(/^.* - priority /, "").trim();
-                         curAlt["slaves"] = [:];
-                         break;
+      switch (it) {
+        case ~/^Current.*version is.*/:
+          best = it.replaceAll(/^Current.*version is\s+/, "").
+            replaceAll(/\s*\.$/, "");
+          break;
+        case ~/.*link currently points to.*/:
+          value = it.replaceAll(/^.*link currently points to\s+/, "");
+          break;
+        case ~/.* status is .*/:
+          status = it.replaceAll(/^.* status is\s+/, "").
+            replaceAll(/\s*\.$/, "");
+          break;
+        case ~/^ slave .*/:
+          val = it.replaceAll(/^ slave /, "").replaceAll(/:.*$/, "");
+          curAlt["slaves"][val] = it.replaceAll(/^.*: /, "").trim();
+          break;
+        case ~/.*priority.*/:
+          val = it.replaceAll(/ - priority .*$/, "");
+          alts[val] = [:];
+          curAlt = alts[val];
+          curAlt["priority"] = it.replaceAll(/^.* - priority /, "").trim();
+          curAlt["slaves"] = [:];
+          break;
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/posix/UGI.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/posix/UGI.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/posix/UGI.groovy
index c47362b..d576b09 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/posix/UGI.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/posix/UGI.groovy
@@ -49,22 +49,22 @@ class UGI {
     (new File("/etc/passwd")).eachLine {
       // NOTE: the :x bit is a workaround for split to work on strings like +::::::
       def chunks = "${it}:x".split(':');
-      users[chunks[0]] = [ passwd : chunks[1],
-                           uid    : chunks[2],
-                           gid    : chunks[3],
-                           descr  : chunks[4],
-                           home   : chunks[5],
-                           shell  : chunks[6],
-                         ];
+      users[chunks[0]] = [passwd: chunks[1],
+        uid: chunks[2],
+        gid: chunks[3],
+        descr: chunks[4],
+        home: chunks[5],
+        shell: chunks[6],
+      ];
       tmp_groups[chunks[3]] = chunks[0];
     }
     (new File("/etc/group")).eachLine {
       def chunks = it.split(':');
 
-      groups[chunks[0]] = [ passwd : chunks[1],
-                            gid    : chunks[2],
-                            users  : ((chunks.size() == 4) ? chunks[3].split(',').toList() : []),
-                          ];
+      groups[chunks[0]] = [passwd: chunks[1],
+        gid: chunks[2],
+        users: ((chunks.size() == 4) ? chunks[3].split(',').toList() : []),
+      ];
 
       if (tmp_groups[chunks[2]] != null) {
         groups[chunks[0]].users.add(tmp_groups[chunks[2]]);

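UGI parses /etc/passwd and /etc/group into the users and groups maps keyed by name, with the per-entry keys visible above (passwd, uid, gid, descr, home, shell). A sketch, assuming plain construction (the constructor is not shown in this diff):

  import org.apache.bigtop.itest.posix.UGI

  def ugi = new UGI()                // zero-arg construction is an assumption
  println ugi.users['root'].home     // e.g. /root
  println ugi.groups['root'].users   // members of the root group
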
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/JUnitShell.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/JUnitShell.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/JUnitShell.groovy
index 5fd6da6..9a03f0a 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/JUnitShell.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/JUnitShell.groovy
@@ -78,8 +78,8 @@ class JUnitShell extends Shell {
     if (result != expectedExitCode) {
       dumpOutput()
       Assert.assertEquals(
-          "Wrong exit code of script ${script}" as String,
-          expectedExitCode, result)
+        "Wrong exit code of script ${script}" as String,
+        expectedExitCode, result)
     }
   }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/OS.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/OS.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/OS.groovy
index c76bfb1..198ff8c 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/OS.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/OS.groovy
@@ -34,9 +34,9 @@ class OS {
     isLinux = (System.getProperty('os.name') =~ /(?i)linux/).matches();
 
     if (isLinux) {
-        linux_flavor = "lsb_release -i -s".execute().text.trim();
-        linux_codename = "lsb_release -c -s".execute().text.trim();
-        linux_release = "lsb_release -r -s".execute().text.trim();
+      linux_flavor = "lsb_release -i -s".execute().text.trim();
+      linux_codename = "lsb_release -c -s".execute().text.trim();
+      linux_release = "lsb_release -r -s".execute().text.trim();
     }
   }
 }

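Callers read the fields set in the initializer above, e.g. (assuming the fields are static, as the initializer block suggests):

  import org.apache.bigtop.itest.shell.OS

  if (OS.isLinux) {
    // e.g. "Ubuntu 12.04 (precise)" depending on lsb_release output
    println "${OS.linux_flavor} ${OS.linux_release} (${OS.linux_codename})"
  }
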
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/Shell.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/Shell.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/Shell.groovy
index bddff00..b105ee9 100644
--- a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/Shell.groovy
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/shell/Shell.groovy
@@ -67,10 +67,10 @@ class Shell {
    */
   Shell exec(Object... args) {
     def proc = user ? "sudo -u $user PATH=${System.getenv('PATH')} $shell".execute() :
-                                    "$shell".execute()
+      "$shell".execute()
     script = args.join("\n")
     if (LOG.isTraceEnabled()) {
-        LOG.trace("${shell} << __EOT__\n${script}\n__EOT__");
+      LOG.trace("${shell} << __EOT__\n${script}\n__EOT__");
     }
 
     Thread.start {
@@ -86,8 +86,7 @@ class Shell {
     // empty String
     if (baosErr.size() != 0) {
       err = baosErr.toString().split('\n');
-    }
-    else {
+    } else {
       err = new ArrayList<String>();
     }
 
@@ -95,15 +94,15 @@ class Shell {
     ret = proc.exitValue()
 
     if (LOG.isTraceEnabled()) {
-        if (ret != 0) {
-           LOG.trace("return: $ret");
-        }
-        if (out.size() != 0) {
-           LOG.trace("\n<stdout>\n${out.join('\n')}\n</stdout>");
-        }
-        if (err.size() != 0) {
-           LOG.trace("\n<stderr>\n${err.join('\n')}\n</stderr>");
-        }
+      if (ret != 0) {
+        LOG.trace("return: $ret");
+      }
+      if (out.size() != 0) {
+        LOG.trace("\n<stdout>\n${out.join('\n')}\n</stdout>");
+      }
+      if (err.size() != 0) {
+        LOG.trace("\n<stderr>\n${err.join('\n')}\n</stderr>");
+      }
     }
 
     return this
@@ -123,7 +122,7 @@ class Shell {
    */
   @Override
   String toString() {
-    return signCorrectedReturnCode() + " =>\"" + (script ?: "(no script)") +"\""
+    return signCorrectedReturnCode() + " =>\"" + (script ?: "(no script)") + "\""
   }
 
   /**

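Putting the exec() plumbing above to work, a minimal round trip (the root variant appears elsewhere in this commit as new Shell("/bin/bash -s", "root")):

  import org.apache.bigtop.itest.shell.Shell

  def sh = new Shell()
  sh.exec("echo hello", "uname -m")   // varargs are joined with newlines
  assert sh.getRet() == 0             // exit code of the shell session
  println sh.getOut()                 // stdout lines, e.g. [hello, x86_64]
  println sh.getErr()                 // stderr lines, empty list on success
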
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/JUnitUtilsTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/JUnitUtilsTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/JUnitUtilsTest.groovy
index 1fe3d03..bf66351 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/JUnitUtilsTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/JUnitUtilsTest.groovy
@@ -28,7 +28,7 @@ public class JUnitUtilsTest {
   @AfterClass
   static void tearDown() {
     def testReports = ['TEST-org.apache.bigtop.itest.DummyTestError.xml', 'TEST-org.apache.bigtop.itest.DummyTestFail.xml',
-                       'TEST-org.apache.bigtop.itest.DummyTestPass.xml', '/tmp/TEST-org.apache.bigtop.itest.DummyTestPass.xml'];
+      'TEST-org.apache.bigtop.itest.DummyTestPass.xml', '/tmp/TEST-org.apache.bigtop.itest.DummyTestPass.xml'];
     testReports.each {
       (new File(it)).delete()
     }
@@ -36,22 +36,22 @@ public class JUnitUtilsTest {
 
   @Test
   void testPassingTest() {
-    assertTrue('DummyPass test is reported as failing', 
-               JUnitUtils.executeTests(DummyTestPass.class));
+    assertTrue('DummyPass test is reported as failing',
+      JUnitUtils.executeTests(DummyTestPass.class));
   }
-  
+
   @Test
   void testFailingTest() {
    println('Disclaimer: This test is expected to show a failure in an embedded testcase')
     assertFalse('DummyFail test is reported as passing',
-               JUnitUtils.executeTests(DummyTestPass.class, DummyTestFail.class));
+      JUnitUtils.executeTests(DummyTestPass.class, DummyTestFail.class));
   }
 
   @Test
   void testErrorTest() {
    println('Disclaimer: This test is expected to show an error in an embedded testcase')
    assertFalse('DummyError test is reported as passing',
-               JUnitUtils.executeTests(DummyTestError.class));
+      JUnitUtils.executeTests(DummyTestError.class));
   }
 
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/JarContentTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/JarContentTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/JarContentTest.groovy
index c49f657..b583d8a 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/JarContentTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/JarContentTest.groovy
@@ -29,6 +29,7 @@ class JarContentTest {
     def list = JarContent.listContent(env['JAVA_HOME'] + '/lib/tools.jar');
     assertTrue("Jar content should be greater than 10", list.size() > 10);
   }
+
   @Test(expected = IOException.class)
   void testJarContentNeg() {
     def env = System.getenv();
@@ -36,7 +37,7 @@ class JarContentTest {
       JarContent.listContent(env['JAVA_HOME'] + '/lib/nofilelikethat.jar').each {
         println it;
       }
-      assert("IOException should have been thrown");
+      assert ("IOException should have been thrown");
     } catch (IOException e) {
       throw e;
     };
@@ -61,10 +62,10 @@ class JarContentTest {
   @Test
   void testGetJarName() {
     assertEquals("Should've find tools.jar file",
-        'tools.jar',
-        JarContent.getJarName(System.getenv()['JAVA_HOME'] + '/lib/', 't.*.jar'));
+      'tools.jar',
+      JarContent.getJarName(System.getenv()['JAVA_HOME'] + '/lib/', 't.*.jar'));
     assertEquals("Should not have found tools.jar file", null,
-        JarContent.getJarName(System.getenv()['JAVA_HOME'] + '/lib/', 'nosuch-file.*.jar'));
+      JarContent.getJarName(System.getenv()['JAVA_HOME'] + '/lib/', 'nosuch-file.*.jar'));
   }
 
   // ClassNotException is expected to be thrown in case of non-existing class

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractGroovy.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractGroovy.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractGroovy.groovy
index 9cfc06c..625a718 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractGroovy.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractGroovy.groovy
@@ -29,16 +29,16 @@ import org.apache.bigtop.itest.Variable;
 
 @Contract(
   properties = [
-    @Property(name="foo.int1", type=Property.Type.INT, intValue=1000),
-    @Property(name="foo.int2", type=Property.Type.INT),
-    @Property(name="foo.bar1", type=Property.Type.STRING, defaultValue="xyz"),
-    @Property(name="foo.bar2", type=Property.Type.STRING),
-    @Property(name="foo.bool1", type=Property.Type.BOOLEAN),
-    @Property(name="foo.bool2", type=Property.Type.BOOLEAN)
+  @Property(name = "foo.int1", type = Property.Type.INT, intValue = 1000),
+  @Property(name = "foo.int2", type = Property.Type.INT),
+  @Property(name = "foo.bar1", type = Property.Type.STRING, defaultValue = "xyz"),
+  @Property(name = "foo.bar2", type = Property.Type.STRING),
+  @Property(name = "foo.bool1", type = Property.Type.BOOLEAN),
+  @Property(name = "foo.bool2", type = Property.Type.BOOLEAN)
   ],
   env = [
-    @Variable(name="HOME"),
-    @Variable(name="BIGTOP_UNLIKELY_FOO_ENV", required=false)
+  @Variable(name = "HOME"),
+  @Variable(name = "BIGTOP_UNLIKELY_FOO_ENV", required = false)
   ]
 )
 class TestContractGroovy {
@@ -64,24 +64,24 @@ class TestContractGroovy {
   @Test
   void testPropSettings() {
     assertEquals("checking the value of foo_int1 from default value",
-                  1000, foo_int1);
+      1000, foo_int1);
     assertEquals("checking the value of foo_int2 from foo.int2",
-                  100, foo_int2);
+      100, foo_int2);
     assertEquals("checking the value of foo_bar1 from default value",
-                  "xyz", foo_bar1);
+      "xyz", foo_bar1);
     assertEquals("checking the value of foo_bar2 from unset value",
-                  "", foo_bar2);
+      "", foo_bar2);
     assertEquals("checking the value of foo_bool1 from unset value",
-                  false, foo_bool1);
+      false, foo_bool1);
     assertEquals("checking the value of foo_bar2 from foo.bool2",
-                  true, foo_bool2);
+      true, foo_bool2);
   }
 
   @Test
   void testEnvSettings() {
     assertEquals("checking the value of \$HOME",
-                 System.getenv("HOME"), HOME);
+      System.getenv("HOME"), HOME);
     assertEquals("checking the value of \$BIGTOP_UNLIKELY_FOO_ENV",
-                 null, BIGTOP_UNLIKELY_FOO_ENV);
+      null, BIGTOP_UNLIKELY_FOO_ENV);
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJava.java
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJava.java b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJava.java
index 6210835..432ce1e 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJava.java
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJava.java
@@ -20,6 +20,7 @@ package org.apache.bigtop.itest;
 
 import org.junit.BeforeClass;
 import org.junit.Test;
+
 import static org.junit.Assert.*;
 
 import org.apache.bigtop.itest.Contract;
@@ -28,60 +29,60 @@ import org.apache.bigtop.itest.Property;
 import org.apache.bigtop.itest.Variable;
 
 @Contract(
-        properties = {
-                @Property(name="foo.int1", type=Property.Type.INT, intValue=1000),
-                @Property(name="foo.int2", type=Property.Type.INT),
-                @Property(name="foo.bar1", type=Property.Type.STRING, defaultValue="xyz"),
-                @Property(name="foo.bar2", type=Property.Type.STRING),
-                @Property(name="foo.bool1", type=Property.Type.BOOLEAN),
-                @Property(name="foo.bool2", type=Property.Type.BOOLEAN)
-        },
-        env = {
-                @Variable(name="HOME"),
-                @Variable(name="BIGTOP_UNLIKELY_FOO_ENV", required=false)
-        }
+    properties = {
+        @Property(name = "foo.int1", type = Property.Type.INT, intValue = 1000),
+        @Property(name = "foo.int2", type = Property.Type.INT),
+        @Property(name = "foo.bar1", type = Property.Type.STRING, defaultValue = "xyz"),
+        @Property(name = "foo.bar2", type = Property.Type.STRING),
+        @Property(name = "foo.bool1", type = Property.Type.BOOLEAN),
+        @Property(name = "foo.bool2", type = Property.Type.BOOLEAN)
+    },
+    env = {
+        @Variable(name = "HOME"),
+        @Variable(name = "BIGTOP_UNLIKELY_FOO_ENV", required = false)
+    }
 )
 public class TestContractJava {
-    public static int foo_int1;
-    public static int foo_int2;
-    protected static String foo_bar1;
-    protected static String foo_bar2;
-    private static boolean foo_bool1;
-    private static boolean foo_bool2;
+  public static int foo_int1;
+  public static int foo_int2;
+  protected static String foo_bar1;
+  protected static String foo_bar2;
+  private static boolean foo_bool1;
+  private static boolean foo_bool2;
 
-    static String HOME;
-    static String BIGTOP_UNLIKELY_FOO_ENV;
+  static String HOME;
+  static String BIGTOP_UNLIKELY_FOO_ENV;
 
-    @BeforeClass
-    public static void setUp() throws ClassNotFoundException, InterruptedException, NoSuchFieldException, IllegalAccessException {
-        System.setProperty("foo.int2", "100");
-        System.setProperty("foo.bool2", "true");
+  @BeforeClass
+  public static void setUp() throws ClassNotFoundException, InterruptedException, NoSuchFieldException, IllegalAccessException {
+    System.setProperty("foo.int2", "100");
+    System.setProperty("foo.bool2", "true");
 
-        ParameterSetter.setProperties(TestContractJava.class);
-        ParameterSetter.setEnv(TestContractJava.class);
-    }
+    ParameterSetter.setProperties(TestContractJava.class);
+    ParameterSetter.setEnv(TestContractJava.class);
+  }
 
-    @Test
-    public void testPropSettings() {
-        assertEquals("checking the value of foo_int1 from default value",
-                1000, foo_int1);
-        assertEquals("checking the value of foo_int2 from foo.int2",
-                100, foo_int2);
-        assertEquals("checking the value of foo_bar1 from default value",
-                "xyz", foo_bar1);
-        assertEquals("checking the value of foo_bar2 from unset value",
-                "", foo_bar2);
-        assertEquals("checking the value of foo_bool1 from unset value",
-                false, foo_bool1);
-        assertEquals("checking the value of foo_bar2 from foo.bool2",
-                true, foo_bool2);
-    }
+  @Test
+  public void testPropSettings() {
+    assertEquals("checking the value of foo_int1 from default value",
+        1000, foo_int1);
+    assertEquals("checking the value of foo_int2 from foo.int2",
+        100, foo_int2);
+    assertEquals("checking the value of foo_bar1 from default value",
+        "xyz", foo_bar1);
+    assertEquals("checking the value of foo_bar2 from unset value",
+        "", foo_bar2);
+    assertEquals("checking the value of foo_bool1 from unset value",
+        false, foo_bool1);
+    assertEquals("checking the value of foo_bar2 from foo.bool2",
+        true, foo_bool2);
+  }
 
-    @Test
-    public void testEnvSettings() {
-        assertEquals("checking the value of $HOME",
-                System.getenv("HOME"), HOME);
-        assertEquals("checking the value of $BIGTOP_UNLIKELY_FOO_ENV",
-                null, BIGTOP_UNLIKELY_FOO_ENV);
-    }
+  @Test
+  public void testEnvSettings() {
+    assertEquals("checking the value of $HOME",
+        System.getenv("HOME"), HOME);
+    assertEquals("checking the value of $BIGTOP_UNLIKELY_FOO_ENV",
+        null, BIGTOP_UNLIKELY_FOO_ENV);
+  }
 }


http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJavaProc.java
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJavaProc.java b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJavaProc.java
index 557266b..cc9a513 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJavaProc.java
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestContractJavaProc.java
@@ -20,6 +20,7 @@ package org.apache.bigtop.itest;
 
 import org.junit.BeforeClass;
 import org.junit.Test;
+
 import static org.junit.Assert.*;
 
 import org.apache.bigtop.itest.Contract;
@@ -28,62 +29,62 @@ import org.apache.bigtop.itest.Property;
 import org.apache.bigtop.itest.Variable;
 
 @Contract(
-        properties = {
-                @Property(name="foo.int1", type=Property.Type.INT, intValue=1000),
-                @Property(name="foo.int2", type=Property.Type.INT),
-                @Property(name="foo.bar1", type=Property.Type.STRING, defaultValue="xyz"),
-                @Property(name="foo.bar2", type=Property.Type.STRING),
-                @Property(name="foo.bool1", type=Property.Type.BOOLEAN),
-                @Property(name="foo.bool2", type=Property.Type.BOOLEAN)
-        },
-        env = {
-                @Variable(name="HOME"),
-                @Variable(name="BIGTOP_UNLIKELY_FOO_ENV", required=false)
-        }
+    properties = {
+        @Property(name = "foo.int1", type = Property.Type.INT, intValue = 1000),
+        @Property(name = "foo.int2", type = Property.Type.INT),
+        @Property(name = "foo.bar1", type = Property.Type.STRING, defaultValue = "xyz"),
+        @Property(name = "foo.bar2", type = Property.Type.STRING),
+        @Property(name = "foo.bool1", type = Property.Type.BOOLEAN),
+        @Property(name = "foo.bool2", type = Property.Type.BOOLEAN)
+    },
+    env = {
+        @Variable(name = "HOME"),
+        @Variable(name = "BIGTOP_UNLIKELY_FOO_ENV", required = false)
+    }
 )
 public class TestContractJavaProc {
-    public static int foo_int1;
-    public static int foo_int2;
-    protected static String foo_bar1;
-    protected static String foo_bar2;
-    private static boolean foo_bool1;
-    private static boolean foo_bool2;
+  public static int foo_int1;
+  public static int foo_int2;
+  protected static String foo_bar1;
+  protected static String foo_bar2;
+  private static boolean foo_bool1;
+  private static boolean foo_bool2;
 
-    static String HOME;
-    static String BIGTOP_UNLIKELY_FOO_ENV;
+  static String HOME;
+  static String BIGTOP_UNLIKELY_FOO_ENV;
 
-    @BeforeClass
-    public static void setUp() throws ClassNotFoundException, InterruptedException, NoSuchFieldException, IllegalAccessException {
-        System.setProperty("foo.int2", "100");
-        System.setProperty("foo.bool2", "true");
+  @BeforeClass
+  public static void setUp() throws ClassNotFoundException, InterruptedException, NoSuchFieldException, IllegalAccessException {
+    System.setProperty("foo.int2", "100");
+    System.setProperty("foo.bool2", "true");
 
-        ParameterSetter.setProperties(TestContractJavaProc.class,
-                new String[] { "foo_int1", "foo_int2", "foo_bar1", "foo_bar2", "foo_bool1", "foo_bool2" });
-        ParameterSetter.setEnv(TestContractJavaProc.class,
-                new String[] { "HOME", "BIGTOP_UNLIKELY_FOO_ENV"});
-    }
+    ParameterSetter.setProperties(TestContractJavaProc.class,
+        new String[]{"foo_int1", "foo_int2", "foo_bar1", "foo_bar2", "foo_bool1", "foo_bool2"});
+    ParameterSetter.setEnv(TestContractJavaProc.class,
+        new String[]{"HOME", "BIGTOP_UNLIKELY_FOO_ENV"});
+  }
 
-    @Test
-    public void testPropSettings() {
-        assertEquals("checking the value of foo_int1 from default value",
-                1000, foo_int1);
-        assertEquals("checking the value of foo_int2 from foo.int2",
-                100, foo_int2);
-        assertEquals("checking the value of foo_bar1 from default value",
-                "xyz", foo_bar1);
-        assertEquals("checking the value of foo_bar2 from unset value",
-                "", foo_bar2);
-        assertEquals("checking the value of foo_bool1 from unset value",
-                false, foo_bool1);
-        assertEquals("checking the value of foo_bar2 from foo.bool2",
-                true, foo_bool2);
-    }
+  @Test
+  public void testPropSettings() {
+    assertEquals("checking the value of foo_int1 from default value",
+        1000, foo_int1);
+    assertEquals("checking the value of foo_int2 from foo.int2",
+        100, foo_int2);
+    assertEquals("checking the value of foo_bar1 from default value",
+        "xyz", foo_bar1);
+    assertEquals("checking the value of foo_bar2 from unset value",
+        "", foo_bar2);
+    assertEquals("checking the value of foo_bool1 from unset value",
+        false, foo_bool1);
+    assertEquals("checking the value of foo_bar2 from foo.bool2",
+        true, foo_bool2);
+  }
 
-    @Test
-    public void testEnvSettings() {
-        assertEquals("checking the value of $HOME",
-                System.getenv("HOME"), HOME);
-        assertEquals("checking the value of $BIGTOP_UNLIKELY_FOO_ENV",
-                null, BIGTOP_UNLIKELY_FOO_ENV);
-    }
+  @Test
+  public void testEnvSettings() {
+    assertEquals("checking the value of $HOME",
+        System.getenv("HOME"), HOME);
+    assertEquals("checking the value of $BIGTOP_UNLIKELY_FOO_ENV",
+        null, BIGTOP_UNLIKELY_FOO_ENV);
+  }
 }
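
The ...JavaProc variant differs from TestContractJava only in exercising the ParameterSetter overloads that take an explicit array of target field names instead of deriving them from the property and variable names. In Groovy the same calls look like this (MyContractTest is a hypothetical class name):

ParameterSetter.setProperties(MyContractTest, ["foo_int1", "foo_int2"] as String[])
ParameterSetter.setEnv(MyContractTest, ["HOME"] as String[])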

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestListUtilsTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestListUtilsTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestListUtilsTest.groovy
index 29344cd..1a7a2c7 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestListUtilsTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/TestListUtilsTest.groovy
@@ -34,7 +34,7 @@ public class TestListUtilsTest {
 
     TestListUtils.touchTestFiles(prefix, "${fileName}.xml");
     assertTrue("only .class files are expected to be created",
-               expectedFile.getParentFile().listFiles().size() == 0);
+      expectedFile.getParentFile().listFiles().size() == 0);
 
     File p = new File(prefix);
     p.deleteDir();

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/failures/IntegrationTestClusterFailures.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/failures/IntegrationTestClusterFailures.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/failures/IntegrationTestClusterFailures.groovy
index a36cc1a..60be92a 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/failures/IntegrationTestClusterFailures.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/failures/IntegrationTestClusterFailures.groovy
@@ -67,7 +67,7 @@ public class IntegrationTestClusterFailures {
       Thread.sleep(SLEEP_TIME)
     }
 
-    try{
+    try {
       assert !isCronRunning(), "$CRON_SERVICE hasn't been stopped as expected:"
       println "$CRON_SERVICE stopped. Good."
     } finally {
@@ -104,7 +104,7 @@ public class IntegrationTestClusterFailures {
       Thread.sleep(SLEEP_TIME)
     }
 
-    try{
+    try {
       assert !isCronRunning(), "$CRON_SERVICE hasn't been killed as expected:"
       println "$CRON_SERVICE killed. Good."
     } finally {
@@ -137,7 +137,7 @@ public class IntegrationTestClusterFailures {
       Thread.sleep(SLEEP_TIME)
     }
 
-    try{
+    try {
       assert !isRemoteHostReachable(), "Connection to $testRemoteHost hasn't been killed as expected:"
       println "$testRemoteHost isn't reachable. Good."
     } finally {

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedParameterizedTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedParameterizedTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedParameterizedTest.groovy
index 0ba3627..16bbd34 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedParameterizedTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedParameterizedTest.groovy
@@ -30,13 +30,13 @@ class OrderedParameterizedTest {
   int parameter;
   static List order = [];
 
-  @RunStage(level=1)
+  @RunStage(level = 1)
   @Test
   public void lateTest() {
     order.add(1);
   }
 
-  @RunStage(level=-1)
+  @RunStage(level = -1)
   @Test
   public void earlyTest() {
     order.add(-1);
@@ -61,6 +61,6 @@ class OrderedParameterizedTest {
   @AfterClass
   static void verifyOrder() {
     assertEquals("tests were NOT executed in the desired order",
-                 [-1, 0, 1], order);
+      [-1, 0, 1], order);
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedTest.groovy
index 8dbe873..8815b42 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/junit/OrderedTest.groovy
@@ -29,13 +29,13 @@ import static org.junit.Assert.assertEquals
 class OrderedTest {
   static List order = [];
 
-  @RunStage(level=1)
+  @RunStage(level = 1)
   @Test
   public void lateTest() {
     order.add(1);
   }
 
-  @RunStage(level=-1)
+  @RunStage(level = -1)
   @Test
   public void earlyTest() {
     order.add(-1);
@@ -49,6 +49,6 @@ class OrderedTest {
   @AfterClass
   static void verifyOrder() {
     assertEquals("tests were NOT executed in the desired order",
-                 [-1, 0, 1], order);
+      [-1, 0, 1], order);
   }
 }
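
Both ordered-test diffs reformat @RunStage annotations, so a quick recap of the semantics they exercise: stages run in ascending level order, and tests without the annotation sit at the default level 0, which is what verifyOrder()'s expected [-1, 0, 1] encodes. A hedged skeleton, placed in the same package as these tests so @RunStage resolves without guessing its import; the @RunWith wiring to the framework's ordered runner sits outside the hunks shown and is omitted here:

package org.apache.bigtop.itest.junit

import org.junit.Test

class StagedTest {
  static List order = []

  @RunStage(level = -1)
  @Test
  void earlyTest() { order.add(-1) }   // level -1: scheduled first

  @Test
  void middleTest() { order.add(0) }   // no annotation: default level 0

  @RunStage(level = 1)
  @Test
  void lateTest() { order.add(1) }     // level 1: scheduled last
}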

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/pmanager/PackageManagerTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/pmanager/PackageManagerTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/pmanager/PackageManagerTest.groovy
index bb8f6d7..5d67fed 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/pmanager/PackageManagerTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/pmanager/PackageManagerTest.groovy
@@ -45,7 +45,9 @@ class PackageManagerTest {
   void searchForGcc() {
     List<PackageInstance> pkgs = pmgr.search("gcc")
 
-    assertFalse("gcc not found in repository", pkgs.findAll({return it.name =~ /^gcc.*/}).size() == 0)
+    assertFalse("gcc not found in repository", pkgs.findAll({
+      return it.name =~ /^gcc.*/
+    }).size() == 0)
   }
 
   @Test
@@ -83,7 +85,7 @@ class PackageManagerTest {
   void testGetContentList() {
     PackageInstance cron = PackageInstance.getPackageInstance(pmgr, CRON_RPM);
     List<String> list = pmgr.getContentList(cron);
-    list.each { println it};
+    list.each { println it };
 
     assertTrue("cron package is expected to contain at least ten files", list.size() > 10);
   }
@@ -92,10 +94,10 @@ class PackageManagerTest {
   void testGetDocs() {
     PackageInstance cron = PackageInstance.getPackageInstance(pmgr, CRON_RPM);
     List<String> list = pmgr.getDocs(cron);
-    list.each { println it};
+    list.each { println it };
 
     assertTrue("checking for docs in cron package",
-               list.size() > ((pmgr.getType() == "apt") ? -1 : 0));
+      list.size() > ((pmgr.getType() == "apt") ? -1 : 0));
   }
 
   @Test
@@ -104,14 +106,14 @@ class PackageManagerTest {
     Map<String, String> deps = bash.getDeps();
 
     assertTrue("package bash has 0 dependencies. weird.",
-               deps.size() > 0);
+      deps.size() > 0);
   }
 
   @Test
   void testGetConfigs() {
     PackageInstance cron = PackageInstance.getPackageInstance(pmgr, CRON_RPM);
     List<String> list = pmgr.getConfigs(cron);
-    list.each { println it};
+    list.each { println it };
 
     assertTrue("cron package is expected to contain at least a few config files", list.size() > 0);
   }
@@ -121,9 +123,9 @@ class PackageManagerTest {
   void testRepoManagement() {
     String repo_id = "test-repo";
     assertEquals("Can not add repo",
-                 0, pmgr.addBinRepo(repo_id, "http://127.0.0.1", null, "random strings here"));
+      0, pmgr.addBinRepo(repo_id, "http://127.0.0.1", null, "random strings here"));
     assertEquals("Can not remove repo",
-                 0, pmgr.removeBinRepo(repo_id));
+      0, pmgr.removeBinRepo(repo_id));
   }
 
   @Ignore("required sudo")
@@ -131,8 +133,8 @@ class PackageManagerTest {
   void testRepoFileManagement() {
     String repo_id = "test-repo";
     assertEquals("Can not add repo",
-                 0, pmgr.addBinRepo(repo_id, "random strings here"));
+      0, pmgr.addBinRepo(repo_id, "random strings here"));
     assertEquals("Can not remove repo",
-                 0, pmgr.removeBinRepo(repo_id));
+      0, pmgr.removeBinRepo(repo_id));
   }
 }
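
For orientation, the calls being reformatted in PackageManagerTest belong to the framework's package-manager abstraction. A hedged sketch of the API surface the test exercises; the getPackageManager() factory is an assumption, since the test obtains pmgr outside the hunks shown:

import org.apache.bigtop.itest.pmanager.PackageManager
import org.apache.bigtop.itest.pmanager.PackageInstance

// assumption: a factory picks the host's native manager (apt, yum, ...)
PackageManager pmgr = PackageManager.getPackageManager()

List<PackageInstance> hits = pmgr.search("gcc")           // repository search
PackageInstance cron = PackageInstance.getPackageInstance(pmgr, "cron")
pmgr.getContentList(cron).each { println it }             // files the package ships
pmgr.getDocs(cron).each { println it }                    // documentation entries
println cron.getDeps().size()                             // dependency map, name -> version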

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/AlternativeTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/AlternativeTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/AlternativeTest.groovy
index 316160f..5f0ffcb 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/AlternativeTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/AlternativeTest.groovy
@@ -33,10 +33,10 @@ class AlternativeTest {
 
     Map groups = Alternative.getAlternatives();
     assertTrue("not a single alternative group found. weird.",
-               groups.size() >0);
+      groups.size() > 0);
     assertTrue("there is no alternative for editor. weird.",
-               groups["editor"] != null);
+      groups["editor"] != null);
     assertTrue("in the editor alternative there are no actuall alternatives",
-               groups["editor"].getAlts().size() > 0);
+      groups["editor"].getAlts().size() > 0);
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/ServiceTest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/ServiceTest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/ServiceTest.groovy
index d2c4fe4..7924313 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/ServiceTest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/ServiceTest.groovy
@@ -34,11 +34,11 @@ class ServiceTest {
     assertEquals("wrong service name", name, svc.getName());
   }
 
-  @Ignore("requires chkconfig") 
+  @Ignore("requires chkconfig")
   @Test
   void testRunLevels() {
     List<String> l = svc.getRunLevels();
     assertTrue("Expected a non-zero size list of registered run levels for ssh service",
-               0 != l.size());
+      0 != l.size());
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/UGITest.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/UGITest.groovy b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/UGITest.groovy
index 0545dae..6c38881 100644
--- a/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/UGITest.groovy
+++ b/bigtop-test-framework/src/test/groovy/org/apache/bigtop/itest/posix/UGITest.groovy
@@ -27,12 +27,12 @@ class UGITest {
   @Test
   void testUsers() {
     assertEquals("expect root uid to be 0",
-                 "0", ugi.getUsers()["root"]["uid"]);
+      "0", ugi.getUsers()["root"]["uid"]);
   }
 
   @Test
   void testGroups() {
     assertEquals("expect root gid to be 0",
-                 "0", ugi.getGroups()["root"]["gid"]);
+      "0", ugi.getGroups()["root"]["gid"]);
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/crunch/src/main/groovy/org/apache/bigtop/itest/crunchsmoke/TestCrunchSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/crunch/src/main/groovy/org/apache/bigtop/itest/crunchsmoke/TestCrunchSmoke.groovy b/bigtop-tests/test-artifacts/crunch/src/main/groovy/org/apache/bigtop/itest/crunchsmoke/TestCrunchSmoke.groovy
index 7b7012c..7edca58 100644
--- a/bigtop-tests/test-artifacts/crunch/src/main/groovy/org/apache/bigtop/itest/crunchsmoke/TestCrunchSmoke.groovy
+++ b/bigtop-tests/test-artifacts/crunch/src/main/groovy/org/apache/bigtop/itest/crunchsmoke/TestCrunchSmoke.groovy
@@ -29,7 +29,7 @@ public class TestCrunchSmoke {
   static String runnerScript = "hadoop jar"
 
   static String crunchJar = System.getProperty(
-    "org.apache.bigtop.itest.crunch.smoke.crunch.jar", 
+    "org.apache.bigtop.itest.crunch.smoke.crunch.jar",
     "/usr/share/doc/crunch*/crunch-examples-*job.jar");
 
   static Shell sh = new Shell("/bin/bash -s");
@@ -43,37 +43,37 @@ public class TestCrunchSmoke {
 
   static Map examples =
     [
-        WordCount             : "${EXAMPLES}/text/pg11.txt $EXAMPLES_OUT",
-        SecondarySortExample  : "${EXAMPLES}/text/secondary_sort_input.txt ${EXAMPLES_OUT}",
-        AverageBytesByIP      : "${EXAMPLES}/access_log/000000 ${EXAMPLES_OUT}",
-        TotalBytesByIP        : "${EXAMPLES}/access_log/000000 ${EXAMPLES_OUT}"
+      WordCount: "${EXAMPLES}/text/pg11.txt $EXAMPLES_OUT",
+      SecondarySortExample: "${EXAMPLES}/text/secondary_sort_input.txt ${EXAMPLES_OUT}",
+      AverageBytesByIP: "${EXAMPLES}/access_log/000000 ${EXAMPLES_OUT}",
+      TotalBytesByIP: "${EXAMPLES}/access_log/000000 ${EXAMPLES_OUT}"
     ];
 
   private void _runExampleJobs(String algorithm) {
     sh.exec("hadoop fs -rmr ${EXAMPLES_OUT}");
-    sh.exec("${runnerScript} ${crunchJar}" 
+    sh.exec("${runnerScript} ${crunchJar}"
       + " org.apache.crunch.examples.${algorithm}"
       + " ${examples.get(algorithm)}"
-      );
+    );
     assertEquals("running Crunch example failed", sh.getRet(), 0);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testWordCount() {
     _runExampleJobs("WordCount");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testSecondarySort() {
     _runExampleJobs("SecondarySortExample");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testAverageBytesByIP() {
     _runExampleJobs("AverageBytesByIP");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testTotalBytesByIP() {
     _runExampleJobs("TotalBytesByIP");
   }
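
A note on the Shell idiom that recurs through these smoke tests: commands are fed to one bash session, and the exit status and captured streams are inspected afterwards. A minimal sketch; that getRet() reflects the status the tests care about is inferred from how every assertEquals(..., sh.getRet(), 0) above uses it:

import org.apache.bigtop.itest.shell.Shell

Shell sh = new Shell("/bin/bash -s")
sh.exec("echo hello",
        "uname -m")                // several commands run in one session
assert sh.getRet() == 0            // exit status checked after exec
sh.getOut().each { println it }    // captured stdout lines; getErr() for stderr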

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/flume/src/main/groovy/org/apache/bigtop/itest/flumesmoke/TestFlumeSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/flume/src/main/groovy/org/apache/bigtop/itest/flumesmoke/TestFlumeSmoke.groovy b/bigtop-tests/test-artifacts/flume/src/main/groovy/org/apache/bigtop/itest/flumesmoke/TestFlumeSmoke.groovy
index 6718146..85a016e 100644
--- a/bigtop-tests/test-artifacts/flume/src/main/groovy/org/apache/bigtop/itest/flumesmoke/TestFlumeSmoke.groovy
+++ b/bigtop-tests/test-artifacts/flume/src/main/groovy/org/apache/bigtop/itest/flumesmoke/TestFlumeSmoke.groovy
@@ -42,7 +42,7 @@ class TestFlumeSmoke {
 
   @BeforeClass
   static void setUp() {
-    JarContent.unpackJarContainer(TestFlumeSmoke.class, '.' , null);
+    JarContent.unpackJarContainer(TestFlumeSmoke.class, '.', null);
   }
 
   @AfterClass
@@ -54,27 +54,27 @@ class TestFlumeSmoke {
     String node_config = "node:text(\"events.txt\")|collectorSink(\"${hdfs_sink_dir}\",\"data\");";
 
     sh.exec("export FLUME_CONF_DIR=./${id}",
-            "flume node_nowatch -s -1 -n node -c '${node_config}'");
+      "flume node_nowatch -s -1 -n node -c '${node_config}'");
     assertEquals("Flume failed to accept events",
-                 0, sh.ret);
+      0, sh.ret);
 
     sh.exec("hadoop fs -cat ${hdfs_sink_dir}/${glob} | ${decompress} | wc -l");
     assertEquals("Wrong # of lines in output found at ${hdfs_sink_dir}",
-                 "10000", sh.out[0]);
+      "10000", sh.out[0]);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testBzip2() {
     compressionCommonTest("FlumeSmokeBzip2", "bzip2 -d", "*.bz2");
   }
 
   @Ignore("BIGTOP-218")
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testDeflate() {
     compressionCommonTest("FlumeSmokeDeflate", "perl -MCompress::Zlib -e 'undef \$/; print uncompress(<>)'", "*.deflate");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testGzip() {
     compressionCommonTest("FlumeSmokeGzip", "gzip -d", "*.gz");
   }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeBzip2/flume-site.xml
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeBzip2/flume-site.xml b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeBzip2/flume-site.xml
index cfd43e1..4981d25 100644
--- a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeBzip2/flume-site.xml
+++ b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeBzip2/flume-site.xml
@@ -1,4 +1,5 @@
 <?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 <!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
@@ -15,7 +16,6 @@
   See the License for the specific language governing permissions and
   limitations under the License.
 -->
-<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 
 <configuration>
   <property> 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeDeflate/flume-site.xml
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeDeflate/flume-site.xml b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeDeflate/flume-site.xml
index bc065da..78d1efc 100644
--- a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeDeflate/flume-site.xml
+++ b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeDeflate/flume-site.xml
@@ -1,4 +1,5 @@
 <?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 <!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
@@ -15,7 +16,6 @@
   See the License for the specific language governing permissions and
   limitations under the License.
 -->
-<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 
 <configuration>
   <property> 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeGzip/flume-site.xml
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeGzip/flume-site.xml b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeGzip/flume-site.xml
index 75d7c26..4c3948b 100644
--- a/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeGzip/flume-site.xml
+++ b/bigtop-tests/test-artifacts/flume/src/main/resources/FlumeSmokeGzip/flume-site.xml
@@ -1,4 +1,5 @@
 <?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 <!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
@@ -15,7 +16,6 @@
   See the License for the specific language governing permissions and
   limitations under the License.
 -->
-<?xml-stylesheet type="text/xsl"  href="configuration.xsl"?>
 
 <configuration>
   <property> 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/giraph/src/main/groovy/org/apache/bigtop/itest/giraphsmoke/TestGiraphSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/giraph/src/main/groovy/org/apache/bigtop/itest/giraphsmoke/TestGiraphSmoke.groovy b/bigtop-tests/test-artifacts/giraph/src/main/groovy/org/apache/bigtop/itest/giraphsmoke/TestGiraphSmoke.groovy
index dd82c2f..b213130 100644
--- a/bigtop-tests/test-artifacts/giraph/src/main/groovy/org/apache/bigtop/itest/giraphsmoke/TestGiraphSmoke.groovy
+++ b/bigtop-tests/test-artifacts/giraph/src/main/groovy/org/apache/bigtop/itest/giraphsmoke/TestGiraphSmoke.groovy
@@ -32,7 +32,7 @@ public class TestGiraphSmoke {
 
   static Shell sh = new Shell("/bin/bash -s")
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testPageRankBenchmark() {
     sh.exec("${runnerScript} ${giraphJar}"
       + " org.apache.giraph.benchmark.PageRankBenchmark"
@@ -41,11 +41,11 @@ public class TestGiraphSmoke {
       + " -s 3"        // number of supersteps
       + " -V 100000"   // aggregate vertices
       + " -w 3"        // workers
-      )
+    )
     assertEquals("running PageRankBenchmark failed", sh.getRet(), 0);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testRandomMessageBenchmark() {
     sh.exec("${runnerScript} ${giraphJar}"
       + " org.apache.giraph.benchmark.RandomMessageBenchmark"
@@ -56,11 +56,11 @@ public class TestGiraphSmoke {
       + " -w 3"        // workers
       + " -n 10"       // Number of messages per edge
       + " -b 100"      // size of each message in bytes
-      )
+    )
     assertEquals("running RandomMessageBenchmark failed", sh.getRet(), 0);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testSimpleCheckpointVertex() {
     sh.exec("hadoop fs -rmr ${testDir}");
     sh.exec("${runnerScript} ${giraphJar}"
@@ -69,37 +69,37 @@ public class TestGiraphSmoke {
       + " -s 3"        // number of supersteps
       + " -w 3"        // workers
       + " -o ${testDir}"
-      )
+    )
     assertEquals("running SimpleCheckpointVertex failed", sh.getRet(), 0);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testSimpleVertexWithWorkerContext() {
     sh.exec("hadoop fs -rmr ${testDir}");
     sh.exec("${runnerScript} ${giraphJar}"
       + " org.apache.giraph.examples.SimpleVertexWithWorkerContext"
       + " ${testDir} 3"
-      )
+    )
     assertEquals("running SimpleCheckpointVertex failed", sh.getRet(), 0);
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testSimpleShortestPathsVertex() {
     // A graph definition: 
     //   [vertex id, vertex value, [[edge1, value1], .. [edgeN, valueN]]] 
-    List graphDescription=[[0, 0, [[1,1], [2,2]]],
-                           [1, 1, [[2,2], [3,3]]],
-                           [2, 2, [[3,3], [4,4]]],
-                           [3, 3, [[4,4], [5,5]]],
-                           [4, 4, [[5,5], [0,0]]],
-                           [5, 5, [[0,0], [1,1]]]];
-    int partitionSize=2;
+    List graphDescription = [[0, 0, [[1, 1], [2, 2]]],
+      [1, 1, [[2, 2], [3, 3]]],
+      [2, 2, [[3, 3], [4, 4]]],
+      [3, 3, [[4, 4], [5, 5]]],
+      [4, 4, [[5, 5], [0, 0]]],
+      [5, 5, [[0, 0], [1, 1]]]];
+    int partitionSize = 2;
 
     sh.exec("hadoop fs -rmr ${testDir}",
-            "hadoop fs -mkdir ${testDir}/input");
+      "hadoop fs -mkdir ${testDir}/input");
 
-    for (int i=0; i<graphDescription.size(); i+=partitionSize)  {
-      String part = graphDescription[i..(i+partitionSize-1)].join("\n");
+    for (int i = 0; i < graphDescription.size(); i += partitionSize) {
+      String part = graphDescription[i..(i + partitionSize - 1)].join("\n");
       int partId = i / partitionSize;
       sh.exec("hadoop fs -put <(echo '${part}') ${testDir}/input/part-m-${partId}");
     }
@@ -109,7 +109,7 @@ public class TestGiraphSmoke {
       + " ${testDir}/input"
       + " ${testDir}/output"
       + " 0 ${graphDescription.size() / partitionSize}"
-      )
+    )
     assertEquals("running SimpleShortestPathsVertex failed", sh.getRet(), 0);
   }
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestCLI.java b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestCLI.java
index df24e69..8de9b30 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestCLI.java
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestCLI.java
@@ -68,7 +68,7 @@ public class TestCLI extends CLITestHelper {
     clitestDataDir = new File(TEST_CACHE_DATA_DIR).toURI().toString().replace(' ', '+');
 
     String[] createTestcliDirCmds = {
-        "hadoop fs -mkdir -p "  + TEST_DIR_ABSOLUTE,
+        "hadoop fs -mkdir -p " + TEST_DIR_ABSOLUTE,
         "hadoop fs -chmod 777 " + TEST_DIR_ABSOLUTE
     };
     shHDFS.exec(createTestcliDirCmds);
@@ -81,7 +81,7 @@ public class TestCLI extends CLITestHelper {
     // We can't just use conf.setInt(fs.trash.interval",0) because if trash is
     // enabled on the server, client configuration value is ignored.
     Assert.assertEquals("HDFS trash should be disabled via fs.trash.interval",
-        0, conf.getInt("fs.trash.interval",0));
+        0, conf.getInt("fs.trash.interval", 0));
     Assert.assertEquals("This test needs to be run under root user of hcfs",
         System.getProperty("hcfs.root.username", "hdfs"),
         System.getProperty("user.name"));

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestFuseHCFS.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestFuseHCFS.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestFuseHCFS.groovy
index 84c9c42..2e06fbb 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestFuseHCFS.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hcfs/TestFuseHCFS.groovy
@@ -46,7 +46,7 @@ public class TestFuseDFS {
   private static Shell sh = new Shell("/bin/bash -s");
   private static Shell shRoot = new Shell("/bin/bash -s", "root");
   private static String mount_point = System.
-      getProperty("fuse.dfs.mountpoint", "/tmp/hcfs-test");
+    getProperty("fuse.dfs.mountpoint", "/tmp/hcfs-test");
   static private Log LOG = LogFactory.getLog(Shell.class)
 
   /**
@@ -54,7 +54,7 @@ public class TestFuseDFS {
    * That allows this test to work on any file system, because its not
    * coupled to hadoop-fuse-dfs*/
   private static boolean isHDFS = "HDFS".
-      equals(System.getProperty("HCFS_IMPLEMENTATION", "HDFS"));
+    equals(System.getProperty("HCFS_IMPLEMENTATION", "HDFS"));
   private static String userdir = "${mount_point}/user/${username}";
   private static String testdir = "${userdir}/TestFuseDFS-testDir";
 
@@ -108,7 +108,7 @@ public class TestFuseDFS {
      * we go with them.  But that makes this test somewhat dependant
      * on working FUSE mount to begin with.*/
     sh.exec("mkdir -p ${testdir}");
-    assertEquals("Failed: mkdir basic setup !",0,sh.getRet());
+    assertEquals("Failed: mkdir basic setup !", 0, sh.getRet());
 
     /**
      * some tests will require a file system command to setup the test,
@@ -146,68 +146,68 @@ public class TestFuseDFS {
   @Test
   public void testCd() {
     testWrapper(//The test: Change to a directory.
-        "cd ${testdir} && pwd ",
-        //The lambda: Validates via running pwd.
-        {
-          LOG.info("After cd, pwd=" + sh.getOut()[0]);
-          assertEquals("Failed: testing contains '${testdir}' after change " +
-              "dir", true,
-              sh.getOut()[0].contains("${testdir}"));
-          assertEquals("Failed: exit code is non-zero", 0, sh.getRet());
-        }//validator
+      "cd ${testdir} && pwd ",
+      //The lambda: Validates via running pwd.
+      {
+        LOG.info("After cd, pwd=" + sh.getOut()[0]);
+        assertEquals("Failed: testing contains '${testdir}' after change " +
+          "dir", true,
+          sh.getOut()[0].contains("${testdir}"));
+        assertEquals("Failed: exit code is non-zero", 0, sh.getRet());
+      }//validator
     );
   }
 
   @Test
   public void testLs() {
     testWrapper(
-        "touch ${testdir}/non-trivial-fn",
-        "ls -altrh ${testdir}", //Test command : ls the dir.
-        {
-          //assert that FUSE mount calculates total line (ls -altrh)
-          assertTrue("Failed: Confiring that total is shown in ls",
-              sh.getOut()[0].contains("total"));
-          //now, we expect the user name to be in the test
-          // directory, since
-          //user is the one who created the test directory.
-          assertTrue("Failed: Confirming that the non-trivial-fn is shown in " +
-              "ls " +
-              "" +
-              "" + sh
-              .getOut(),
-              sh.getOut().toString().contains("non-trivial-fn"));
-          assertEquals("Failed: exit code is non-zero", 0, sh.getRet());
-        }//validator
+      "touch ${testdir}/non-trivial-fn",
+      "ls -altrh ${testdir}", //Test command : ls the dir.
+      {
+        //assert that FUSE mount calculates total line (ls -altrh)
+        assertTrue("Failed: Confiring that total is shown in ls",
+          sh.getOut()[0].contains("total"));
+        //now, we expect the user name to be in the test
+        // directory, since
+        //user is the one who created the test directory.
+        assertTrue("Failed: Confirming that the non-trivial-fn is shown in " +
+          "ls " +
+          "" +
+          "" + sh
+          .getOut(),
+          sh.getOut().toString().contains("non-trivial-fn"));
+        assertEquals("Failed: exit code is non-zero", 0, sh.getRet());
+      }//validator
     );
   }
 
   @Test
   public void testMkDir() {
     testWrapper("mkdir ${testdir}/dir1 && cd ${testdir}/dir1 && pwd",
-        {
-          LOG.info(sh.getOut());
-          //assert that FUSE mount calculates total line (ls -altrh)
-          assertTrue("Failed: Confirm that dir1 is the new working dir. ",
-              sh.getOut().toString().contains("${testdir}/dir1"));
-          assertEquals("Failed: mkdir under ${testdir} non-zero return code",
-              0,
-              sh.getRet());
-        } //validator
+      {
+        LOG.info(sh.getOut());
+        //assert that FUSE mount calculates total line (ls -altrh)
+        assertTrue("Failed: Confirm that dir1 is the new working dir. ",
+          sh.getOut().toString().contains("${testdir}/dir1"));
+        assertEquals("Failed: mkdir under ${testdir} non-zero return code",
+          0,
+          sh.getRet());
+      } //validator
     );
   }
 
   @Test
   public void testTouch() {
     testWrapper("touch ${testdir}/file1 && ls ${testdir}",
-        {
-          LOG.info(sh.getOut());
-          //assert that FUSE mount calculates total line (ls -altrh)
-          assertTrue("Failed: Confirm that file1 is created/listed ",
-              sh.getOut()[0].contains("file1"));
-          assertEquals("Failed: touch ${testdir}/file1 + ls return code " +
-              "non-zero", 0,
-              sh.getRet());
-        }//validator
+      {
+        LOG.info(sh.getOut());
+        //assert that FUSE mount calculates total line (ls -altrh)
+        assertTrue("Failed: Confirm that file1 is created/listed ",
+          sh.getOut()[0].contains("file1"));
+        assertEquals("Failed: touch ${testdir}/file1 + ls return code " +
+          "non-zero", 0,
+          sh.getRet());
+      }//validator
     );
   }
 
@@ -222,21 +222,21 @@ public class TestFuseDFS {
     f.write("hi_bigtop\nhi_bigtop\n");
 
     testWrapper("/bin/cp -rf /tmp/FUSETEST_bigtop ${testdir}/cf2",
-        /**
-         * Required sleep:  IS HDFS FUSE Strictly consistent?
-         * Reveals HDFS-6072.*/
-        "sleep 2 && cat ${testdir}/cf2",
-        {
-          //contents of output stream should be "-hello bigtop-"
-          LOG.info("cat output = " + sh.getOut() + " " + sh.getErr() + " " +
-              sh.getRet());
-          def (out, err, ret) = [ sh.getOut(), sh.getErr(), sh.getRet() ];
-          //assert that FUSE mount calculates total line (ls -altrh)
-          assertTrue(
-              "Failed: cat didnt contain "+out,
-              out.contains("hi_bigtop"));
-          assertEquals("Failed: return code non-zero", 0, ret);
-        }//validator
+      /**
+       * Required sleep:  IS HDFS FUSE Strictly consistent?
+       * Reveals HDFS-6072.*/
+      "sleep 2 && cat ${testdir}/cf2",
+      {
+        //contents of output stream should be "-hello bigtop-"
+        LOG.info("cat output = " + sh.getOut() + " " + sh.getErr() + " " +
+          sh.getRet());
+        def (out, err, ret) = [sh.getOut(), sh.getErr(), sh.getRet()];
+        //assert that FUSE mount calculates total line (ls -altrh)
+        assertTrue(
+          "Failed: cat didnt contain " + out,
+          out.contains("hi_bigtop"));
+        assertEquals("Failed: return code non-zero", 0, ret);
+      }//validator
     );
   }
 
@@ -254,37 +254,37 @@ public class TestFuseDFS {
      * TODO: Determine if the length of this string effect consistency?
      * Small "contents" string might be another way to expose HDFS-6072.
      * */
-    final String contents="ABCDEFGHIJKLMNOPZUIRPIEOF";
-    final String setup="mkdir ${testdir}/targetdir &&"+
-        "echo ${contents} > ${testdir}/cp1 && "+
-        "echo ${contents} > ${testdir}/cp2 && " +
-        "/bin/cp -rf ${testdir}/cp* ${testdir}/targetdir/";
+    final String contents = "ABCDEFGHIJKLMNOPZUIRPIEOF";
+    final String setup = "mkdir ${testdir}/targetdir &&" +
+      "echo ${contents} > ${testdir}/cp1 && " +
+      "echo ${contents} > ${testdir}/cp2 && " +
+      "/bin/cp -rf ${testdir}/cp* ${testdir}/targetdir/";
     testWrapper(
-        setup,//Large setup function so we externalize it above.
-        {
-          def files = ["cp1", "cp2"];
+      setup,//Large setup function so we externalize it above.
+      {
+        def files = ["cp1", "cp2"];
 
-          assertEquals("Failed: ret code non-zero", 0, sh.getRet());
-          sh.exec("ls -altrh ${testdir}/targetdir/");
-          //assert that copy results in the new files
-          //at least in the directory namespace...
-          assertEquals("Failed: ls of target dir ret code non-zero", 0,
-              sh.getRet());
-          files.each() {
-            assertTrue("Failed: to find ${it} in target directory",
-                sh.getOut().toString().contains(it));
-          }
-          //Assert that the copy resulted in identical files
-          //Note that due to eventual consistency, etc, this is
-          //an important test for typical fuse behaviour and workload
-          files.each() {
-            sh.exec("diff " +"${testdir}/${it} "
-                +"${testdir}/targetdir/${it}");
-            assertTrue("Failed: Detected a difference between ${it} in " +
-                "${testdir} vs " + "the ${testdir}/targetdir diff=" +sh.out ,
-                sh.getRet().equals(0));
-          }
-        }//validator
+        assertEquals("Failed: ret code non-zero", 0, sh.getRet());
+        sh.exec("ls -altrh ${testdir}/targetdir/");
+        //assert that copy results in the new files
+        //at least in the directory namespace...
+        assertEquals("Failed: ls of target dir ret code non-zero", 0,
+          sh.getRet());
+        files.each() {
+          assertTrue("Failed: to find ${it} in target directory",
+            sh.getOut().toString().contains(it));
+        }
+        //Assert that the copy resulted in identical files
+        //Note that due to eventual consistency, etc, this is
+        //an important test for typical fuse behaviour and workload
+        files.each() {
+          sh.exec("diff " + "${testdir}/${it} "
+            + "${testdir}/targetdir/${it}");
+          assertTrue("Failed: Detected a difference between ${it} in " +
+            "${testdir} vs " + "the ${testdir}/targetdir diff=" + sh.out,
+            sh.getRet().equals(0));
+        }
+      }//validator
     );
   }
 
@@ -292,17 +292,17 @@ public class TestFuseDFS {
   public void testMv() {
     //test that move recursively moves stuff
     testWrapper("mkdir -p ${testdir}/subdir1 && touch " +
-        "${testdir}/subdir1/innerfile",
-        "mv ${testdir}/subdir1 ${testdir}/subdir2",
-        {
-          assertEquals("Failed: cp exit code != 0", 0, sh.getRet());
-          sh.exec("ls -altrh ${testdir}/subdir2/");
-          //assert that the inner file exists
-          assertTrue(sh.getOut().toString().contains("innerfile"));
-          //assert that original file is gone
-          sh.exec("ls -altrh ${testdir}");
-          assertTrue(!sh.getOut().toString().contains("subdir1"));
-        }//validator
+      "${testdir}/subdir1/innerfile",
+      "mv ${testdir}/subdir1 ${testdir}/subdir2",
+      {
+        assertEquals("Failed: cp exit code != 0", 0, sh.getRet());
+        sh.exec("ls -altrh ${testdir}/subdir2/");
+        //assert that the inner file exists
+        assertTrue(sh.getOut().toString().contains("innerfile"));
+        //assert that original file is gone
+        sh.exec("ls -altrh ${testdir}");
+        assertTrue(!sh.getOut().toString().contains("subdir1"));
+      }//validator
     );
   }
 
@@ -310,12 +310,12 @@ public class TestFuseDFS {
   @Test
   public void testRm() {
     testWrapper("touch ${testdir}/file-removed",
-        "rm ${testdir}/file-removed",
-        {
-          assertEquals("Failed: rm ret code non-zero", 0, sh.getRet());
-          sh.exec("ls ${testdir}");
-          assertTrue(!sh.getOut().toString().contains("file-removed"));
-        }//validator
+      "rm ${testdir}/file-removed",
+      {
+        assertEquals("Failed: rm ret code non-zero", 0, sh.getRet());
+        sh.exec("ls ${testdir}");
+        assertTrue(!sh.getOut().toString().contains("file-removed"));
+      }//validator
     );
   }
 }
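
The FUSE test reads more easily once the testWrapper shape is clear: an optional setup command, the command under test, then a validator closure that asserts on the shared Shell. The helper itself lives outside these hunks, so this is a plausible reconstruction under that assumption, not the committed code:

// hedged reconstruction of the private helper; the real one may differ in details
def testWrapper(String cmd, Closure validator) {
  sh.exec(cmd)        // run the command under test in the shared shell
  validator()         // closure inspects sh.getOut(), sh.getErr(), sh.getRet()
}

def testWrapper(String setup, String cmd, Closure validator) {
  sh.exec(setup)      // stage fixtures first (mkdir, touch, ...)
  testWrapper(cmd, validator)
}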

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestBlockRecovery.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestBlockRecovery.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestBlockRecovery.groovy
index a75f016..f7bc04e 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestBlockRecovery.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestBlockRecovery.groovy
@@ -119,7 +119,7 @@ public class TestBlockRecovery {
     blockToTest = sh.exec("grep -o 'blk_[0-9]*' $outputFile").getOut()[0];
     assertTrue("Could not obtain block number", sh.getRet() == 0);
 
-    for (int i=0; i < dataDirs.length; i++) {
+    for (int i = 0; i < dataDirs.length; i++) {
       def dataDir = dataDirs[i]
       blockLocation = sh.exec("find $dataDir -name $blockToTest | grep $dataDir").getOut()[0];
       if (blockLocation != null) break;
@@ -137,7 +137,7 @@ public class TestBlockRecovery {
     assertTrue("Could not delete file $fsFilePath", sh.getRet() == 0);
     sh.exec("rm -rf $localTestDir");
     assertTrue("Could not delete test directory $localTestDir", sh.getRet() == 0);
-    }
+  }
 
   @Test
   public void testBlockRecovery() {
@@ -153,14 +153,14 @@ public class TestBlockRecovery {
     sh.exec("hadoop fs -cat $fsFilePath");
 
     // make sure checksum changes back to original, indicating block recovery
-    for (int j=0; j<3; j++) {
+    for (int j = 0; j < 3; j++) {
       // wait a bit to let the block recover
       sleep(sleepTime);
       // see if checksum has changed
       cksumError = sh.exec("hadoop fs -cat $fsFilePath | grep -o 'Checksum error'").getErr();
       if (cksumError != "Checksum error") break;
     }
-    assertNotNull ("Block has not been successfully triggered for recovery.", cksumError);
+    assertNotNull("Block has not been successfully triggered for recovery.", cksumError);
 
     nodesAfterRecovery = sh.exec("hdfs fsck $fsFilePath -blocks -locations -files | $grepIP").getOut();
     assertTrue("Could not obtain datanode addresses", sh.getRet() == 0);
@@ -174,7 +174,7 @@ public class TestBlockRecovery {
       assertTrue("Could not obtain datanode addresses", sh.getRet() == 0);
 
       blockRecoveryNode = (nodesBeforeRecovery.intersect(nodesAfterRecovery))[0];
-      assert (blockRecoveryNode.size() != 0) : "Block has not been successfully triggered for recovery."
+      assert (blockRecoveryNode.size() != 0): "Block has not been successfully triggered for recovery."
     }
 
     int cksumAttempt;

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSAdmin.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSAdmin.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSAdmin.groovy
index bea1595..e4168f5 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSAdmin.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSAdmin.groovy
@@ -27,14 +27,14 @@ import org.apache.bigtop.itest.JarContent;
 import org.apache.bigtop.itest.shell.Shell;
 
 public class TestDFSAdmin {
- 
+
   // set debugging variable to true if you want error messages sent to stdout
   private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
 
   @BeforeClass
   public static void setUp() {
     // unpack resource
-    JarContent.unpackJarContainer(TestDFSAdmin.class, "." , null);
+    JarContent.unpackJarContainer(TestDFSAdmin.class, ".", null);
     System.out.println("Running DFSAdmin commands:");
   }
 
@@ -43,32 +43,32 @@ public class TestDFSAdmin {
   }
 
   @Test
-  public void testDFSbasic() { 
+  public void testDFSbasic() {
     // report
-    System.out.println("-report"); 
-    shHDFS.exec("hdfs dfsadmin -report");    
+    System.out.println("-report");
+    shHDFS.exec("hdfs dfsadmin -report");
     assertTrue("-report failed", shHDFS.getRet() == 0);
 
     // help
-    System.out.println("-help"); 
+    System.out.println("-help");
     shHDFS.exec("hdfs dfsadmin -help");
     assertTrue("-help failed", shHDFS.getRet() == 0);
 
     // printTopology
-    System.out.println("-printTopology"); 
+    System.out.println("-printTopology");
     shHDFS.exec("hdfs dfsadmin -printTopology");
     assertTrue("-printTopology failed", shHDFS.getRet() == 0);
 
     // metasave
     System.out.println("-metasave");
     shHDFS.exec("hdfs dfsadmin -metasave metasave_test");
-    assertTrue("-metasave failed", shHDFS.getRet() == 0); 
+    assertTrue("-metasave failed", shHDFS.getRet() == 0);
   }
 
   @Test
   public void testDFSsafemode() {
     // safemode
-    System.out.println("-safemode"); 
+    System.out.println("-safemode");
     shHDFS.exec("hdfs dfsadmin -safemode leave");
     assertTrue("-safemode leave failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -safemode get");
@@ -80,25 +80,25 @@ public class TestDFSAdmin {
     assertTrue("-safemode get failed", shHDFS.getOut().get(0) == "Safe mode is ON");
     assertTrue("-safemode get failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -safemode leave");
-    assertTrue("-safemode leave failed", shHDFS.getRet() == 0); 
+    assertTrue("-safemode leave failed", shHDFS.getRet() == 0);
   }
 
   @Test
   public void testDFSnamespace() {
     // saveNamespace
     System.out.println("-saveNamespace");
-    shHDFS.exec("hdfs dfsadmin -safemode enter"); 
+    shHDFS.exec("hdfs dfsadmin -safemode enter");
     shHDFS.exec("hdfs dfsadmin -saveNamespace");
     assertTrue("-saveNamespace failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -safemode leave");
-    shHDFS.exec("hdfs dfsadmin -saveNamespace"); 
+    shHDFS.exec("hdfs dfsadmin -saveNamespace");
     assertTrue("-saveNamespace worked in non safemode", shHDFS.getRet() != 0);
   }
 
   @Test
   public void testDFSrefreshcommands() {
     // refreshNodes
-    System.out.println("-refreshNodes"); 
+    System.out.println("-refreshNodes");
     shHDFS.exec("hdfs dfsadmin -refreshNodes");
     assertTrue("-refreshNodes failed", shHDFS.getRet() == 0);
 
@@ -107,7 +107,7 @@ public class TestDFSAdmin {
     shHDFS.exec("hdfs dfsadmin -refreshServiceAcl");
     System.out.println(shHDFS.getRet());
     assertTrue("-refreshServiceAcl failed", shHDFS.getRet() == 0); */
-   
+
     // refreshUserToGroupsMappings
     System.out.println("-refreshUserToGroupsMappings");
     shHDFS.exec("hdfs dfsadmin -refreshUserToGroupsMappings");
@@ -116,13 +116,13 @@ public class TestDFSAdmin {
     // refreshSuperUserGroupsConfiguration
     System.out.println("-refreshSuperUserGroupsConfiguration");
     shHDFS.exec("hdfs dfsadmin -refreshSuperUserGroupsConfiguration");
-    assertTrue("-refreshSuperUserGroupsConfiguration failed", shHDFS.getRet() == 0); 
+    assertTrue("-refreshSuperUserGroupsConfiguration failed", shHDFS.getRet() == 0);
   }
 
   @Test
-  public void testDFSstorage() {  
+  public void testDFSstorage() {
     // restoreFailedStorage
-    System.out.println("-restoreFailedStorage"); 
+    System.out.println("-restoreFailedStorage");
     shHDFS.exec("hdfs dfsadmin -restoreFailedStorage false");
     assertTrue("-restoreFailedStorage false failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -restoreFailedStorage check");
@@ -134,7 +134,7 @@ public class TestDFSAdmin {
     assertTrue("-restoreFailedStorage check", shHDFS.getOut().get(0) == "restoreFailedStorage is set to true");
     assertTrue("-restoreFailedStorage check", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -restoreFailedStorage false");
-    assertTrue("-restoreFailedStorage false failed", shHDFS.getRet() == 0); 
+    assertTrue("-restoreFailedStorage false failed", shHDFS.getRet() == 0);
   }
 
   @Test
@@ -142,18 +142,18 @@ public class TestDFSAdmin {
     // setQuota, clrQuota
     System.out.println("-setQuota, -clrQuota");
     shHDFS.exec("date");
-    String quota_test = "quota_test" + shHDFS.getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+    String quota_test = "quota_test" + shHDFS.getOut().get(0).replaceAll("\\s", "").replaceAll(":", "");
     shHDFS.exec("hadoop fs -test -e $quota_test");
     if (shHDFS.getRet() == 0) {
       shHDFS.exec("hadoop fs -rmr -skipTrash $quota_test");
       assertTrue("Deletion of previous testDistcpInputs from HDFS failed",
-          shHDFS.getRet() == 0);
+        shHDFS.getRet() == 0);
     }
     shHDFS.exec("hadoop fs -mkdir -p $quota_test");
     shHDFS.exec("hdfs dfsadmin -setQuota 1000 $quota_test");
     assertTrue("-setQuota failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -clrQuota $quota_test");
-    assertTrue("-clrQuota failed", shHDFS.getRet() == 0); 
+    assertTrue("-clrQuota failed", shHDFS.getRet() == 0);
 
     // setSpaceQuota, clrSpaceQuota
     System.out.println("-setSpaceQuota, -clrSpaceQuota");
@@ -161,7 +161,7 @@ public class TestDFSAdmin {
     assertTrue("-setSpaceQuota failed", shHDFS.getRet() == 0);
     shHDFS.exec("hdfs dfsadmin -clrSpaceQuota $quota_test");
     assertTrue("-clrSpaceQuota failed", shHDFS.getRet() == 0);
-    shHDFS.exec("hadoop fs -rmr $quota_test"); 
+    shHDFS.exec("hadoop fs -rmr $quota_test");
   }
 
 }
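
The dfsadmin tests above all follow the same exec-then-assert pattern around the Bigtop Shell helper. A minimal standalone sketch of that pattern, assuming a running HDFS; the directory name is hypothetical:

    import org.apache.bigtop.itest.shell.Shell

    // Run each admin command as the hdfs superuser, then assert on its exit code.
    Shell hdfs = new Shell("/bin/bash", "hdfs")
    hdfs.exec("hadoop fs -mkdir -p /tmp/quota_demo")           // hypothetical path
    hdfs.exec("hdfs dfsadmin -setQuota 1000 /tmp/quota_demo")
    assert hdfs.getRet() == 0 : "-setQuota failed"
    hdfs.exec("hdfs dfsadmin -clrQuota /tmp/quota_demo")
    assert hdfs.getRet() == 0 : "-clrQuota failed"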

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSCLI.java b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSCLI.java
index a0848ad..63d4232 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSCLI.java
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDFSCLI.java
@@ -55,7 +55,7 @@ public class TestDFSCLI extends TestHDFSCLI {
     clitestDataDir = new File(TEST_CACHE_DATA_DIR).toURI().toString().replace(' ', '+');
 
     String[] createTestcliDirCmds = {
-        "hadoop fs -mkdir -p "  + TEST_DIR_ABSOLUTE,
+        "hadoop fs -mkdir -p " + TEST_DIR_ABSOLUTE,
         "hadoop fs -chmod 777 " + TEST_DIR_ABSOLUTE
     };
     shHDFS.exec(createTestcliDirCmds);

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy
index d14d664..40330fc 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy
@@ -26,11 +26,11 @@ import org.apache.hadoop.conf.Configuration;
 
 
 public class TestDistCpIntra {
- 
+
   private static Shell sh = new Shell("/bin/bash -s");
   //extracting user identity for distcp absolute path
   private static final String USERNAME = System.getProperty("user.name");
-  private static String date = sh.exec("date").getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+  private static String date = sh.exec("date").getOut().get(0).replaceAll("\\s", "").replaceAll(":", "");
   private static String namenode = "";
   private static String testDistcpInputs = "testDistcpInputs" + date;
   private static String testDistcpOutputs = "testDistcpOutputs" + date;
@@ -39,7 +39,7 @@ public class TestDistCpIntra {
   private static String testDistcpOut = "testDistcpOut" + date;
 
   @BeforeClass
-  public static void setUp() {   
+  public static void setUp() {
     // get namenode hostname from core-site.xml
     Configuration conf = new Configuration();
     namenode = conf.get("fs.defaultFS");
@@ -64,7 +64,7 @@ public class TestDistCpIntra {
       String dcpfile_i = "$dcpfile" + "$i" + ".txt";
       sh.exec("echo \"test$i\" > $dcpfile_i");
     }
-    
+
     // copy sample input files to hdfs
     sh.exec("hadoop fs -put $dcpfile* $testDistcpInputs");
     assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
@@ -90,15 +90,15 @@ public class TestDistCpIntra {
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs -rmr -skipTrash $testDistcpInputs");
       assertTrue("Deletion of previous testDistcpInputs from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
 
     for (int i = 4; i <= 7; i++) {
       sh.exec("hadoop fs -test -e $testDistcpInputs$i");
       if (sh.getRet() == 0) {
-       sh.exec("hadoop fs -rmr -skipTrash $testDistcpInputs$i");
+        sh.exec("hadoop fs -rmr -skipTrash $testDistcpInputs$i");
         assertTrue("Deletion of previous testDistcpInputs from HDFS failed",
-            sh.getRet() == 0);
+          sh.getRet() == 0);
       }
     }
 
@@ -106,19 +106,19 @@ public class TestDistCpIntra {
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs -rmr -skipTrash $testDistcpOutputs");
       assertTrue("Deletion of previous testDistcpOutputs from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
 
   }
 
   @Test
-  public void testDistcpIntra() { 
+  public void testDistcpIntra() {
     for (int i = 1; i <= 2; i++) {
       String dcpfile_i = "$dcpfile" + "$i" + ".txt";
       // running distcp from namenode/src to namenode/dest
       sh.exec("hadoop distcp $namenode/user/$USERNAME/$testDistcpInputs/$dcpfile_i $namenode/user/$USERNAME/$testDistcpOutputs");
       assertTrue("Distcp $i failed", sh.getRet() == 0);
-      
+
       // confirm that copied file is the same as original file
       sh.exec("hadoop fs -cat $namenode/user/$USERNAME/$testDistcpInputs/$dcpfile_i > $testDistcpIn");
       sh.exec("hadoop fs -cat $namenode/user/$USERNAME/$testDistcpOutputs/$dcpfile_i > $testDistcpOut");
@@ -128,24 +128,24 @@ public class TestDistCpIntra {
       // clean up
       sh.exec("rm -rf $testDistcpIn", "rm -rf $testDistcpOut");
     }
-  } 
+  }
 
   @Test
-  public void testDistcpIntra_MultipleSources() { 
+  public void testDistcpIntra_MultipleSources() {
     String distcp_sources = "distcp_sources" + date;
     String dcpfile4 = "$testDistcpInputs" + "4/$dcpfile" + "4.txt"
     String dcpfile5 = "$testDistcpInputs" + "5/$dcpfile" + "5.txt"
     String dcpfile6 = "$testDistcpInputs" + "6/$dcpfile" + "6.txt"
     String dcpfile7 = "$testDistcpInputs" + "7/$dcpfile" + "7.txt"
     // distcp multiple sources
-    sh.exec("hadoop distcp $namenode/user/$USERNAME/$dcpfile4 $namenode/user/$USERNAME/$dcpfile5 $namenode/user/$USERNAME/$testDistcpOutputs");  
+    sh.exec("hadoop distcp $namenode/user/$USERNAME/$dcpfile4 $namenode/user/$USERNAME/$dcpfile5 $namenode/user/$USERNAME/$testDistcpOutputs");
     assertTrue("Distcp multiple sources failed", sh.getRet() == 0);
 
     // distcp source file (-f option)
     sh.exec("echo \"$namenode/user/$USERNAME/$dcpfile6\" > $distcp_sources", "echo \"$namenode/user/$USERNAME/$dcpfile7\" >> $distcp_sources");
     sh.exec("hadoop fs -put $distcp_sources $namenode/user/$USERNAME/$testDistcpInputs");
     sh.exec("rm -rf $distcp_sources");
-    sh.exec("hadoop distcp -f $namenode/user/$USERNAME/$testDistcpInputs/$distcp_sources $namenode/user/$USERNAME/$testDistcpOutputs"); 
+    sh.exec("hadoop distcp -f $namenode/user/$USERNAME/$testDistcpInputs/$distcp_sources $namenode/user/$USERNAME/$testDistcpOutputs");
     assertTrue("Distcp with a source file failed", sh.getRet() == 0);
 
     // confirm that copied files are the same as original files for multiple sources and source file
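
A note on the -f variant exercised above: distcp reads its source list from a file that itself lives on HDFS. A condensed sketch of that flow, with hypothetical namenode URI and paths:

    // Build a source-list file locally, stage it in HDFS, then hand it to distcp -f.
    sh.exec("echo 'hdfs://nn/user/me/in/a.txt' >  sources.txt",
            "echo 'hdfs://nn/user/me/in/b.txt' >> sources.txt")
    sh.exec("hadoop fs -put sources.txt /user/me/in/sources.txt")
    sh.exec("hadoop distcp -f hdfs://nn/user/me/in/sources.txt hdfs://nn/user/me/out")
    assert sh.getRet() == 0 : "distcp -f failed"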

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy
index f0b4436..41ee356 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy
@@ -27,13 +27,13 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.Configuration;
 
 public class TestFileAppend {
- 
+
   private static Shell sh = new Shell("/bin/bash -s");
   private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
   private static final String HADOOP_HOME = System.getenv('HADOOP_HOME');
   private static final String HADOOP_CONF_DIR = System.getenv('HADOOP_CONF_DIR');
   private static final String USERNAME = System.getProperty("user.name");
-  private static String date = sh.exec("date").getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+  private static String date = sh.exec("date").getOut().get(0).replaceAll("\\s", "").replaceAll(":", "");
   private static String testAppendInput = "testAppendInput$date";
   private static String testAppendOutput = "testAppendOutput$date";
   private static String namenode;
@@ -67,15 +67,15 @@ public class TestFileAppend {
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs -rmr -skipTrash $testAppendInput");
       assertTrue("Deletion of previous testAppendInputs from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
 
   }
 
   @Test
-  public void testAppendOnPreExistingFile() { 
+  public void testAppendOnPreExistingFile() {
     FileSystem fs = FileSystem.get(conf);
-    
+
     // setting paths for I/O stream creation
     String myInputPath = namenode + "/user/$USERNAME/$testAppendInput/appendinput2.txt$date";
     Path inFile = new Path(myInputPath);
@@ -83,7 +83,7 @@ public class TestFileAppend {
     String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/appendinput1.txt$date";
     Path outFile = new Path(myOutputPath);
     assertTrue("Output file not found", fs.exists(outFile));
-    
+
     FSDataInputStream input1 = fs.open(inFile);
     FSDataOutputStream output1 = fs.append(outFile);
 
@@ -100,7 +100,7 @@ public class TestFileAppend {
   @Test
   public void testAppendOnCreatedFile() {
     FileSystem fs = FileSystem.get(conf);
-    
+
     // setting paths for I/O stream creation
     String myOutputCreate = namenode + "/user/$USERNAME/$testAppendInput/appendinput3.txt$date";
     Path outCreate = new Path(myOutputCreate);
@@ -108,7 +108,7 @@ public class TestFileAppend {
     String myString = "-----TEST INPUT1-----\n";
     InputStream is = new ByteArrayInputStream(myString.getBytes());
     IOUtils.copyBytes(is, outputTemp, 4096, true);
- 
+
     String myInputPath = namenode + "/user/$USERNAME/$testAppendInput/appendinput2.txt$date";
     Path inFile = new Path(myInputPath);
     assertTrue("Input file not found", fs.exists(inFile));
@@ -127,8 +127,8 @@ public class TestFileAppend {
     assertTrue("Append did not work", sh.getRet() == 0);
     sh.exec("rm -rf $testAppendOutput", "rm -rf appendinput1.txt$date", "rm -rf appendinput2.txt$date");
     sh.exec("rm -rf appendCorrect.txt$date");
-    sh.exec("rm -rf appendinput3.txt$date"); 
- }
+    sh.exec("rm -rf appendinput3.txt$date");
+  }
 
 
   @Test
@@ -150,11 +150,11 @@ public class TestFileAppend {
     assertTrue("Input file not found", fs.exists(inFile));
     String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/3mboutput.file$date";
     Path outFile = new Path(myOutputPath);
-    assertTrue("Output file not found", fs.exists(outFile));  
+    assertTrue("Output file not found", fs.exists(outFile));
 
     FSDataInputStream input1 = fs.open(inFile);
     FSDataOutputStream output1 = fs.append(outFile);
-    
+
     // append
     IOUtils.copyBytes(input1, output1, 4096, true);
 
@@ -182,17 +182,17 @@ public class TestFileAppend {
     assertTrue("Input file not found", fs.exists(inFile));
     String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/test2.file$date";
     Path outFile = new Path(myOutputPath);
-    assertTrue("Output file not found", fs.exists(outFile));  
+    assertTrue("Output file not found", fs.exists(outFile));
 
     FSDataInputStream input1 = fs.open(inFile);
     FSDataOutputStream output1 = fs.append(outFile);
-    
+
     // append
-    IOUtils.copyBytes(input1, output1, 4096, true); 
-  
+    IOUtils.copyBytes(input1, output1, 4096, true);
+
     // running fsck
     shHDFS.exec("hadoop fsck /user/$USERNAME/$testAppendInput/test2.file$date");
-    Boolean success = shHDFS.getOut().get(shHDFS.getOut().size() - 1).contains("is HEALTHY");;
+    Boolean success = shHDFS.getOut().get(shHDFS.getOut().size() - 1).contains("is HEALTHY");
     assertTrue("Append made file unhealthy", success == true);
 
     sh.exec("rm -rf test1.file$date", "rm -rf test2.file$date");
@@ -220,7 +220,7 @@ public class TestFileAppend {
     assertTrue("Input file not found", fs.exists(inFile));
     String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/test4.file$date";
     Path outFile = new Path(myOutputPath);
-    assertTrue("Output file not found", fs.exists(outFile));  
+    assertTrue("Output file not found", fs.exists(outFile));
 
     FSDataInputStream input1 = fs.open(inFile);
     FSDataOutputStream output1 = fs.append(outFile);
@@ -232,7 +232,7 @@ public class TestFileAppend {
     try {
       FSDataOutputStream output2 = fs2.append(outFile);
       assertTrue("Should not have been able to open second output stream", false);
-      IOUtils.closeStream(output2); 
+      IOUtils.closeStream(output2);
     }
     catch (Exception e) {
     }
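
The empty catch above encodes HDFS's single-writer guarantee: a second client cannot open an append stream while the first still holds the file's lease. A minimal sketch of that expectation, assuming the same imports as the test file; the path is hypothetical and the file pre-existing:

    FileSystem fs  = FileSystem.get(conf)
    FileSystem fs2 = FileSystem.newInstance(conf)     // independent second client
    Path p = new Path("/tmp/append_demo.txt")         // hypothetical pre-existing file
    FSDataOutputStream out1 = fs.append(p)
    try {
      fs2.append(p)                                   // should fail: lease held by out1
      assert false : "second append stream should have been rejected"
    } catch (IOException expected) {
      // expected: lease / already-being-created error from the namenode
    } finally {
      IOUtils.closeStream(out1)
    }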

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFsck.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFsck.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFsck.groovy
index 62efd7c..040c3b5 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFsck.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFsck.groovy
@@ -26,7 +26,7 @@ import static org.apache.bigtop.itest.LogErrorsUtils.logError
  * Tests the HDFS fsck command.
  */
 public class TestFsck {
-  static Shell shHDFS = new Shell("/bin/bash", "hdfs" )
+  static Shell shHDFS = new Shell("/bin/bash", "hdfs")
   String[] fsckCmds = [
     "hdfs fsck /",
     "hdfs fsck -move /",

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSBalancer.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSBalancer.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSBalancer.groovy
index e0fca84..f22b005 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSBalancer.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSBalancer.groovy
@@ -27,7 +27,7 @@ import org.apache.bigtop.itest.JarContent;
 import org.apache.bigtop.itest.shell.Shell;
 
 public class TestHDFSBalancer {
- 
+
   private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
   // set with -Dthreshold
   private static String thresh = "10";
@@ -35,10 +35,10 @@ public class TestHDFSBalancer {
   @BeforeClass
   public static void setUp() {
     // unpack resource
-    JarContent.unpackJarContainer(TestHDFSBalancer.class, "." , null);
+    JarContent.unpackJarContainer(TestHDFSBalancer.class, ".", null);
     if (System.getProperty("threshold") != null) {
       thresh = System.getProperty("threshold");
-    }  
+    }
   }
 
   @AfterClass
@@ -46,13 +46,13 @@ public class TestHDFSBalancer {
   }
 
   @Test
-  public void testBalancer() { 
+  public void testBalancer() {
     System.out.println("Running Balancer:");
-    System.out.println("Threshold is set to " + thresh +". Toggle by adding -Dthreshold=#");
+    System.out.println("Threshold is set to " + thresh + ". Toggle by adding -Dthreshold=#");
 
     // must run balancer as hdfs user
     shHDFS.exec("hdfs balancer -threshold $thresh");
-  
+
     boolean success = false;
     // success_string message signifies balancing worked correctly
     String success_string1 = "The cluster is balanced. Exiting..."
@@ -64,7 +64,7 @@ public class TestHDFSBalancer {
       String next_val = out_iter.next();
       if (next_val.equals(success_string1) || next_val.contains(success_string2) || next_val.contains(success_string3)) {
         success = true;
-       }
+      }
     }
 
     String failure_string1 = "namenodes = []"
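
For context on -threshold above: the balancer treats it as the maximum allowed deviation, in percentage points, of each datanode's utilization from the cluster-wide average; smaller values demand a more even spread and lengthen the run. To tighten the band in this test (value illustrative):

    shHDFS.exec("hdfs balancer -threshold 5")   // 5% band instead of the default 10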

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSQuota.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSQuota.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSQuota.groovy
index a2c0c57..a80f27b 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSQuota.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestHDFSQuota.groovy
@@ -24,17 +24,17 @@ import org.junit.Test;
 import org.apache.bigtop.itest.shell.Shell;
 
 public class TestHDFSQuota {
- 
+
   private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
   private static Shell sh = new Shell("/bin/bash");
   private static final long LARGE = Long.MAX_VALUE - 1;
   private static final String USERNAME = System.getProperty("user.name");
-  private static String quotaDate = shHDFS.exec("date").getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+  private static String quotaDate = shHDFS.exec("date").getOut().get(0).replaceAll("\\s", "").replaceAll(":", "");
   private static String testQuotaFolder = "/tmp/testQuotaFolder" + quotaDate;
   private static String testQuotaFolder1 = testQuotaFolder + "1";
   private static String testQuotaFolder2 = testQuotaFolder + "2";
   private static String testQuotaFolder3 = testQuotaFolder + "3";
-  
+
   @Before
   public void setUp() {
     // creating test folders
@@ -52,24 +52,24 @@ public class TestHDFSQuota {
     if (shHDFS.getRet() == 0) {
       shHDFS.exec("hadoop fs -rmr -skipTrash $testQuotaFolder1");
       assertTrue("Deletion of previous testQuotaFolder1 from HDFS failed",
-          shHDFS.getRet() == 0);
+        shHDFS.getRet() == 0);
     }
     shHDFS.exec("hadoop fs -test -e $testQuotaFolder2");
     if (shHDFS.getRet() == 0) {
       shHDFS.exec("hadoop fs -rmr -skipTrash $testQuotaFolder2");
       assertTrue("Deletion of previous testQuotaFolder2 from HDFS failed",
-          shHDFS.getRet() == 0);
+        shHDFS.getRet() == 0);
     }
     sh.exec("hadoop fs -test -e $testQuotaFolder1");
     if (sh.getRet() == 0) {
       sh.exec("hadoop fs -rmr -skipTrash $testQuotaFolder1");
       assertTrue("Deletion of previous testQuotaFolder1 from HDFS failed",
-          sh.getRet() == 0);
+        sh.getRet() == 0);
     }
   }
 
   @Test
-  public void testNewlyCreatedDir() { 
+  public void testNewlyCreatedDir() {
     // newly created dir should have no name quota, no space quota
     shHDFS.exec("hadoop fs -count -q $testQuotaFolder1");
     assertTrue("Could not use count command", shHDFS.getRet() == 0);
@@ -78,10 +78,10 @@ public class TestHDFSQuota {
     assertTrue("Newly created directory had a set name quota left", output[1].equals("inf"));
     assertTrue("Newly created directory had a set space quota", output[2].equals("none"));
     assertTrue("Newly created directory had a set space quota left", output[3].equals("inf"));
-  } 
+  }
 
   @Test
-  public void testAdminPermissions() { 
+  public void testAdminPermissions() {
     // admin setting quotas should succeed
     shHDFS.exec("hadoop dfsadmin -setQuota 10 $testQuotaFolder1");
     assertTrue("setQuota failed", shHDFS.getRet() == 0);
@@ -103,10 +103,10 @@ public class TestHDFSQuota {
     assertTrue("clrQuota failed", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop dfsadmin -clrSpaceQuota $testQuotaFolder1");
     assertTrue("clrSpaceQuota failed", shHDFS.getRet() == 0);
-  } 
+  }
 
   @Test
-  public void testRename() { 
+  public void testRename() {
     // name and space quotas stick after rename
     shHDFS.exec("hadoop fs -count -q $testQuotaFolder1");
     assertTrue("Could not use count command", shHDFS.getRet() == 0);
@@ -124,16 +124,16 @@ public class TestHDFSQuota {
   }
 
   @Test
-  public void testInputValues() { 
+  public void testInputValues() {
     // the largest allowable quota size is Long.MAX_VALUE and it must be greater than zero
     shHDFS.exec("hadoop dfsadmin -setQuota -1 $testQuotaFolder1");
     assertTrue("setQuota should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota -1 $testQuotaFolder1");
-    assertTrue("setSpaceQuota should not have worked", shHDFS.getRet() != 0);  
+    assertTrue("setSpaceQuota should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setQuota 1.04 $testQuotaFolder1");
     assertTrue("setQuota should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 1.04 $testQuotaFolder1");
-    assertTrue("setSpaceQuota should not have worked", shHDFS.getRet() != 0);        
+    assertTrue("setSpaceQuota should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setQuota 0 $testQuotaFolder1");
     assertTrue("setQuota should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 0 $testQuotaFolder1");
@@ -158,11 +158,11 @@ public class TestHDFSQuota {
   }
 
   @Test
-  public void testQuotasPostViolation() {  
+  public void testQuotasPostViolation() {
     // quota can be set even if it violates
     shHDFS.exec("hadoop dfsadmin -setQuota $LARGE $testQuotaFolder1");
     assertTrue("Could not setQuota", shHDFS.getRet() == 0);
-    shHDFS.exec("hadoop fs -put - $testQuotaFolder1" + "/testString1", "-------TEST STRING--------"); 
+    shHDFS.exec("hadoop fs -put - $testQuotaFolder1" + "/testString1", "-------TEST STRING--------");
     assertTrue("Could not use put command", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop fs -mkdir $testQuotaFolder1" + "/sample1");
     assertTrue("Could not use mkdir command", shHDFS.getRet() == 0);
@@ -180,8 +180,8 @@ public class TestHDFSQuota {
     assertTrue("mkdir should not have worked", shHDFS.getRet() != 0);
 
     // file creation should fail - name quota
-    shHDFS.exec("hadoop fs -rmr $testQuotaFolder1" + "/testString1"); 
-    shHDFS.exec("hadoop fs -put - $testQuotaFolder1" + "/testString2", "-------TEST STRING--------"); 
+    shHDFS.exec("hadoop fs -rmr $testQuotaFolder1" + "/testString1");
+    shHDFS.exec("hadoop fs -put - $testQuotaFolder1" + "/testString2", "-------TEST STRING--------");
     assertTrue("put should not have worked", shHDFS.getRet() != 0);
 
     // file creation should fail - space quota
@@ -189,8 +189,8 @@ public class TestHDFSQuota {
     assertTrue("Could not setSpaceQuota", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop dfsadmin -setQuota 1000 $testQuotaFolder1");
     assertTrue("Could not setQuota", shHDFS.getRet() == 0);
-    shHDFS.exec("hadoop fs -put - $testQuotaFolder1"  + "/testString3", "-------TEST STRING--------"); 
-    assertTrue("put should not have worked", shHDFS.getRet() != 0); 
+    shHDFS.exec("hadoop fs -put - $testQuotaFolder1" + "/testString3", "-------TEST STRING--------");
+    assertTrue("put should not have worked", shHDFS.getRet() != 0);
   }
 
   //@Test - can be reinstated upon resolution of BIGTOP-635 due to restarting of hdfs service
@@ -202,37 +202,35 @@ public class TestHDFSQuota {
     shHDFS.exec("hadoop fs -put - $date" + "/testString1", "-------TEST STRING--------");
     assertTrue("Could not use put command", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop dfsadmin -setQuota 1 $date");
-    assertTrue("Could not setQuota", shHDFS.getRet() == 0); 
+    assertTrue("Could not setQuota", shHDFS.getRet() == 0);
     shHDFS.exec("date");
-    String date1 = "logTest" + shHDFS.getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+    String date1 = "logTest" + shHDFS.getOut().get(0).replaceAll("\\s", "").replaceAll(":", "");
     shHDFS.exec("hadoop fs -mkdir $date1");
     assertTrue("Could not use mkdir command", shHDFS.getRet() == 0);
-    shHDFS.exec("hadoop fs -put - $date1"  + "/testString2", "-------TEST STRING--------"); 
+    shHDFS.exec("hadoop fs -put - $date1" + "/testString2", "-------TEST STRING--------");
     assertTrue("Could not use put command", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 1 $date1");
-    assertTrue("Could not setSpaceQuota", shHDFS.getRet() == 0); 
+    assertTrue("Could not setSpaceQuota", shHDFS.getRet() == 0);
     shHDFS.exec("for service in /etc/init.d/hadoop-hdfs-*; do sudo \$service stop; done");
     shHDFS.exec("for service in /etc/init.d/hadoop-hdfs-*; do sudo \$service start; done");
     shHDFS.exec("grep \"Quota violation in image for //user/hdfs/$date\" /var/log/hadoop-hdfs/hadoop-hdfs-namenode*.log");
     if (shHDFS.getOut().isEmpty()) {
       assertTrue("Log was not written", 1 == 0);
-    }
-    else {
+    } else {
       assertTrue(shHDFS.getOut().get(0).contains(date));
     }
     shHDFS.exec("grep \"Quota violation in image for //user/hdfs/$date1\" /var/log/hadoop-hdfs/hadoop-hdfs-namenode*.log");
     if (shHDFS.getOut().isEmpty()) {
       assertTrue("Log was not written", 1 == 0);
-    }
-    else {
+    } else {
       assertTrue(shHDFS.getOut().get(0).contains(date1));
     }
-    
+
     shHDFS.exec("hadoop fs -rmr $date1");
     // the following while loop is needed because the namenode stays in safe mode for about 15 seconds after being restarted
     while (shHDFS.getErr().get(0).contains("safe mode") || (shHDFS.getErr().size() > 1 && shHDFS.getErr().get(1).contains("safe mode"))) {
-          shHDFS.exec("hadoop fs -rmr $date1");
-    } 
+      shHDFS.exec("hadoop fs -rmr $date1");
+    }
   }
 
   @Test
@@ -246,7 +244,7 @@ public class TestHDFSQuota {
     shHDFS.exec("hadoop dfsadmin -setQuota 1000 $date/testString1");
     assertTrue("setting quota on a file should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 1000 $date/testString1");
-    assertTrue("setting quota on a file should not have worked", shHDFS.getRet() != 0); 
+    assertTrue("setting quota on a file should not have worked", shHDFS.getRet() != 0);
 
     // Errors when clearing quotas on a file
     shHDFS.exec("hadoop dfsadmin -clrQuota $date/testString1");
@@ -256,15 +254,15 @@ public class TestHDFSQuota {
 
     // set/clr quota on a nonexistent directory
     shHDFS.exec("hadoop dfsadmin -setQuota 100 DIRECTORYDOESNOTEXIST" + date);
-    assertTrue("setting quota on non-existant directory should not have worked", shHDFS.getRet() != 0); 
+    assertTrue("setting quota on non-existant directory should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 100 DIRECTORYDOESNOTEXIST" + date);
-    assertTrue("setting quota on non-existant directory should not have worked", shHDFS.getRet() != 0); 
+    assertTrue("setting quota on non-existant directory should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -clrQuota DIRECTORYDOESNOTEXIST" + date);
-    assertTrue("clearing quota on non-existant directory should not have worked", shHDFS.getRet() != 0); 
+    assertTrue("clearing quota on non-existant directory should not have worked", shHDFS.getRet() != 0);
     shHDFS.exec("hadoop dfsadmin -clrSpaceQuota DIRECTORYDOESNOTEXIST" + date);
-    assertTrue("clearing quota on non-existant directory should not have worked", shHDFS.getRet() != 0); 
+    assertTrue("clearing quota on non-existant directory should not have worked", shHDFS.getRet() != 0);
 
-    shHDFS.exec("hadoop fs -rmr $date"); 
+    shHDFS.exec("hadoop fs -rmr $date");
   }
 
   @Test
@@ -272,29 +270,29 @@ public class TestHDFSQuota {
     // increasing/decreasing replication factor of a file should debit/credit quota
     String repFolder = "/tmp/repFactorTest" + quotaDate;
     shHDFS.exec("hadoop fs -mkdir $repFolder");
-    assertTrue("Could not use mkdir command", shHDFS.getRet() == 0);    
-    shHDFS.exec("hadoop fs -put - $repFolder" + "/testString1" , "-------TEST STRING--------");
+    assertTrue("Could not use mkdir command", shHDFS.getRet() == 0);
+    shHDFS.exec("hadoop fs -put - $repFolder" + "/testString1", "-------TEST STRING--------");
     assertTrue("Could not use put command", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop dfsadmin -setSpaceQuota 1000 $repFolder");
-    assertTrue("Could not setQuota", shHDFS.getRet() == 0); 
+    assertTrue("Could not setQuota", shHDFS.getRet() == 0);
     shHDFS.exec("hadoop fs -setrep 1 $repFolder/testString1");
     shHDFS.exec("hadoop fs -count -q $repFolder");
     assertTrue("Could not use count command", shHDFS.getRet() == 0);
-    String[] output = shHDFS.getOut().get(0).trim().split();   
+    String[] output = shHDFS.getOut().get(0).trim().split();
     int size_of_one = Integer.parseInt(output[2]) - Integer.parseInt(output[3]);
     shHDFS.exec("hadoop fs -setrep 5 $repFolder/testString1");
     shHDFS.exec("hadoop fs -count -q $repFolder");
     assertTrue("Could not use count command", shHDFS.getRet() == 0);
-    output = shHDFS.getOut().get(0).trim().split();   
+    output = shHDFS.getOut().get(0).trim().split();
     int size_of_five = Integer.parseInt(output[2]) - Integer.parseInt(output[3]);
     assertTrue("Quota not debited correctly", size_of_one * 5 == size_of_five);
     shHDFS.exec("hadoop fs -setrep 3 $repFolder/testString1");
     shHDFS.exec("hadoop fs -count -q $repFolder");
     assertTrue("Could not use count command", shHDFS.getRet() == 0);
-    output = shHDFS.getOut().get(0).trim().split();   
+    output = shHDFS.getOut().get(0).trim().split();
     int size_of_three = Integer.parseInt(output[2]) - Integer.parseInt(output[3]);
     assertTrue("Quota not credited correctly", size_of_one * 3 == size_of_three);
-    shHDFS.exec("hadoop fs -rmr $repFolder"); 
+    shHDFS.exec("hadoop fs -rmr $repFolder");
   }
 
 }
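
The replication-factor test above works out because HDFS charges fileSize x replication against the space quota, so the used-space delta scales linearly with the replication factor. A worked check with a hypothetical size:

    long fileSize   = 28                  // bytes actually written (hypothetical)
    long usedAtRep1 = fileSize * 1        // space-quota usage at replication 1
    long usedAtRep5 = fileSize * 5        // usage at replication 5
    long usedAtRep3 = fileSize * 3        // usage at replication 3
    assert usedAtRep1 * 5 == usedAtRep5   // matches size_of_one * 5 == size_of_five
    assert usedAtRep1 * 3 == usedAtRep3   // matches size_of_one * 3 == size_of_three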

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTextSnappy.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTextSnappy.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTextSnappy.groovy
index 12e655e..52df9cb 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTextSnappy.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTextSnappy.groovy
@@ -30,10 +30,10 @@ class TestTextSnappy {
   static String snappyFile = "part-00001.snappy"
 
   @BeforeClass
-  static void  setUp() throws IOException {
+  static void setUp() throws IOException {
     sh.exec(
-    "hadoop fs  -mkdir ${testDir}",
-    "hadoop fs -put ${snappyFile} ${testDir}/${snappyFile}",
+      "hadoop fs  -mkdir ${testDir}",
+      "hadoop fs -put ${snappyFile} ${testDir}/${snappyFile}",
     )
     logError(sh)
   }


[2/5] bigtop git commit: BIGTOP-1601. cleanup whitespaces across test-artifacts

Posted by db...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestSLive.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestSLive.groovy b/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestSLive.groovy
index de03dad..a242910 100644
--- a/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestSLive.groovy
+++ b/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestSLive.groovy
@@ -37,12 +37,12 @@ import static org.junit.Assert.assertTrue
 public class TestSLive {
   static Shell sh = new Shell("/bin/bash -s")
   private static final String hadoopMapReduceHome =
-      System.getProperty('HADOOP_MAPRED_HOME', '/usr/lib/hadoop-mapreduce')
+    System.getProperty('HADOOP_MAPRED_HOME', '/usr/lib/hadoop-mapreduce')
   private static final String Slive_jar =
     JarContent.getJarName(hadoopMapReduceHome,
-            'hadoop.mapreduce.client.jobclient.*.tests.jar')
+      'hadoop.mapreduce.client.jobclient.*.tests.jar')
   private static final String SLIVE_JAR =
-    hadoopMapReduceHome  + "/" + Slive_jar
+    hadoopMapReduceHome + "/" + Slive_jar
   private static final int SLEEP_TIMEOUT = 5000
   private static final String SLIVE_OUTPUT_FILE = "/test/slive/slive/output"
   private static final String SLIVE_ROOT_FILE = "/test/slive"
@@ -57,7 +57,7 @@ public class TestSLive {
   @BeforeClass
   static void setUp() throws IOException {
     assertNotNull("Can't find hadoop.mapreduce.client.jobclient.tests.jar",
-            Slive_jar)
+      Slive_jar)
     final String numSliveFiles = System.getProperty("numSliveFiles", "100")
     final String writeSize = System.getProperty("writeSize", "20480,20480")
     final String readSize = System.getProperty("readSize", "20480,20480")
@@ -65,30 +65,30 @@ public class TestSLive {
     final String blockSize = System.getProperty("blockSize", "10240,10240")
 
     String SLIVE_TEMPLATE = "hadoop jar %s SliveTest -create %s -delete %s " +
-            "-rename %s -read %s -append %s -ls %s -mkdir %s -files %s " +
-            "-writeSize %s -readSize %s -appendSize %s -blockSize %s -resFile %s"
+      "-rename %s -read %s -append %s -ls %s -mkdir %s -files %s " +
+      "-writeSize %s -readSize %s -appendSize %s -blockSize %s -resFile %s"
     sliveCmds = [
       String.format(SLIVE_TEMPLATE, SLIVE_JAR, 100, 0, 0, 0, 0, 0, 0,
-              numSliveFiles, writeSize, readSize, appendSize, blockSize,
-              "sliveOutputcreate.txt"), //create
+        numSliveFiles, writeSize, readSize, appendSize, blockSize,
+        "sliveOutputcreate.txt"), //create
       String.format(SLIVE_TEMPLATE, SLIVE_JAR, 0, 0, 100, 0, 0, 0, 0,
-              numSliveFiles, writeSize, readSize, appendSize, blockSize,
-              "sliveOutputrename.txt"), //rename
+        numSliveFiles, writeSize, readSize, appendSize, blockSize,
+        "sliveOutputrename.txt"), //rename
       String.format(SLIVE_TEMPLATE, SLIVE_JAR, 0, 0, 0, 100, 0, 0, 0,
-              numSliveFiles, writeSize, readSize, appendSize, blockSize,
-              "sliveOutputread.txt"), //read
+        numSliveFiles, writeSize, readSize, appendSize, blockSize,
+        "sliveOutputread.txt"), //read
       String.format(SLIVE_TEMPLATE, SLIVE_JAR, 0, 0, 0, 0, 100, 0, 0,
-              numSliveFiles, writeSize, readSize, appendSize, blockSize,
-              "sliveOutputappend.txt"), //append
+        numSliveFiles, writeSize, readSize, appendSize, blockSize,
+        "sliveOutputappend.txt"), //append
       String.format(SLIVE_TEMPLATE, SLIVE_JAR, 0, 0, 0, 0, 0, 100, 0,
-              numSliveFiles, writeSize, readSize, appendSize, blockSize,
-              "sliveOutputls.txt"), //ls
+        numSliveFiles, writeSize, readSize, appendSize, blockSize,
+        "sliveOutputls.txt"), //ls
       String.format(SLIVE_TEMPLATE, SLIVE_JAR, 0, 100, 0, 0, 0, 0, 0,
-              numSliveFiles, writeSize, readSize, appendSize, blockSize,
-              "sliveOutputdelete.txt"), //delete
+        numSliveFiles, writeSize, readSize, appendSize, blockSize,
+        "sliveOutputdelete.txt"), //delete
       String.format(SLIVE_TEMPLATE, SLIVE_JAR, 20, 0, 0, 20, 20, 20, 20,
-              numSliveFiles, writeSize, readSize, appendSize, blockSize,
-              "sliveOutputmix.txt") //mix
+        numSliveFiles, writeSize, readSize, appendSize, blockSize,
+        "sliveOutputmix.txt") //mix
     ]
   }
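
For readers decoding SLIVE_TEMPLATE above: the first seven numeric arguments after the jar are per-operation percentage weights, in the order create, delete, rename, read, append, ls, mkdir; each entry in sliveCmds turns one of them (or, for the mix case, several) on. The pure-read entry, for instance, expands to:

    // 100% read, all other operations weighted 0 (names as in setUp above).
    String readCmd = String.format(SLIVE_TEMPLATE, SLIVE_JAR,
        0, 0, 0, 100, 0, 0, 0,
        numSliveFiles, writeSize, readSize, appendSize, blockSize,
        "sliveOutputread.txt")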
 
@@ -105,10 +105,10 @@ public class TestSLive {
 
   @Test
   public void testSlive() {
-    if(FailureVars.instance.getRunFailures()
-        || FailureVars.instance.getServiceRestart()
-        || FailureVars.instance.getServiceKill()
-        || FailureVars.instance.getNetworkShutdown()) {
+    if (FailureVars.instance.getRunFailures()
+      || FailureVars.instance.getServiceRestart()
+      || FailureVars.instance.getServiceKill()
+      || FailureVars.instance.getNetworkShutdown()) {
       runFailureThread();
     }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/mahout/src/main/groovy/org/apache/bigtop/itest/mahout/smoke/TestMahoutExamples.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/mahout/src/main/groovy/org/apache/bigtop/itest/mahout/smoke/TestMahoutExamples.groovy b/bigtop-tests/test-artifacts/mahout/src/main/groovy/org/apache/bigtop/itest/mahout/smoke/TestMahoutExamples.groovy
index e67b6a6..a4ed8df 100644
--- a/bigtop-tests/test-artifacts/mahout/src/main/groovy/org/apache/bigtop/itest/mahout/smoke/TestMahoutExamples.groovy
+++ b/bigtop-tests/test-artifacts/mahout/src/main/groovy/org/apache/bigtop/itest/mahout/smoke/TestMahoutExamples.groovy
@@ -16,6 +16,7 @@
 * limitations under the License.
 */
 package org.apache.bigtop.itest.mahout.smoke;
+
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
@@ -28,251 +29,250 @@ import org.apache.bigtop.itest.JarContent;
 import org.apache.bigtop.itest.shell.Shell;
 
 /**
-* Test Mahout examples shipped with the distribution.
-*/
+ * Test Mahout examples shipped with the distribution.
+ */
 public class TestMahoutExamples {
-    public static final String TEMP_DIR = "/tmp/mahout.${(new Date().getTime())}";
-    public static final String WORK_DIR = TEMP_DIR;
-
-    /**
-    * If MAHOUT_HOME is supplied, use that as the executable.  Else, use
-    * mahout.  This eases the testing of tarball installations and other scenarios
-    * where possible more than one version of an ecosystem component is available.
-    */
-    public static String MAHOUT_HOME = System.getenv("MAHOUT_HOME") ;
-    public static String MAHOUT = MAHOUT_HOME ? MAHOUT_HOME+"/bin/mahout":"mahout"
-
-    private static Shell sh = new Shell("/bin/bash -s");
-    public static String download_dir = System.getProperty("mahout.examples.resources.download.path") ?: "/tmp" ;
-    
-
-    /**
-    *  Mahout smokes rely on a lot of external files.  So we
-    *  modularize the downloads into a single function, so that
-    *  the setup is easier to debug.  If any download results in a
-    *  small file (i.e. due to 404 or 500 error), assertion will fail
-    *  before the smokes actually start.
-    */
-    public static void download(){
-
-        //key value pairs : data file > url that file resides on.
-        def urlmap = [
-        "20news-bydate.tar.gz":
-        "http://people.csail.mit.edu/jrennie/20Newsgroups/20news-bydate.tar.gz" ,
-
-        "reuters21578.tar.gz":
+  public static final String TEMP_DIR = "/tmp/mahout.${(new Date().getTime())}";
+  public static final String WORK_DIR = TEMP_DIR;
+
+  /**
+   * If MAHOUT_HOME is supplied, use that as the executable.  Else, use
+   * mahout.  This eases the testing of tarball installations and other scenarios
+   * where more than one version of an ecosystem component may be available.
+   */
+  public static String MAHOUT_HOME = System.getenv("MAHOUT_HOME");
+  public static String MAHOUT = MAHOUT_HOME ? MAHOUT_HOME + "/bin/mahout" : "mahout"
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  public static String download_dir = System.getProperty("mahout.examples.resources.download.path") ?: "/tmp";
+
+  /**
+   *  Mahout smokes rely on a lot of external files.  So we
+   *  modularize the downloads into a single function, so that
+   *  the setup is easier to debug.  If any download results in a
+   *  small file (e.g. due to a 404 or 500 error), the assertion will fail
+   *  before the smokes actually start.
+   */
+  public static void download() {
+
+    //key value pairs : data file > url that file resides on.
+    def urlmap = [
+      "20news-bydate.tar.gz":
+        "http://people.csail.mit.edu/jrennie/20Newsgroups/20news-bydate.tar.gz",
+
+      "reuters21578.tar.gz":
         "http://kdd.ics.uci.edu/databases/reuters21578/reuters21578.tar.gz",
 
-        "synthetic_control.data":
+      "synthetic_control.data":
         "http://archive.ics.uci.edu/ml/databases/synthetic_control/synthetic_control.data",
 
-        "ml-1m.zip":
+      "ml-1m.zip":
         "http://files.grouplens.org/papers/ml-1m.zip"
-        ];
-        //For each url above, download it.
-        urlmap.each() {
-            f_name,loc ->
-            sh.exec("if [ ! -f ${download_dir}/${f_name} ]; then " +
-            "curl ${loc} -o ${download_dir}/${f_name}; " +
-            "fi");
-            File file = new File("${download_dir}/${f_name}");
-
-            assertTrue("file "+ f_name + " at  "+loc + " len=" + file.length() + " is > 5k bytes", file.length() > 5000 );
-        }
-
-    }
-
-    /**
-    * Individual tests (i.e. movie lens factorizer) will selectively copy this directory into the
-    * distributed file system & then run tests against it (i.e. movie lens factorizer uses "fs -put" after
-    * formatting a csv file in the tmp dir).
-    */
-    @BeforeClass
-    public static void setUp() {
-        download();
-        
-        // uncompress archives
-        sh.exec("mkdir ${TEMP_DIR}",
-        "cd ${TEMP_DIR}",
-        //Create news-date data dir :: input for classifier test
-        "mkdir 20news-bydate",
-        "cd 20news-bydate",
-        "tar xzf ${download_dir}/20news-bydate.tar.gz",
-        "cd ..",
-        //Create news-all data directory :: input for LDA test
-        "mkdir 20news-all",
-        "cp -R 20news-bydate/*/* 20news-all",
-        "mkdir reuters-sgm",
-        "cd reuters-sgm",
-        "tar xzf ${download_dir}/reuters21578.tar.gz",
-        "cd ..",
-        //Create movie lens data directory :: input data for movie recommender test
-        "mkdir movielens",
-        "cd movielens",
-        "unzip ${download_dir}/ml-1m.zip");
-        assertEquals("Failed to uncompress archives", 0, sh.getRet());
-        sh.exec("hadoop fs -mkdir ${WORK_DIR}");
-        assertEquals("Unable to create work dir in HCFS", 0, sh.getRet());
-        rmr("temp");
-    }
-
-    /**
-    * Run method that tests for 0 return code and logs the entire command.
-    */
-    public void assertRun(String mahoutJob){
-        final String cmd = MAHOUT+" "+mahoutJob;
-
-        //Cat the commands to a central file thats easy to tail.
-        //TODO a simpler
-        sh.exec("echo \""+cmd+"\" >> /var/log/mahout.smoke");
-        sh.exec(cmd);
-        assertEquals("non-zero return! :::: "+cmd + " :::: out= " + sh.out + " :::: err= "+sh.err, 0, sh.getRet());
-    }
-
-    @AfterClass
-    public static void tearDown() {
-        sh.exec("rm -rf ${TEMP_DIR}",
-        "hadoop fs -rmr ${WORK_DIR}");
-    }
-
-    private static void rmr(String path) {
-        sh.exec("hadoop fs -test -e $path");
-        if (sh.getRet() == 0) {
-            sh.exec("hadoop fs -rmr -skipTrash $path");
-            assertEquals("Deletion of $path from the underlying FileSystem failed", 0, sh.getRet());
-        }
+    ];
+    //For each url above, download it.
+    urlmap.each() {
+      f_name, loc ->
+        sh.exec("if [ ! -f ${download_dir}/${f_name} ]; then " +
+          "curl ${loc} -o ${download_dir}/${f_name}; " +
+          "fi");
+        File file = new File("${download_dir}/${f_name}");
+
+        assertTrue("file " + f_name + " at  " + loc + " len=" + file.length() + " is > 5k bytes", file.length() > 5000);
     }
 
-    @After
-    public void killHangingProcess() {
-        sh.exec("mapred job -list | grep 'Total jobs:0'");
-        if (sh.getRet() == 0) {
-            sh.exec("for jobid in `mapred job -list | grep 'RUNNING' |awk '{print \$1}'`;",
-            "do mapred job -kill \${jobid};",
-            "done");
-        }
+  }
+
+  /**
+   * Individual tests (e.g. the movie lens factorizer) will selectively copy this directory into the
+   * distributed file system & then run tests against it (e.g. the movie lens factorizer uses "fs -put" after
+   * formatting a csv file in the tmp dir).
+   */
+  @BeforeClass
+  public static void setUp() {
+    download();
+
+    // uncompress archives
+    sh.exec("mkdir ${TEMP_DIR}",
+      "cd ${TEMP_DIR}",
+      //Create news-date data dir :: input for classifier test
+      "mkdir 20news-bydate",
+      "cd 20news-bydate",
+      "tar xzf ${download_dir}/20news-bydate.tar.gz",
+      "cd ..",
+      //Create news-all data directory :: input for LDA test
+      "mkdir 20news-all",
+      "cp -R 20news-bydate/*/* 20news-all",
+      "mkdir reuters-sgm",
+      "cd reuters-sgm",
+      "tar xzf ${download_dir}/reuters21578.tar.gz",
+      "cd ..",
+      //Create movie lens data directory :: input data for movie recommender test
+      "mkdir movielens",
+      "cd movielens",
+      "unzip ${download_dir}/ml-1m.zip");
+    assertEquals("Failed to uncompress archives", 0, sh.getRet());
+    sh.exec("hadoop fs -mkdir ${WORK_DIR}");
+    assertEquals("Unable to create work dir in HCFS", 0, sh.getRet());
+    rmr("temp");
+  }
+
+  /**
+   * Run method that tests for 0 return code and logs the entire command.
+   */
+  public void assertRun(String mahoutJob) {
+    final String cmd = MAHOUT + " " + mahoutJob;
+
+    //Cat the commands to a central file that's easy to tail.
+    //TODO a simpler
+    sh.exec("echo \"" + cmd + "\" >> /var/log/mahout.smoke");
+    sh.exec(cmd);
+    assertEquals("non-zero return! :::: " + cmd + " :::: out= " + sh.out + " :::: err= " + sh.err, 0, sh.getRet());
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("rm -rf ${TEMP_DIR}",
+      "hadoop fs -rmr ${WORK_DIR}");
+  }
+
+  private static void rmr(String path) {
+    sh.exec("hadoop fs -test -e $path");
+    if (sh.getRet() == 0) {
+      sh.exec("hadoop fs -rmr -skipTrash $path");
+      assertEquals("Deletion of $path from the underlying FileSystem failed", 0, sh.getRet());
     }
-
-    //iterations for factorizer, original value was "10",
-    //on a small 4 node cluster, 2 iterations
-    //should complete in about 5 minutes or so.
-    static final int ITERATIONS=2;
-
-    /**
-    * This is the full workflow for creating recommendations based on movie
-    * ratings including creating training/test data, ALS for training, evaluating
-    * the ALS, and then outputting final movie recommendations for users.
-    */
-    @Test(timeout=12000000L)
-    public void factorizeMovieLensRatings() {
-        // convert ratings
-        sh.exec("cat ${TEMP_DIR}/movielens/ml-1m/ratings.dat |sed -e s/::/,/g| cut -d, -f1,2,3 > ${TEMP_DIR}/movielens/ratings.csv");
-        assertEquals("Unexpected error from converting ratings", 0, sh.getRet());
-
-        // put ratings in hdfs
-        sh.exec("hadoop fs -mkdir ${WORK_DIR}/movielens",
-        "hadoop fs -put ${TEMP_DIR}/movielens/ratings.csv ${WORK_DIR}/movielens/ratings.csv");
-        assertEquals("Unable to put movielens/ratings.csv in hdfs", 0, sh.getRet());
-
-        //create a 90% percent training set and a 10% probe set
-        assertRun("splitDataset --input ${WORK_DIR}/movielens/ratings.csv --output ${WORK_DIR}/dataset " +
-        "--trainingPercentage 0.9 --probePercentage 0.1 --tempDir ${WORK_DIR}/dataset/tmp");
-
-        //Default iterations was 10, but for simple smokes that most might run,
-        //2 iterations will confirm enough to move on.
-
-        //run distributed ALS-WR to factorize the rating matrix based on the training set
-        
-        assertRun("parallelALS --input ${WORK_DIR}/dataset/trainingSet/ --output ${WORK_DIR}/als/out " +
-        "--tempDir ${WORK_DIR}/als/tmp --numFeatures 20 --numIterations ${ITERATIONS} --lambda 0.065");
-
-        //remove this
-        sh.exec("hadoop fs -ls ${WORK_DIR}/als/out >> /tmp/mahoutdebug");
-        //compute predictions against the probe set, measure the error
-        assertRun("evaluateFactorization --output ${WORK_DIR}/als/rmse --input ${WORK_DIR}/dataset/probeSet/ " +
-        "--userFeatures ${WORK_DIR}/als/out/U/ --itemFeatures ${WORK_DIR}/als/out/M/ --tempDir ${WORK_DIR}/als/tmp");
-
-        //compute recommendations
-        assertRun("recommendfactorized --input ${WORK_DIR}/als/out/userRatings/ --output ${WORK_DIR}/recommendations " +
-        "--userFeatures ${WORK_DIR}/als/out/U/ --itemFeatures ${WORK_DIR}/als/out/M/ " +
-        "--numRecommendations 6 --maxRating 5");
-
-        // check that error has been calculated
-        assertEquals("${WORK_DIR}/als/rmse/rmse.txt does not exist", 0, sh.getRet());
-        // print the error
-        sh.exec("hadoop fs -cat ${WORK_DIR}/als/rmse/rmse.txt");
-        assertEquals("Unexpected error from running hadoop", 0, sh.getRet());
-
-        // check that recommendations has been calculated
-        sh.exec("hadoop fs -test -e ${WORK_DIR}/recommendations/part-m-00000");
-        assertEquals("${WORK_DIR}/recommendations/part-m-00000 does not exist", 0, sh.getRet());
+  }
+
+  @After
+  public void killHangingProcess() {
+    sh.exec("mapred job -list | grep 'Total jobs:0'");
+    if (sh.getRet() == 0) {
+      sh.exec("for jobid in `mapred job -list | grep 'RUNNING' |awk '{print \$1}'`;",
+        "do mapred job -kill \${jobid};",
+        "done");
     }
-
-    /**
-    * Alternative to parameterized test: this is a test that is implemented by each
-    * individual clustering test.
-    *
-    * Explanation of clustering tests:
-    *
-    * Each of the below tests runs a different clustering algorithm against the same
-    * input data set, against synthesize "control" data.  "Control data" is data that shows
-    * the time series performance of a process.  For example, a cellphone company
-    * might want to run this to find which regions have decreasing performance over time (i.e. due to increased population),
-    * versus places which have cyclic performance (i.e. due to weather).
+  }
+
+  //iterations for factorizer, original value was "10",
+  //on a small 4 node cluster, 2 iterations
+  //should complete in about 5 minutes or so.
+  static final int ITERATIONS = 2;
+
+  /**
+   * This is the full workflow for creating recommendations based on movie
+   * ratings including creating training/test data, ALS for training, evaluating
+   * the ALS, and then outputting final movie recommendations for users.
+   */
+  @Test(timeout = 12000000L)
+  public void factorizeMovieLensRatings() {
+    // convert ratings
+    sh.exec("cat ${TEMP_DIR}/movielens/ml-1m/ratings.dat |sed -e s/::/,/g| cut -d, -f1,2,3 > ${TEMP_DIR}/movielens/ratings.csv");
+    assertEquals("Unexpected error from converting ratings", 0, sh.getRet());
+
+    // put ratings in hdfs
+    sh.exec("hadoop fs -mkdir ${WORK_DIR}/movielens",
+      "hadoop fs -put ${TEMP_DIR}/movielens/ratings.csv ${WORK_DIR}/movielens/ratings.csv");
+    assertEquals("Unable to put movielens/ratings.csv in hdfs", 0, sh.getRet());
+
+    //create a 90% percent training set and a 10% probe set
+    assertRun("splitDataset --input ${WORK_DIR}/movielens/ratings.csv --output ${WORK_DIR}/dataset " +
+      "--trainingPercentage 0.9 --probePercentage 0.1 --tempDir ${WORK_DIR}/dataset/tmp");
+
+    //Default iterations was 10, but for simple smokes that most might run,
+    //2 iterations will confirm enough to move on.
+
+    //run distributed ALS-WR to factorize the rating matrix based on the training set
+
+    assertRun("parallelALS --input ${WORK_DIR}/dataset/trainingSet/ --output ${WORK_DIR}/als/out " +
+      "--tempDir ${WORK_DIR}/als/tmp --numFeatures 20 --numIterations ${ITERATIONS} --lambda 0.065");
+
+    //remove this
+    sh.exec("hadoop fs -ls ${WORK_DIR}/als/out >> /tmp/mahoutdebug");
+    //compute predictions against the probe set, measure the error
+    assertRun("evaluateFactorization --output ${WORK_DIR}/als/rmse --input ${WORK_DIR}/dataset/probeSet/ " +
+      "--userFeatures ${WORK_DIR}/als/out/U/ --itemFeatures ${WORK_DIR}/als/out/M/ --tempDir ${WORK_DIR}/als/tmp");
+
+    //compute recommendations
+    assertRun("recommendfactorized --input ${WORK_DIR}/als/out/userRatings/ --output ${WORK_DIR}/recommendations " +
+      "--userFeatures ${WORK_DIR}/als/out/U/ --itemFeatures ${WORK_DIR}/als/out/M/ " +
+      "--numRecommendations 6 --maxRating 5");
+
+    // check that error has been calculated
+    sh.exec("hadoop fs -test -e ${WORK_DIR}/als/rmse/rmse.txt");
+    assertEquals("${WORK_DIR}/als/rmse/rmse.txt does not exist", 0, sh.getRet());
+    // print the error
+    sh.exec("hadoop fs -cat ${WORK_DIR}/als/rmse/rmse.txt");
+    assertEquals("Unexpected error from running hadoop", 0, sh.getRet());
+
+    // check that recommendations has been calculated
+    sh.exec("hadoop fs -test -e ${WORK_DIR}/recommendations/part-m-00000");
+    assertEquals("${WORK_DIR}/recommendations/part-m-00000 does not exist", 0, sh.getRet());
+  }
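
Condensed, the ALS workflow above chains four Mahout drivers (paths and flags as in the test body):

    // 1. splitDataset          -> 90/10 trainingSet / probeSet under ${WORK_DIR}/dataset
    // 2. parallelALS           -> factorizes ratings into user (U) and item (M) feature matrices
    // 3. evaluateFactorization -> writes RMSE of probe-set predictions to ${WORK_DIR}/als/rmse
    // 4. recommendfactorized   -> top-6 recommendations per user in ${WORK_DIR}/recommendations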
+
+  /**
+   * Alternative to parameterized test: this is a test that is implemented by each
+   * individual clustering test.
+   *
+   * Explanation of clustering tests:
+   *
+   * Each of the below tests runs a different clustering algorithm against the same
+   * input data set, against synthesized "control" data.  "Control data" is data that shows
+   * the time series performance of a process.  For example, a cellphone company
+   * might want to run this to find which regions have decreasing performance over time (e.g. due to increased population),
+   * versus places which have cyclic performance (e.g. due to weather).
+   */
+  private void _clusterSyntheticControlData(String algorithm) {
+    rmr("testdata");
+    sh.exec("hadoop fs -mkdir testdata",
+      "hadoop fs -put ${download_dir}/synthetic_control.data testdata");
+    assertEquals("Unable to put data in hdfs", 0, sh.getRet());
+    assertRun("org.apache.mahout.clustering.syntheticcontrol.${algorithm}.Job");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+  }
+
+  @Test(timeout = 900000L)
+  public void clusterControlDataWithCanopy() {
+    _clusterSyntheticControlData("canopy");
+  }
+
+  @Test(timeout = 9000000L)
+  public void clusterControlDataWithKMeans() {
+    _clusterSyntheticControlData("kmeans");
+  }
+
+  @Test(timeout = 9000000L)
+  public void clusterControlDataWithFuzzyKMeans() {
+    _clusterSyntheticControlData("fuzzykmeans");
+  }
+
+  /**
+   * Test the creation of topical clusters from raw lists words using LDA.
+   */
+  @Test(timeout = 7200000L)
+  public void testReutersLDA() {
+    // where does lda.algorithm come in?
+    assertRun("org.apache.lucene.benchmark.utils.ExtractReuters ${TEMP_DIR}/reuters-sgm ${TEMP_DIR}/reuters-out");
+    //put ${TEMP_DIR}/reuters-out into hdfs as we have to run seqdirectory in mapreduce mode, so files need to be in hdfs
+    sh.exec("hadoop fs -put ${TEMP_DIR}/reuters-out ${WORK_DIR}/reuters-out");
+    assertEquals("Unable to put reuters-out-seqdir in hdfs", 0, sh.getRet());
+
+    assertRun("seqdirectory -i ${TEMP_DIR}/reuters-out -o ${TEMP_DIR}/reuters-out-seqdir -c UTF-8 -chunk 5");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+    /*
+    // reuters-out-seqdir exists on a local disk at this point,
+    // copy it to hdfs
+    rmr("${WORK_DIR}/reuters-out-seqdir");
+    sh.exec("hadoop fs -put ${TEMP_DIR}/reuters-out-seqdir ${WORK_DIR}/reuters-out-seqdir");
+    assertEquals("Unable to put reuters-out-seqdir in hdfs", 0, sh.getRet());
     */
-    private void _clusterSyntheticControlData(String algorithm) {
-        rmr("testdata");
-        sh.exec("hadoop fs -mkdir testdata",
-        "hadoop fs -put ${download_dir}/synthetic_control.data testdata");
-        assertEquals("Unable to put data in hdfs", 0, sh.getRet());
-        assertRun("org.apache.mahout.clustering.syntheticcontrol.${algorithm}.Job");
-        assertEquals("Unexpected error from running mahout", 0, sh.getRet());
-    }
-
-    @Test(timeout=900000L)
-    public void clusterControlDataWithCanopy() {
-        _clusterSyntheticControlData("canopy");
-    }
-
-    @Test(timeout=9000000L)
-    public void clusterControlDataWithKMeans() {
-        _clusterSyntheticControlData("kmeans");
-    }
-
-    @Test(timeout=9000000L)
-    public void clusterControlDataWithFuzzyKMeans() {
-        _clusterSyntheticControlData("fuzzykmeans");
-    }
-
-    /**
-    * Test the creation of topical clusters from raw lists words using LDA.
-    */
-    @Test(timeout=7200000L)
-    public void testReutersLDA() {
-        // where does lda.algorithm come in?
-        assertRun("org.apache.lucene.benchmark.utils.ExtractReuters ${TEMP_DIR}/reuters-sgm ${TEMP_DIR}/reuters-out");
-        //put ${TEMP_DIR}/reuters-out into hdfs as we have to run seqdirectory in mapreduce mode, so files need be in hdfs
-        sh.exec("hadoop fs -put ${TEMP_DIR}/reuters-out ${WORK_DIR}/reuters-out");
-        assertEquals("Unable to put reuters-out-seqdir in hdfs", 0, sh.getRet());
-
-        assertRun("seqdirectory -i ${TEMP_DIR}/reuters-out -o ${TEMP_DIR}/reuters-out-seqdir -c UTF-8 -chunk 5");
-        assertEquals("Unexpected error from running mahout", 0, sh.getRet());
-        /*
-        // reuters-out-seqdir exists on a local disk at this point,
-        // copy it to hdfs
-        rmr("${WORK_DIR}/reuters-out-seqdir");
-        sh.exec("hadoop fs -put ${TEMP_DIR}/reuters-out-seqdir ${WORK_DIR}/reuters-out-seqdir");
-        assertEquals("Unable to put reuters-out-seqdir in hdfs", 0, sh.getRet());
-        */
-        assertRun("""seq2sparse \
+    assertRun("""seq2sparse \
 -i ${WORK_DIR}/reuters-out-seqdir/ \
 -o ${WORK_DIR}/reuters-out-seqdir-sparse-lda \
 -wt tf -seq -nr 3 --namedVector""");
 
-        sh.exec("hadoop fs -mkdir ${WORK_DIR}/reuters-lda");
-        assertEquals("Unable to make dir reuters-lda in hdfs", 0, sh.getRet());
+    sh.exec("hadoop fs -mkdir ${WORK_DIR}/reuters-lda");
+    assertEquals("Unable to make dir reuters-lda in hdfs", 0, sh.getRet());
 
-        assertRun("""lda \
+    assertRun("""lda \
 -i ${WORK_DIR}/reuters-out-seqdir-sparse-lda/tf-vectors \
 -o ${WORK_DIR}/reuters-lda -k 20 -x 20 \
 && \
@@ -280,46 +280,46 @@ mahout ldatopics \
 -i ${WORK_DIR}/reuters-lda/state-20 \
 -d ${WORK_DIR}/reuters-out-seqdir-sparse-lda/dictionary.file-0 \
 -dt sequencefile""");
-    }
+  }
 
-    /**
-    * Note that this test doesnt work on some older mahout versions.
-    */
-    @Test(timeout=9000000L)
-    public void testBayesNewsgroupClassifier() {
-        // put bayes-train-input and bayes-test-input in hdfs
-        sh.exec("hadoop fs -mkdir ${WORK_DIR}/20news-vectors");
-        sh.exec("hadoop fs -put ${TEMP_DIR}/20news-all ${WORK_DIR}/20news-all");
-        assertEquals("Unable to put bayes-train-input in hdfs", 0, sh.getRet());
+  /**
+   * Note that this test doesn't work on some older Mahout versions.
+   */
+  @Test(timeout = 9000000L)
+  public void testBayesNewsgroupClassifier() {
+    // put bayes-train-input and bayes-test-input in hdfs
+    sh.exec("hadoop fs -mkdir ${WORK_DIR}/20news-vectors");
+    sh.exec("hadoop fs -put ${TEMP_DIR}/20news-all ${WORK_DIR}/20news-all");
+    assertEquals("Unable to put bayes-train-input in hdfs", 0, sh.getRet());
 
-        assertRun("seqdirectory -i ${WORK_DIR}/20news-all -o ${WORK_DIR}/20news-seq");
-        assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+    assertRun("seqdirectory -i ${WORK_DIR}/20news-all -o ${WORK_DIR}/20news-seq");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
 
-        assertRun("seq2sparse -i ${WORK_DIR}/20news-seq -o ${WORK_DIR}/20news-vectors  -lnorm -nv  -wt tfidf");
-        assertEquals("Unexpected error from running mahout", 0, sh.getRet());
+    assertRun("seq2sparse -i ${WORK_DIR}/20news-seq -o ${WORK_DIR}/20news-vectors  -lnorm -nv  -wt tfidf");
+    assertEquals("Unexpected error from running mahout", 0, sh.getRet());
 
-        assertRun("""split \
+    assertRun("""split \
 -i ${WORK_DIR}/20news-vectors/tfidf-vectors \
 --trainingOutput ${WORK_DIR}/20news-train-vectors \
 --testOutput ${WORK_DIR}/20news-test-vectors \
 --randomSelectionPct 40 --overwrite --sequenceFiles -xm sequential""");
 
-        assertRun("""trainnb \
+    assertRun("""trainnb \
 -i ${WORK_DIR}/20news-train-vectors -el \
 -o ${WORK_DIR}/model \
 -li ${WORK_DIR}/labelindex \
 -ow""");
 
-        assertRun("""testnb \
+    assertRun("""testnb \
 -i ${WORK_DIR}/20news-train-vectors \
 -m ${WORK_DIR}/model \
 -l ${WORK_DIR}/labelindex \
 -ow -o ${WORK_DIR}/20news-testing""");
 
-        assertRun("""testnb \
+    assertRun("""testnb \
 -i ${WORK_DIR}/20news-test-vectors \
 -m ${WORK_DIR}/model \
 -l ${WORK_DIR}/labelindex \
 -ow -o ${WORK_DIR}/20news-testing""");
-    }
+  }
 }
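A note on the assertRun(...) calls that dominate the Mahout smokes above: the helper itself is defined elsewhere in test-artifacts, but the pattern it implies is a thin wrapper that hands driver arguments to the `mahout` launcher (visible verbatim in the `mahout ldatopics` hunk above) through the shared Shell and fails on a non-zero exit code. A minimal sketch of that presumed pattern -- the helper body and import path are assumptions, not the actual implementation:

  // Hypothetical sketch of the assertRun() pattern -- not the real helper.
  import org.apache.bigtop.itest.shell.Shell
  import static org.junit.Assert.assertEquals

  class MahoutRunner {
    static Shell sh = new Shell("/bin/bash")

    // e.g. assertRun("parallelALS --input ... --output ... --numFeatures 20")
    static void assertRun(String driverArgs) {
      sh.exec("mahout ${driverArgs}")   // delegate to the mahout launcher script
      assertEquals("Unexpected error from running mahout ${driverArgs}",
        0, sh.getRet())
    }
  }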

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/oozie/src/main/groovy/org/apache/bigtop/itest/ooziesmoke/TestOozieSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/oozie/src/main/groovy/org/apache/bigtop/itest/ooziesmoke/TestOozieSmoke.groovy b/bigtop-tests/test-artifacts/oozie/src/main/groovy/org/apache/bigtop/itest/ooziesmoke/TestOozieSmoke.groovy
index 7d1a90c..657dd65 100644
--- a/bigtop-tests/test-artifacts/oozie/src/main/groovy/org/apache/bigtop/itest/ooziesmoke/TestOozieSmoke.groovy
+++ b/bigtop-tests/test-artifacts/oozie/src/main/groovy/org/apache/bigtop/itest/ooziesmoke/TestOozieSmoke.groovy
@@ -51,90 +51,90 @@ class TestOozieSmoke {
     assertNotNull("namenode hostname isn't set", namenode)
 
     oozie_tar_home = System.getProperty("org.apache.bigtop.itest.oozie_tar_home",
-                                        (new File("/usr/share/doc/packages/oozie/")).exists() ?
-                                           "/usr/share/doc/packages/oozie/" :
-                                           "/usr/share/doc/oozie*/");
+      (new File("/usr/share/doc/packages/oozie/")).exists() ?
+        "/usr/share/doc/packages/oozie/" :
+        "/usr/share/doc/oozie*/");
 
     sh.exec("mkdir /tmp/${tmp_dir}",
-            "cd /tmp/${tmp_dir}",
-            "tar xzf ${oozie_tar_home}/oozie-examples.tar.gz",
-            "hadoop fs -mkdir ${tmp_dir}",
-            "hadoop fs -put examples ${tmp_dir}");
+      "cd /tmp/${tmp_dir}",
+      "tar xzf ${oozie_tar_home}/oozie-examples.tar.gz",
+      "hadoop fs -mkdir ${tmp_dir}",
+      "hadoop fs -put examples ${tmp_dir}");
     assertEquals("Failed to put examples onto HDFS",
-                 0, sh.ret);
+      0, sh.ret);
   }
 
   @AfterClass
   static void tearDown() {
     sh.exec("rm -rf /tmp/${tmp_dir}",
-            "hadoop fs -rmr ${tmp_dir}");
+      "hadoop fs -rmr ${tmp_dir}");
   }
 
   void testOozieExamplesCommon(String testname) {
     sh.exec("oozie job -oozie ${oozie_url} -run -DjobTracker=${resourcemanager} -DnameNode=${namenode} " +
-            "-DexamplesRoot=${tmp_dir}/examples -config /tmp/${tmp_dir}/examples/apps/${testname}/job.properties");
+      "-DexamplesRoot=${tmp_dir}/examples -config /tmp/${tmp_dir}/examples/apps/${testname}/job.properties");
     assertEquals("Oozie job submition ${testname} failed",
-                 0, sh.ret);
+      0, sh.ret);
 
-    String jobId = sh.out[0].replaceAll(/job: /,"");
+    String jobId = sh.out[0].replaceAll(/job: /, "");
     while (sh.exec("oozie job -oozie ${oozie_url} -info ${jobId}").out.join(' ') =~ /Status\s*:\s*RUNNING/) {
       sleep(WAIT_TIMEOUT);
     }
     assertTrue("Oozie job ${testname} returned ${sh.out.join(' ')} instead of SUCCEEDED",
-               (sh.out.join(' ') =~ /Status\s*:\s*SUCCEEDED/).find());
+      (sh.out.join(' ') =~ /Status\s*:\s*SUCCEEDED/).find());
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testNoOp() {
     testOozieExamplesCommon("no-op");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testJavaMain() {
     testOozieExamplesCommon("java-main");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testMapReduce() {
     testOozieExamplesCommon("map-reduce");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testCustomMain() {
     testOozieExamplesCommon("custom-main");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testHadoopEl() {
     testOozieExamplesCommon("hadoop-el");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testStreaming() {
     testOozieExamplesCommon("streaming");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testPig() {
     testOozieExamplesCommon("pig");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testHive() {
     testOozieExamplesCommon("hive");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testSubwf() {
     testOozieExamplesCommon("subwf");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testSsh() {
     // testOozieExamplesCommon("ssh");
   }
 
-  @Test(timeout=300000L)
+  @Test(timeout = 300000L)
   public void testDemo() {
     // testOozieExamplesCommon("demo");
   }
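The core of testOozieExamplesCommon above is a submit-then-poll loop: submit with `oozie job -run`, strip the `job: ` prefix to get the job id, and re-query `-info` until the status leaves RUNNING (the JUnit timeout bounds the loop). The same loop as a standalone sketch, with an explicit poll cap added for illustration -- the cap and helper name are not in the original:

  // Illustrative sketch of the submit/poll pattern used above.
  import org.apache.bigtop.itest.shell.Shell

  String waitForOozieJob(Shell sh, String oozieUrl, String jobId,
                         long pollMs, int maxPolls) {
    String info = sh.exec("oozie job -oozie ${oozieUrl} -info ${jobId}").out.join(' ')
    int polls = 0
    while ((info =~ /Status\s*:\s*RUNNING/).find() && polls++ < maxPolls) {
      sleep(pollMs)  // Groovy GDK sleep
      info = sh.exec("oozie job -oozie ${oozieUrl} -info ${jobId}").out.join(' ')
    }
    return info  // caller asserts on /Status\s*:\s*SUCCEEDED/, as above
  }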

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/BTServices.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/BTServices.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/BTServices.groovy
index 4d1161c..b217c72 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/BTServices.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/BTServices.groovy
@@ -21,66 +21,66 @@ package org.apache.bigtop.itest.packagesmoke
 import org.apache.bigtop.itest.pmanager.PackageManager
 
 class BTServices {
-  static final List serviceDaemonUserNames = [ "flume", "hbase" , "hdfs" , "hue" , "mapred" , "oozie" , "sqoop" ,
-                                               "zookeeper", "hadoop"];
+  static final List serviceDaemonUserNames = ["flume", "hbase", "hdfs", "hue", "mapred", "oozie", "sqoop",
+    "zookeeper", "hadoop"];
 
   static final Map components = [
-                     HDFS           : [ services : [ "hadoop-namenode", "hadoop-datanode",
-                                                     "hadoop-secondarynamenode" ],
-                                        verifier : new StateVerifierHDFS(),
-                                        killIDs  : [ "hdfs" ],
-                                      ],
-                     mapreduce      : [ services : [ "hadoop-namenode", "hadoop-datanode",
-                                                     "hadoop-jobtracker", "hadoop-tasktracker" ],
-                                        killIDs  : [ "hdfs", "mapred" ],
-                                        verifier : new StateVerifierMapreduce(),
-                                      ],
-                     hive           : [ services : [ "hadoop-namenode", "hadoop-datanode",
-                                                     "hadoop-jobtracker", "hadoop-tasktracker" ],
-                                        killIDs  : [ "hdfs", "mapred" ],
-                                        verifier : new StateVerifierHive(),
-                                      ],
-                     HBase          : [ services : [ "hadoop-namenode", "hadoop-datanode",
-                                                     "hbase-master" ],
-                                        killIDs  : [ "hdfs", "hbase" ],
-                                        verifier : new StateVerifierHBase(),
-                                      ],
-                     zookeeper      : [ services : [ "hadoop-zookeeper" ],
-                                        verifier : new StateVerifierZookeeper(),
-                                        killIDs  : [ "zookeeper" ],
-                                      ],
-                     oozie          : [ services : [ "hadoop-namenode", "hadoop-datanode", "hadoop-jobtracker", "hadoop-tasktracker",
-                                                     "oozie" ],
-                                        killIDs  : [ "hdfs", "mapred", "oozie" ],
-                                        verifier : new StateVerifierOozie(),
-                                      ],
-                     flume          : [ services : [ "hadoop-namenode", "hadoop-datanode",
-                                                     "flume-master", "flume-agent" ],
-                                        killIDs  : [ "hdfs", "flume" ],
-                                        verifier : new StateVerifierFlume(),
-                                      ],
-                     sqoop          : [ services : [ "hadoop-namenode", "hadoop-datanode",
-                                                     "sqoop-metastore" ],
-                                        killIDs  : [ "hdfs", "sqoop" ],
-                                        verifier : new StateVerifierSqoop(),
-                                      ],
-                     hue            : [ services : [ "hadoop-namenode", "hadoop-datanode", "hadoop-jobtracker", "hadoop-tasktracker",
-                                                     "hue" ],
-                                        killIDs  : [ "hdfs", "mapred", "hue" ],
-                                        verifier : new StateVerifierHue(),
-                                      ],
-                   ];
+    HDFS: [services: ["hadoop-namenode", "hadoop-datanode",
+      "hadoop-secondarynamenode"],
+      verifier: new StateVerifierHDFS(),
+      killIDs: ["hdfs"],
+    ],
+    mapreduce: [services: ["hadoop-namenode", "hadoop-datanode",
+      "hadoop-jobtracker", "hadoop-tasktracker"],
+      killIDs: ["hdfs", "mapred"],
+      verifier: new StateVerifierMapreduce(),
+    ],
+    hive: [services: ["hadoop-namenode", "hadoop-datanode",
+      "hadoop-jobtracker", "hadoop-tasktracker"],
+      killIDs: ["hdfs", "mapred"],
+      verifier: new StateVerifierHive(),
+    ],
+    HBase: [services: ["hadoop-namenode", "hadoop-datanode",
+      "hbase-master"],
+      killIDs: ["hdfs", "hbase"],
+      verifier: new StateVerifierHBase(),
+    ],
+    zookeeper: [services: ["hadoop-zookeeper"],
+      verifier: new StateVerifierZookeeper(),
+      killIDs: ["zookeeper"],
+    ],
+    oozie: [services: ["hadoop-namenode", "hadoop-datanode", "hadoop-jobtracker", "hadoop-tasktracker",
+      "oozie"],
+      killIDs: ["hdfs", "mapred", "oozie"],
+      verifier: new StateVerifierOozie(),
+    ],
+    flume: [services: ["hadoop-namenode", "hadoop-datanode",
+      "flume-master", "flume-agent"],
+      killIDs: ["hdfs", "flume"],
+      verifier: new StateVerifierFlume(),
+    ],
+    sqoop: [services: ["hadoop-namenode", "hadoop-datanode",
+      "sqoop-metastore"],
+      killIDs: ["hdfs", "sqoop"],
+      verifier: new StateVerifierSqoop(),
+    ],
+    hue: [services: ["hadoop-namenode", "hadoop-datanode", "hadoop-jobtracker", "hadoop-tasktracker",
+      "hue"],
+      killIDs: ["hdfs", "mapred", "hue"],
+      verifier: new StateVerifierHue(),
+    ],
+  ];
 
   static final Map<String, List<String>> release2services = [
-                     "bigtop"       : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume" ],
-                     "2"            : [ "HDFS", "mapreduce", "hive" ],
-                     "3b2"          : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume",          "hue" ],
-                     "3b3"          : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
-                     "3b4"          : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
-                     "3u0"          : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
-                     "3u1"          : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue" ],
-                     "3"            : [ "HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume" ],
-                   ];
+    "bigtop": ["HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume"],
+    "2": ["HDFS", "mapreduce", "hive"],
+    "3b2": ["HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "hue"],
+    "3b3": ["HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue"],
+    "3b4": ["HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue"],
+    "3u0": ["HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue"],
+    "3u1": ["HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume", "sqoop", "hue"],
+    "3": ["HDFS", "mapreduce", "hive", "HBase", "zookeeper", "oozie", "flume"],
+  ];
 
   public static Map getServices(String release) {
     Map res = [:];
@@ -98,16 +98,16 @@ class BTServices {
     }
 
     services.each {
-        // zookeeper is a very messy case of naming :-(
-        if (it == "zookeeper" &&
-            (PackageManager.getPackageManager().type == 'apt' ||
-             release == "3" || release == "3u1" || release == "bigtop")) {
-          res[it] = [ services : [ "hadoop-zookeeper-server" ],
-                      verifier : new StateVerifierZookeeper(),
-                    ];
-        } else {
-          res[it] = components[it];
-        }
+      // zookeeper is a very messy case of naming :-(
+      if (it == "zookeeper" &&
+        (PackageManager.getPackageManager().type == 'apt' ||
+          release == "3" || release == "3u1" || release == "bigtop")) {
+        res[it] = [services: ["hadoop-zookeeper-server"],
+          verifier: new StateVerifierZookeeper(),
+        ];
+      } else {
+        res[it] = components[it];
+      }
     }
     return res;
   }
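Each entry in the components map above bundles the daemon names to run, the unix accounts to clean up between runs (killIDs), and a StateVerifier that can seed and re-check component state. A hedged usage sketch of getServices() built only on that shape -- starting the daemons is elided; the verifier calls match the StateVerifier contract further down in this patch:

  // Sketch: exercise every verifier for a given release.
  Map svcs = BTServices.getServices("bigtop")
  svcs.each { component, meta ->
    // meta.services -> daemon names, meta.killIDs -> users, meta.verifier -> checker
    meta.verifier.createState()
    assert meta.verifier.verifyState() : "state verification failed for ${component}"
  }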

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/CDHUpgradeSequence.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/CDHUpgradeSequence.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/CDHUpgradeSequence.groovy
index cbc4396..025f284 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/CDHUpgradeSequence.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/CDHUpgradeSequence.groovy
@@ -44,11 +44,11 @@ class CDHUpgradeSequence {
                      service hadoop-0.20-datanode start                              &&
                      sleep 61
                   """).getRet() +
-      shHDFS.exec("""hadoop dfsadmin -finalizeUpgrade &&
+        shHDFS.exec("""hadoop dfsadmin -finalizeUpgrade &&
                      hadoop fs -chown mapred /mapred/system || /bin/true
                   """).getRet() +
-      shRoot.exec("service hadoop-0.20-datanode stop").getRet() +
-      shRoot.exec("service hadoop-0.20-namenode stop").getRet();
+        shRoot.exec("service hadoop-0.20-datanode stop").getRet() +
+        shRoot.exec("service hadoop-0.20-namenode stop").getRet();
     }
     return 0;
   }
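The sequence above relies on a compact convention: every exec() returns its shell exit code via getRet(), and the codes are summed, so a single non-zero result anywhere in the start/finalize/stop chain makes the whole step non-zero. Reduced to its essence:

  // Sketch: sum exit codes so any failure in a multi-step sequence is caught.
  Shell shRoot = new Shell("/bin/bash", "root")
  int rc = shRoot.exec("service hadoop-0.20-namenode start").getRet() +
           shRoot.exec("service hadoop-0.20-datanode start").getRet()
  assert rc == 0 : "at least one service failed to start"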

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/DeployCDH.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/DeployCDH.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/DeployCDH.groovy
index b885f9b..993ed41 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/DeployCDH.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/DeployCDH.groovy
@@ -27,65 +27,65 @@ import org.junit.rules.ErrorCollector
 import static org.hamcrest.CoreMatchers.equalTo
 
 class DeployCDH {
-    List<String> cdh2 = [
-     "hadoop-0.20", "hadoop-0.20-conf-pseudo", "hadoop-0.20-datanode",
-     "hadoop-0.20-fuse", "hadoop-0.20-jobtracker", "hadoop-0.20-namenode", "hadoop-0.20-native",
-     "hadoop-0.20-pipes", "hadoop-0.20-secondarynamenode", "hadoop-0.20-source",
-     "hadoop-0.20-tasktracker", "hadoop-hive", "hadoop-pig" ];
-    List<String> cdh3b2 = [
-     "flume", "flume-master", "flume-agent", "hadoop-0.20", "hadoop-0.20-conf-pseudo", "hadoop-0.20-conf-pseudo-hue",
-     "hadoop-0.20-datanode", "hadoop-0.20-fuse", "hadoop-0.20-jobtracker", "hadoop-0.20-namenode", "hadoop-0.20-native",
-     "hadoop-0.20-pipes", "hadoop-0.20-secondarynamenode", "hadoop-0.20-source",
-     "hadoop-0.20-tasktracker", "hadoop-hbase",
-     "hadoop-hbase-master", "hadoop-hbase-regionserver", "hadoop-hbase-thrift", "hadoop-hive", "hadoop-pig",
-     "hadoop-zookeeper", "hadoop-zookeeper-server", "hue", "hue-about", "hue-beeswax", "hue-common",
-     "hue-filebrowser", "hue-help", "hue-jobbrowser", "hue-jobsub", "hue-plugins", "hue-proxy",
-     "hue-useradmin", "oozie", "sqoop" ];
-    List<String> cdh3b3 = [
-     "flume", "flume-master", "flume-agent", "hadoop-0.20", "hadoop-0.20-conf-pseudo",
-     "hadoop-0.20-datanode", "hadoop-0.20-fuse", "hadoop-0.20-jobtracker", "hadoop-0.20-namenode", "hadoop-0.20-native",
-     "hadoop-0.20-pipes", "hadoop-0.20-sbin", "hadoop-0.20-secondarynamenode", "hadoop-0.20-source",
-     "hadoop-0.20-tasktracker", "hadoop-hbase", "hadoop-hbase-doc",
-     "hadoop-hbase-master", "hadoop-hbase-regionserver", "hadoop-hbase-thrift", "hadoop-hive", "hadoop-pig",
-     "hadoop-zookeeper", "hadoop-zookeeper-server", "hue", "hue-about", "hue-beeswax", "hue-common",
-     "hue-filebrowser", "hue-help", "hue-jobbrowser", "hue-jobsub", "hue-plugins", "hue-proxy",
-     "hue-useradmin", "oozie", "oozie-client", "sqoop", "sqoop-metastore" ];
+  List<String> cdh2 = [
+    "hadoop-0.20", "hadoop-0.20-conf-pseudo", "hadoop-0.20-datanode",
+    "hadoop-0.20-fuse", "hadoop-0.20-jobtracker", "hadoop-0.20-namenode", "hadoop-0.20-native",
+    "hadoop-0.20-pipes", "hadoop-0.20-secondarynamenode", "hadoop-0.20-source",
+    "hadoop-0.20-tasktracker", "hadoop-hive", "hadoop-pig"];
+  List<String> cdh3b2 = [
+    "flume", "flume-master", "flume-agent", "hadoop-0.20", "hadoop-0.20-conf-pseudo", "hadoop-0.20-conf-pseudo-hue",
+    "hadoop-0.20-datanode", "hadoop-0.20-fuse", "hadoop-0.20-jobtracker", "hadoop-0.20-namenode", "hadoop-0.20-native",
+    "hadoop-0.20-pipes", "hadoop-0.20-secondarynamenode", "hadoop-0.20-source",
+    "hadoop-0.20-tasktracker", "hadoop-hbase",
+    "hadoop-hbase-master", "hadoop-hbase-regionserver", "hadoop-hbase-thrift", "hadoop-hive", "hadoop-pig",
+    "hadoop-zookeeper", "hadoop-zookeeper-server", "hue", "hue-about", "hue-beeswax", "hue-common",
+    "hue-filebrowser", "hue-help", "hue-jobbrowser", "hue-jobsub", "hue-plugins", "hue-proxy",
+    "hue-useradmin", "oozie", "sqoop"];
+  List<String> cdh3b3 = [
+    "flume", "flume-master", "flume-agent", "hadoop-0.20", "hadoop-0.20-conf-pseudo",
+    "hadoop-0.20-datanode", "hadoop-0.20-fuse", "hadoop-0.20-jobtracker", "hadoop-0.20-namenode", "hadoop-0.20-native",
+    "hadoop-0.20-pipes", "hadoop-0.20-sbin", "hadoop-0.20-secondarynamenode", "hadoop-0.20-source",
+    "hadoop-0.20-tasktracker", "hadoop-hbase", "hadoop-hbase-doc",
+    "hadoop-hbase-master", "hadoop-hbase-regionserver", "hadoop-hbase-thrift", "hadoop-hive", "hadoop-pig",
+    "hadoop-zookeeper", "hadoop-zookeeper-server", "hue", "hue-about", "hue-beeswax", "hue-common",
+    "hue-filebrowser", "hue-help", "hue-jobbrowser", "hue-jobsub", "hue-plugins", "hue-proxy",
+    "hue-useradmin", "oozie", "oozie-client", "sqoop", "sqoop-metastore"];
 
-    List<String> aptPkg = [ "hadoop-0.20-doc", "libhdfs0", "libhdfs0-dev", "python-hive" ];
-    List<String> yumPkg = [ "hadoop-0.20-debuginfo", "hadoop-0.20-libhdfs" ];
-    List<String> zypperPkg = [ "hadoop-0.20-libhdfs", "hadoop-0.20-doc" ];
+  List<String> aptPkg = ["hadoop-0.20-doc", "libhdfs0", "libhdfs0-dev", "python-hive"];
+  List<String> yumPkg = ["hadoop-0.20-debuginfo", "hadoop-0.20-libhdfs"];
+  List<String> zypperPkg = ["hadoop-0.20-libhdfs", "hadoop-0.20-doc"];
 
-    Map<String, LinkedHashMap<String, Collection>> distPackages = [
-      "2" : [ "apt" : cdh2 + aptPkg,
-              "yum" : cdh2 + yumPkg + [ "hadoop-0.20-docs", "hadoop-hive-webinterface" ],
-               "zypper" : [],
-              // "cloudera-desktop", "cloudera-desktop-plugins",
-            ],
-    "3b2" : [ "apt" : cdh3b2 + aptPkg,
-              "yum" : cdh3b2 + yumPkg + [ "hadoop-0.20-docs", "hadoop-hive-webinterface" ],
-              "zypper" : [],
-            ],
-    "3b3" : [ "apt" : cdh3b3 + aptPkg + [ "hadoop-hbase-doc",  ],
-              "yum" : cdh3b3 + yumPkg + [ "hadoop-0.20-docs", "hadoop-hive-webinterface" ],
-              "zypper" : [],
-            ],
-    "3b4" : [ "apt" : cdh3b3 + aptPkg + [ "hadoop-hbase-doc",  ],
-              "yum" : cdh3b3 + yumPkg + [ "hadoop-0.20-doc",  "hadoop-hive-webinterface" ],
-              "zypper" : cdh3b3 + zypperPkg,
-            ],
-    "3"   : [ "apt" : cdh3b3 + aptPkg + [ "hadoop-hbase-doc",  ],
-              "yum" : cdh3b3 + yumPkg + [ "hadoop-0.20-doc" ],
-              "zypper" : cdh3b3 + zypperPkg,
-            ],
-    "3u0" : [ "apt" : cdh3b3 + aptPkg + [ "hadoop-hbase-doc",  ],
-              "yum" : cdh3b3 + yumPkg + [ "hadoop-0.20-doc" ],
-              "zypper" : cdh3b3 + zypperPkg,
-            ],
-    "3u1" : [ "apt" : cdh3b3 + aptPkg + [ "hadoop-hbase-doc",  ],
-              "yum" : cdh3b3 + yumPkg + [ "hadoop-0.20-doc" ],
-              "zypper" : cdh3b3 + zypperPkg,
-            ],
-    ];
+  Map<String, LinkedHashMap<String, Collection>> distPackages = [
+    "2": ["apt": cdh2 + aptPkg,
+      "yum": cdh2 + yumPkg + ["hadoop-0.20-docs", "hadoop-hive-webinterface"],
+      "zypper": [],
+      // "cloudera-desktop", "cloudera-desktop-plugins",
+    ],
+    "3b2": ["apt": cdh3b2 + aptPkg,
+      "yum": cdh3b2 + yumPkg + ["hadoop-0.20-docs", "hadoop-hive-webinterface"],
+      "zypper": [],
+    ],
+    "3b3": ["apt": cdh3b3 + aptPkg + ["hadoop-hbase-doc",],
+      "yum": cdh3b3 + yumPkg + ["hadoop-0.20-docs", "hadoop-hive-webinterface"],
+      "zypper": [],
+    ],
+    "3b4": ["apt": cdh3b3 + aptPkg + ["hadoop-hbase-doc",],
+      "yum": cdh3b3 + yumPkg + ["hadoop-0.20-doc", "hadoop-hive-webinterface"],
+      "zypper": cdh3b3 + zypperPkg,
+    ],
+    "3": ["apt": cdh3b3 + aptPkg + ["hadoop-hbase-doc",],
+      "yum": cdh3b3 + yumPkg + ["hadoop-0.20-doc"],
+      "zypper": cdh3b3 + zypperPkg,
+    ],
+    "3u0": ["apt": cdh3b3 + aptPkg + ["hadoop-hbase-doc",],
+      "yum": cdh3b3 + yumPkg + ["hadoop-0.20-doc"],
+      "zypper": cdh3b3 + zypperPkg,
+    ],
+    "3u1": ["apt": cdh3b3 + aptPkg + ["hadoop-hbase-doc",],
+      "yum": cdh3b3 + yumPkg + ["hadoop-0.20-doc"],
+      "zypper": cdh3b3 + zypperPkg,
+    ],
+  ];
 
   @Rule
   public ErrorCollector errors = new ErrorCollector();
@@ -99,7 +99,7 @@ class DeployCDH {
 
     oldRepo.addRepo();
     checkThat("failed to add repository for pre-upgrade CDH deployment",
-              oldRepo.getPm().refresh(), equalTo(0));
+      oldRepo.getPm().refresh(), equalTo(0));
 
     // Lets try to remove existing packages -- just in case
     List stalePkgs = [];
@@ -115,7 +115,7 @@ class DeployCDH {
       // We don't want to install them for a second time.
       if (!pkg.isInstalled()) {
         checkThat("failed to install required package ${pkg.getName()}",
-                  pkg.install(), equalTo(0));
+          pkg.install(), equalTo(0));
       }
       pkg.getServices().each { it.value.stop(); }
     }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestCommon.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestCommon.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestCommon.groovy
index 0dae5d3..053ce1e 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestCommon.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestCommon.groovy
@@ -45,11 +45,11 @@ class PackageTestCommon {
   }
 
   public void checkThatService(String msg, Service svc, Matcher<Object> matcher) {
-    if (PackageTestErrorProxy.checkEquals(svcStatusDecoder(svc.status()), matcher) == true ) {
+    if (PackageTestErrorProxy.checkEquals(svcStatusDecoder(svc.status()), matcher) == true) {
       return;
     } else {
       sleep(3001);
-      if (PackageTestErrorProxy.checkEquals(svcStatusDecoder(svc.status()), matcher) == true ) {
+      if (PackageTestErrorProxy.checkEquals(svcStatusDecoder(svc.status()), matcher) == true) {
         return;
       } else {
         sleep(3001);
@@ -63,7 +63,7 @@ class PackageTestCommon {
   }
 
   String formatDescription(String description, String summary) {
-    return ((summary ?: "") + ' ' + description).replaceAll(/\s+/,' ').replaceAll(/\s\.\s/,' ').replaceAll(/\s\.$/,' ').trim();
+    return ((summary ?: "") + ' ' + description).replaceAll(/\s+/, ' ').replaceAll(/\s\.\s/, ' ').replaceAll(/\s\.$/, ' ').trim();
   }
 
   private void checkMetadataInternal(PackageInstance pkg, Map expected_metadata) {
@@ -77,7 +77,7 @@ class PackageTestCommon {
           expected = actual;
         } else {
           expected = formatDescription(expected, null);
-          actual = formatDescription(actual, null); 
+          actual = formatDescription(actual, null);
         }
       }
       if (key == "description") {
@@ -86,7 +86,7 @@ class PackageTestCommon {
       }
 
       checkThat("checking $key on package $name",
-                actual, equalTo(expected));
+        actual, equalTo(expected));
     }
   }
 
@@ -120,14 +120,14 @@ class PackageTestCommon {
     }
 
     checkThat("a set of dependencies of package $name is different from what was expected",
-              pkgDeps, hasSameKeys(expected_deps));
+      pkgDeps, hasSameKeys(expected_deps));
 
     expected_deps.each { depName, version ->
       if (version == "/self") {
         PackageInstance dep = PackageInstance.getPackageInstance(pm, depName);
         dep.refresh();
         checkThat("checking that an expected dependecy $depName for the package $name has the same version",
-                  "${dep.getVersion()}-${dep.getRelease()}", equalTo("${pkg.getVersion()}-${pkg.getRelease()}"));
+          "${dep.getVersion()}-${dep.getRelease()}", equalTo("${pkg.getVersion()}-${pkg.getRelease()}"));
       }
       // checkThat("checking that and expected dependency $key for the package $name got pulled",
       //           dep.isInstalled(), equalTo(true));
@@ -156,49 +156,49 @@ class PackageTestCommon {
     }
 
     checkThat("wrong list of runlevels for service $name",
-              runlevels, hasSameKeys(svc_metadata.runlevel));
+      runlevels, hasSameKeys(svc_metadata.runlevel));
 
     checkThatService("wrong state of service $name after installation",
-              svc, equalTo(svc_metadata.oninstall));
+      svc, equalTo(svc_metadata.oninstall));
 
     svc.stop();
     sleep(3001);
     checkThatService("service $name is expected to be stopped",
-              svc, equalTo("stop"));
+      svc, equalTo("stop"));
 
     if (svc_metadata.configured == "true") {
       checkThat("can not start service $name",
-                svc.start(), equalTo(0));
+        svc.start(), equalTo(0));
       sleep(3001);
       checkThatService("service $name is expected to be started",
-                svc, equalTo("start"));
+        svc, equalTo("start"));
 
       checkThat("can not restart service $name",
-                svc.restart(), equalTo(0));
+        svc.restart(), equalTo(0));
       sleep(3001);
       checkThatService("service $name is expected to be re-started",
-                svc, equalTo("start"));
+        svc, equalTo("start"));
 
       checkThat("can not stop service $name",
-                svc.stop(), equalTo(0));
+        svc.stop(), equalTo(0));
       sleep(3001);
       checkThatService("service $name is expected to be stopped",
-                svc, equalTo("stop"));
+        svc, equalTo("stop"));
     }
 
     // Stopping 2nd time (making sure that a stopped service is
     // not freaked out by an extra stop)
     checkThat("can not stop an already stopped service $name",
-              svc.stop(), equalTo(0));
+      svc.stop(), equalTo(0));
     checkThatService("wrong status after stopping service $name for the second time",
-              svc, equalTo("stop"));
+      svc, equalTo("stop"));
   }
 
   public void checkServices(Map expectedServices) {
     Map svcs = pm.getServices(pkg);
 
     checkThat("wrong list of services in a package $name",
-              expectedServices, hasSameKeys(svcs));
+      expectedServices, hasSameKeys(svcs));
 
     expectedServices.each { key, value ->
       if (svcs[key] != null) {
@@ -209,32 +209,32 @@ class PackageTestCommon {
 
   private void configService(Service svc, Map svc_metadata) {
     Shell shRoot = new Shell("/bin/bash", "root");
-      if (svc_metadata.config != null) {
-        def config = svc_metadata.config;
-        def configfile = svc_metadata.config.configfile;
-
-        def configcontent = "";
-        def property = new TreeMap(config.property);
-        property.keySet().eachWithIndex() {
-          v,j ->
+    if (svc_metadata.config != null) {
+      def config = svc_metadata.config;
+      def configfile = svc_metadata.config.configfile;
+
+      def configcontent = "";
+      def property = new TreeMap(config.property);
+      property.keySet().eachWithIndex() {
+        v, j ->
           configcontent = configcontent + "<property>";
           property.get(v).eachWithIndex() {
             obj, i ->
-            if(obj.toString().contains("name")) {
-              configcontent = configcontent + "<name>" + obj.toString()[5..-1] + "</name>";
-            } else {
-              configcontent = configcontent + "<value>" + obj.toString()[6..-1] + "</value>";
-            }
+              if (obj.toString().contains("name")) {
+                configcontent = configcontent + "<name>" + obj.toString()[5..-1] + "</name>";
+              } else {
+                configcontent = configcontent + "<value>" + obj.toString()[6..-1] + "</value>";
+              }
           };
           configcontent = configcontent + "</property>";
-        };
+      };
 
-        shRoot.exec("""sed -e '/\\/configuration/i \\ $configcontent' $configfile > temp.xml
+      shRoot.exec("""sed -e '/\\/configuration/i \\ $configcontent' $configfile > temp.xml
                        mv temp.xml $configfile""");
-      }
-      if (svc_metadata.init != null) {
-        svc.init();
-      }
+    }
+    if (svc_metadata.init != null) {
+      svc.init();
+    }
   }
 
   public void checkUsers(Map expectedUsers) {
@@ -244,11 +244,11 @@ class PackageTestCommon {
       Map user = ugi.getUsers()[key];
       if (user != null) {
         checkThat("checking user $key home directory",
-                  user.home, equalTo(value.home));
+          user.home, equalTo(value.home));
         checkThat("checking user $key description",
-                  user.descr.replaceAll(/,*$/, ""), equalTo(value.descr));
+          user.descr.replaceAll(/,*$/, ""), equalTo(value.descr));
         checkThat("checking user $key shell",
-                  user.shell, equalTo(value.shell));
+          user.shell, equalTo(value.shell));
       } else {
         recordFailure("package $name is epected to provide user $key");
       }
@@ -263,7 +263,7 @@ class PackageTestCommon {
       if (group != null) {
         (value.user instanceof List ? value.user : [value.user]).each {
           checkThat("group $key is expected to contain user $it",
-                    group.users.contains(it), equalTo(true));
+            group.users.contains(it), equalTo(true));
         }
       } else {
         recordFailure("package $name is epected to provide group $key");
@@ -276,19 +276,21 @@ class PackageTestCommon {
       Alternative alt = new Alternative(key);
       if (alt.getAlts().size() > 0) {
         checkThat("alternative link ${value.link} doesn't exist or does not point to /etc/alternatives",
-                  (new Shell()).exec("readlink ${value.link}").getOut().get(0),
-                  equalTo("/etc/alternatives/$key".toString()));
+          (new Shell()).exec("readlink ${value.link}").getOut().get(0),
+          equalTo("/etc/alternatives/$key".toString()));
 
         checkThat("alternative $key has incorrect status",
-                  alt.getStatus(), equalTo(value.status));
+          alt.getStatus(), equalTo(value.status));
         checkThat("alternative $key points to an unexpected target",
-                  alt.getValue(), equalTo(value.value));
+          alt.getValue(), equalTo(value.value));
 
         def altMap = [:];
-        ((value.alt instanceof List) ? value.alt : [value.alt]).each { altMap[it] = it; }
+        ((value.alt instanceof List) ? value.alt : [value.alt]).each {
+          altMap[it] = it;
+        }
 
         checkThat("alternative $key has incorrect set of targets",
-                  alt.getAlts(), hasSameKeys(altMap));
+          alt.getAlts(), hasSameKeys(altMap));
       } else {
         recordFailure("package $name is expected to provide alternative $key");
       }
@@ -299,7 +301,7 @@ class PackageTestCommon {
     List res = [];
     int i = 0;
     while (i + chunks < l.size()) {
-      res.add(l.subList(i, i+chunks));
+      res.add(l.subList(i, i + chunks));
       i += chunks;
     }
     res.add(l.subList(i, l.size()));
@@ -310,20 +312,20 @@ class PackageTestCommon {
     Map lsFiles = [:];
 
     sliceUp(files, 500).each { files_chunk ->
-    (new Shell()).exec("ls -ld '${files_chunk.join('\' \'')}'").out.each {
-      String fileName = it.replaceAll('^[^/]*/',"/");
-      def matcher = (it =~ /\S+/);
-
-      Map meta = [:];
-      if ((fileName =~ /->/).find()) {
-        meta.target = fileName.replaceAll(/^.*-> /, '');
-        fileName = fileName.replaceAll(/ ->.*$/, '');
+      (new Shell()).exec("ls -ld '${files_chunk.join('\' \'')}'").out.each {
+        String fileName = it.replaceAll('^[^/]*/', "/");
+        def matcher = (it =~ /\S+/);
+
+        Map meta = [:];
+        if ((fileName =~ /->/).find()) {
+          meta.target = fileName.replaceAll(/^.*-> /, '');
+          fileName = fileName.replaceAll(/ ->.*$/, '');
+        }
+        meta.perm = matcher[0].replace('.', '');
+        meta.user = matcher[2];
+        meta.group = matcher[3];
+        lsFiles[fileName] = meta;
       }
-      meta.perm  = matcher[0].replace('.', '');
-      meta.user  = matcher[2];
-      meta.group = matcher[3];
-      lsFiles[fileName] = meta;
-    }
     }
     return lsFiles;
   }
@@ -337,28 +339,28 @@ class PackageTestCommon {
     if (pm.type == "apt") {
       int curFile = 0;
       sliceUp(fileList, 500).each { fileList_chunk ->
-      sh.exec("dpkg -S '${fileList_chunk.join('\' \'')}'").out.each {
-        String n = it.replaceAll(/^.*: \//, "/");
-        while (fileList[curFile] != n) {
-          files[fileList[curFile]].owners = 0;
+        sh.exec("dpkg -S '${fileList_chunk.join('\' \'')}'").out.each {
+          String n = it.replaceAll(/^.*: \//, "/");
+          while (fileList[curFile] != n) {
+            files[fileList[curFile]].owners = 0;
+            curFile++;
+          }
+          files[n].owners = it.replaceAll(/: \/.*$/, "").split(',').size();
           curFile++;
         }
-        files[n].owners = it.replaceAll(/: \/.*$/,"").split(',').size();
-        curFile++;
-      }
       }
     } else {
       int curFile = -1;
       sliceUp(fileList, 500).each { fileList_chunk ->
-      sh.exec("rpm -qf /bin/cat '${fileList_chunk.join('\' /bin/cat \'')}'").out.each {
-        if ((it =~ /^coreutils/).find()) {
-          curFile++;
-          files[fileList[curFile]].owners = 0;
-        } else if (!(it =~ /not owned by any package/).find()) {
-          files[fileList[curFile]].owners++;
+        sh.exec("rpm -qf /bin/cat '${fileList_chunk.join('\' /bin/cat \'')}'").out.each {
+          if ((it =~ /^coreutils/).find()) {
+            curFile++;
+            files[fileList[curFile]].owners = 0;
+          } else if (!(it =~ /not owned by any package/).find()) {
+            files[fileList[curFile]].owners++;
+          }
         }
       }
-      }
     }
   }
 
@@ -377,19 +379,23 @@ class PackageTestCommon {
     Map configs = [:];
 
     pkg.getFiles().each { fName = formatFileName(it); files[fName] = fName; }
-    pkg.getConfigs().each { fName = formatFileName(it); configs[fName] = fName; files.remove(fName); }
-    pkg.getDocs().each { fName = formatFileName(it); docs[fName] = fName; files.remove(fName); }
+    pkg.getConfigs().each {
+      fName = formatFileName(it); configs[fName] = fName; files.remove(fName);
+    }
+    pkg.getDocs().each {
+      fName = formatFileName(it); docs[fName] = fName; files.remove(fName);
+    }
 
     if (pm.type == "apt" && doc != null) {
       file.putAll(doc);
     } else {
       checkThat("list of documentation files of package $name is different from what was expected",
-                docs, hasSameKeys(doc));
+        docs, hasSameKeys(doc));
     }
     checkThat("list of config files of package $name is different from what was expected",
-              configs, hasSameKeys(config));
+      configs, hasSameKeys(config));
     checkThat("list of regular files of package $name is different from what was expected",
-              files, hasSameKeys(file));
+      files, hasSameKeys(file));
 
     // TODO: we should probably iterate over a different set of files to include loose files as well
     List fileList = [];
@@ -413,19 +419,20 @@ class PackageTestCommon {
       Map meta = fileMeta[it];
       Map goldenMeta = goldenFileMeta[formatFileName(it)];
 
-      if (goldenMeta.owners != "-1") { // TODO: we shouldn't really skip anything even for multi-owned dirs
-      if (meta == null ||
+      if (goldenMeta.owners != "-1") {
+        // TODO: we shouldn't really skip anything even for multi-owned dirs
+        if (meta == null ||
           !meta.perm.equals(goldenMeta.perm) ||
           !meta.user.equals(goldenMeta.user) ||
           !meta.group.equals(goldenMeta.group) ||
           (goldenMeta.target != null && !goldenMeta.target.equals(meta.target)) ||
           (Integer.parseInt(goldenMeta.owners) == 1 && !meta.owners.toString().equals(goldenMeta.owners))) {
-        problemFiles.add(it);
-      }
+          problemFiles.add(it);
+        }
       }
     }
     checkThat("file metadata difference detected on the following files",
-                problemFiles, equalTo([]));
+      problemFiles, equalTo([]));
 
     // a bit of debug output
     def newManifest = new MarkupBuilder(new FileWriter("${pkg.name}.xml"));
@@ -441,9 +448,9 @@ class PackageTestCommon {
           int owners = meta.owners ?: -1;
 
           if (meta.target) {
-            "$node"(name : fName, owners : owners, perm : meta.perm, user : meta.user, group : meta.group, target : meta.target);
+            "$node"(name: fName, owners: owners, perm: meta.perm, user: meta.user, group: meta.group, target: meta.target);
           } else {
-            "$node"(name : fName, owners : owners, perm : meta.perm, user : meta.user, group : meta.group);
+            "$node"(name: fName, owners: owners, perm: meta.perm, user: meta.user, group: meta.group);
           }
         }
       }
@@ -456,32 +463,44 @@ class PackageTestCommon {
     // that we have a pretty weird policy on which pairs are supposed
     // to go together (short answer is -- not all of them).
     Map complimentaryPackages = [
-      "hadoop-0.20-sbin.x86_64"      : "hadoop-0.20-sbin.i386",
-      "hadoop-0.20-pipes.x86_64"     : "hadoop-0.20-pipes.i386",
-      "hadoop-0.20-native.x86_64"    : "hadoop-0.20-native.i386",
-      "hadoop-0.20-libhdfs.x86_64"   : "hadoop-0.20-libhdfs.i386",
-      "hadoop-0.20-debuginfo.x86_64" : "hadoop-0.20-debuginfo.i386",
+      "hadoop-0.20-sbin.x86_64": "hadoop-0.20-sbin.i386",
+      "hadoop-0.20-pipes.x86_64": "hadoop-0.20-pipes.i386",
+      "hadoop-0.20-native.x86_64": "hadoop-0.20-native.i386",
+      "hadoop-0.20-libhdfs.x86_64": "hadoop-0.20-libhdfs.i386",
+      "hadoop-0.20-debuginfo.x86_64": "hadoop-0.20-debuginfo.i386",
     ];
 
     if (complimentaryPackages[name] != null) {
       PackageInstance pkg386 = PackageInstance.getPackageInstance(pm, complimentaryPackages[name]);
 
       checkThat("complimentary native package ${pkg386.getName()} failed to be installed",
-                pkg386.install(), equalTo(0));
+        pkg386.install(), equalTo(0));
       checkThat("complimentary native package ${pkg386.getName()} failed to be removed",
-                pkg386.remove(), equalTo(0));
+        pkg386.remove(), equalTo(0));
     }
   }
 
   public void checkPackageFilesGotRemoved(Map files) {
     List allFiles = [];
-    (files.file   ?: [:]).each { if (it.value.owners == "1") { allFiles.add(it.key) } };
-    (files.doc    ?: [:]).each { if (it.value.owners == "1") { allFiles.add(it.key) } };
-    (files.config ?: [:]).each { if (it.value.owners == "1") { allFiles.add(it.key) } };
+    (files.file ?: [:]).each {
+      if (it.value.owners == "1") {
+        allFiles.add(it.key)
+      }
+    };
+    (files.doc ?: [:]).each {
+      if (it.value.owners == "1") {
+        allFiles.add(it.key)
+      }
+    };
+    (files.config ?: [:]).each {
+      if (it.value.owners == "1") {
+        allFiles.add(it.key)
+      }
+    };
 
     allFiles.each {
       checkThat("file $it still present after package is being removed",
-                (new File(it)).exists(), equalTo(false));
+        (new File(it)).exists(), equalTo(false));
     }
   }
 
@@ -490,7 +509,7 @@ class PackageTestCommon {
     // removal would succeed even when services are still running
     pkg.getServices().each { name, svc ->
       checkThat("can not start service $name",
-                svc.start(), equalTo(0));
+        svc.start(), equalTo(0));
     }
   }
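Worth calling out in the file checks above: sliceUp() exists so that `ls -ld`, `dpkg -S`, and `rpm -qf` are each invoked on at most 500 paths at a time, keeping the quoted command line below the OS argument-length limit. The chunking logic, lifted out as a self-contained sketch:

  // Split a list into chunks of at most `chunks` elements (mirrors sliceUp above).
  List sliceUp(List l, int chunks) {
    List res = []
    int i = 0
    while (i + chunks < l.size()) {
      res.add(l.subList(i, i + chunks))
      i += chunks
    }
    res.add(l.subList(i, l.size()))   // trailing partial chunk
    return res
  }

  // Usage: stat files in bounded batches instead of one huge command line.
  List fileList = ["/etc/hosts", "/etc/group", "/etc/passwd"]
  sliceUp(fileList, 500).each { chunk ->
    new Shell().exec("ls -ld '${chunk.join('\' \'')}'")
  }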
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestErrorProxy.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestErrorProxy.java b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestErrorProxy.java
index d11fd73..93d9d88 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestErrorProxy.java
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestErrorProxy.java
@@ -19,16 +19,18 @@ package org.apache.bigtop.itest.packagesmoke;
 
 import org.hamcrest.Matcher;
 import org.junit.rules.ErrorCollector;
+
 import java.util.concurrent.Callable;
+
 import static org.junit.Assert.assertThat;
 
 public class PackageTestErrorProxy {
   static public void checkThat(ErrorCollector ec, final String msg, final Object value, final Matcher<Object> matcher) {
     ec.checkSucceeds(new Callable<Object>() {
-        public Object call() throws Exception {
-            assertThat(msg, value, matcher);
-            return value;
-        }
+      public Object call() throws Exception {
+        assertThat(msg, value, matcher);
+        return value;
+      }
     });
   }
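The point of routing assertions through this proxy is that ErrorCollector.checkSucceeds records failures and lets the test keep running, so one package-smoke run reports every bad file, service, and dependency rather than dying on the first mismatch. A hedged usage sketch:

  // Sketch: soft assertions via the proxy -- both checks run, and any
  // failures are reported together when the test method finishes.
  import org.junit.Rule
  import org.junit.Test
  import org.junit.rules.ErrorCollector
  import static org.hamcrest.CoreMatchers.equalTo

  class ExampleProxyTest {
    @Rule
    public ErrorCollector errors = new ErrorCollector()

    @Test
    public void collectsAllFailures() {
      PackageTestErrorProxy.checkThat(errors, "version mismatch", "1.0", equalTo("1.1"))
      PackageTestErrorProxy.checkThat(errors, "arch check", "amd64", equalTo("amd64"))
    }
  }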
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestRepoMgr.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestRepoMgr.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestRepoMgr.groovy
index af5155b..014a897 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestRepoMgr.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/PackageTestRepoMgr.groovy
@@ -52,16 +52,16 @@ class PackageTestRepoMgr {
     btRepoHost = System.getProperty("${prefix}.host", "www.apache.org/dist/bigtop/stable/repos");
     btRepoVersion = System.getProperty("${prefix}.version", "bigtop");
 
-    Map btKeys  = [ yum    : "http://${btRepoHost}/GPG-KEY-bigtop",
-                     zypper : null,
-                     apt    : "http://${btRepoHost}/GPG-KEY-bigtop" ];
-    Map btRepos = [ yum    : "http://${btRepoHost}/centos6",
-                     zypper : "http://${btRepoHost}/sles11",
-                     apt    : "http://${btRepoHost}/precise/" ];
+    Map btKeys = [yum: "http://${btRepoHost}/GPG-KEY-bigtop",
+      zypper: null,
+      apt: "http://${btRepoHost}/GPG-KEY-bigtop"];
+    Map btRepos = [yum: "http://${btRepoHost}/centos6",
+      zypper: "http://${btRepoHost}/sles11",
+      apt: "http://${btRepoHost}/precise/"];
 
-    btRepoFileURL = System.getProperty("${prefix}.file.url.${linux_flavor.replaceAll(/\s/,'_')}",
-                       System.getProperty("${prefix}.file.url",
-                         "http://does.not.exist"));
+    btRepoFileURL = System.getProperty("${prefix}.file.url.${linux_flavor.replaceAll(/\s/, '_')}",
+      System.getProperty("${prefix}.file.url",
+        "http://does.not.exist"));
 
     btRepoURL = System.getProperty("${prefix}.url", btRepos[pm.getType()]);
     btKeyURL = System.getProperty("${prefix}.key.url", btKeys[pm.getType()]);
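Every value above resolves through layered System.getProperty lookups -- most specific key first, then a generic key, then a hard-coded default -- so a CI run can redirect the whole suite at another repository purely with -D flags. The lookup pattern in isolation (key names here are illustrative; the real keys are built from the `prefix` variable above):

  // Sketch: flavor-specific property, then generic property, then fallback.
  String flavor = "Ubuntu 12.04".replaceAll(/\s/, '_')
  String repoFileUrl = System.getProperty("repo.file.url.${flavor}",
    System.getProperty("repo.file.url",
      "http://does.not.exist"))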

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifier.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifier.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifier.groovy
index b2e7490..faa11ac 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifier.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifier.groovy
@@ -23,6 +23,7 @@ public class StateVerifier {
   }
 
   public void createState() {};
+
   public boolean verifyState() {
     return true;
   }
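The StateVerifier contract is deliberately minimal: createState() seeds durable state before a package upgrade or service restart, and verifyState() confirms the state survived. An illustrative subclass under that contract -- the marker path and shell commands are assumptions for the example, not taken from any real verifier:

  // Illustrative verifier: drop a marker file into HDFS, then confirm it is
  // still present after the operation under test.
  class StateVerifierMarkerFile extends StateVerifier {
    Shell sh = new Shell("/bin/bash")

    public void createState() {
      sh.exec("echo marker | hadoop fs -put - /tmp/state-marker")
    }

    public boolean verifyState() {
      // hadoop fs -test -e: exit 0 iff the file exists
      return sh.exec("hadoop fs -test -e /tmp/state-marker").getRet() == 0
    }
  }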

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy
index ca7b183..7199b25 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierFlume.groovy
@@ -30,14 +30,14 @@ class StateVerifierFlume extends StateVerifier {
     String node;
     sleep(120001);
     shFlume.exec("connect localhost",
-                 "getnodestatus",
-                 "quit\n");
-    node = shFlume.getOut().join(' ').replaceAll(/ --> IDLE.*$/,'')
-                                     .replaceAll(/^.*Master knows about [0-9]* nodes /,'')
-                                     .trim();
+      "getnodestatus",
+      "quit\n");
+    node = shFlume.getOut().join(' ').replaceAll(/ --> IDLE.*$/, '')
+      .replaceAll(/^.*Master knows about [0-9]* nodes /, '')
+      .trim();
     shFlume.exec("connect localhost",
-                 "exec config $node 'text(\"/etc/group\")' 'collectorSink(\"hdfs://localhost/flume\",\"test\")'",
-                 "quit\n");
+      "exec config $node 'text(\"/etc/group\")' 'collectorSink(\"hdfs://localhost/flume\",\"test\")'",
+      "quit\n");
     sleep(5001);
   }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHBase.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHBase.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHBase.groovy
index fa1161f..ebd4845 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHBase.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHBase.groovy
@@ -25,14 +25,14 @@ class StateVerifierHBase extends StateVerifier {
 
   public static void createStaticState() {
     shHBase.exec("create 't1', 'f1'",
-                 "put 't1', 'r1', 'f1:q', 'val'",
-                 "flush 't1'",
-                 "quit\n");
+      "put 't1', 'r1', 'f1:q', 'val'",
+      "flush 't1'",
+      "quit\n");
   }
 
   public static boolean verifyStaticState() {
     shHBase.exec("scan 't1'",
-                 "quit\n");
+      "quit\n");
 
     return (shHBase.getOut().join(' ') =~ /r1.*column=f1:q.*value=val/).find();
   }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHue.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHue.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHue.groovy
index 72a908a..9339f0e 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHue.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierHue.groovy
@@ -25,7 +25,7 @@ class StateVerifierHue extends StateVerifier {
   final static String loginURL = "${hueServer}/accounts/login/";
   final static String checkURL = "${hueServer}/debug/check_config";
   final static String creds = "username=admin&password=admin";
-  final static List<String> checkApps = [ "about", "beeswax", "filebrowser", "help", "jobbrowser", "jobsub", "useradmin" ];
+  final static List<String> checkApps = ["about", "beeswax", "filebrowser", "help", "jobbrowser", "jobsub", "useradmin"];
 
   Shell sh = new Shell();
 
@@ -47,10 +47,10 @@ class StateVerifierHue extends StateVerifier {
     sessionId = sh.getOut().join('');
 
     sh.exec("curl -m 60 -b '${sessionId}' ${checkURL}");
-    res = (sh.getOut().grep( ~/.*All ok. Configuration check passed.*/ ).size() != 0)
+    res = (sh.getOut().grep(~/.*All ok. Configuration check passed.*/).size() != 0)
     checkApps.each {
       sh.exec("curl -m 60 -b '${sessionId}' ${hueServer}/${it}/");
-      res = res && (sh.getOut().grep( ~/.*Page Not Found.*/ ).size() == 0);
+      res = res && (sh.getOut().grep(~/.*Page Not Found.*/).size() == 0);
     }
     return res;
   }

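The Hue check above is a fold: start from the config-check result, then AND
in one "no Page Not Found" probe per app. A minimal sketch of that reduction,
with hypothetical response bodies standing in for the curl output:

  def bodies = ["about": "All ok. Configuration check passed.",
                "help" : "Page Not Found"]
  boolean res = (bodies["about"] =~ /All ok. Configuration check passed/).find()
  ["about", "help"].each {
    res = res && !(bodies[it] =~ /Page Not Found/).find()
  }
  assert !res   // the hypothetical "help" body fails the probe
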
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierOozie.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierOozie.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierOozie.groovy
index ecfec16..7debb29 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierOozie.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierOozie.groovy
@@ -41,7 +41,7 @@ class StateVerifierOozie extends StateVerifier {
     sleep(5001);
 
     return (sh.exec("oozie job -oozie http://localhost:11000/oozie -info $jobID | grep -q 'Status *: SUCCEEDED'")
-              .getRet() == 0);
+      .getRet() == 0);
   }
 
   void createState() {

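The success test in the Oozie hunk shells out to the oozie CLI and lets
grep -q turn the status line into an exit code. A sketch of the same probe,
assuming the Shell helper used throughout these tests
(org.apache.bigtop.itest.shell.Shell) and a hypothetical job id:

  import org.apache.bigtop.itest.shell.Shell

  Shell sh = new Shell()
  String jobID = "0000001-150212000000000-oozie-oozi-W"   // hypothetical
  boolean succeeded = sh.exec("oozie job -oozie http://localhost:11000/oozie" +
      " -info $jobID | grep -q 'Status *: SUCCEEDED'").getRet() == 0
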
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierSqoop.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierSqoop.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierSqoop.groovy
index 4c56fe7..834bf5d 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierSqoop.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierSqoop.groovy
@@ -36,4 +36,4 @@ class StateVerifierSqoop extends StateVerifier {
     boolean storeFound = (sh.exec("sqoop job ${remoteOpt} --show storeJob | grep -q '^Job: storeJob'").getRet() == 0);
     return (localFound && storeFound);
   }
-}
\ No newline at end of file
+}

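The Sqoop probe works the same way: "sqoop job --show <name>" prints a
"Job: <name>" header when the saved job exists, and grep -q maps that to an
exit code. A sketch with remoteOpt assumed empty (local metastore):

  import org.apache.bigtop.itest.shell.Shell

  Shell sh = new Shell()
  String remoteOpt = ""   // assumption: no --meta-connect, local metastore
  boolean storeFound = sh.exec("sqoop job ${remoteOpt} --show storeJob" +
      " | grep -q '^Job: storeJob'").getRet() == 0
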
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierZookeeper.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierZookeeper.groovy b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierZookeeper.groovy
index 203af7d..21b87a7 100644
--- a/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierZookeeper.groovy
+++ b/bigtop-tests/test-artifacts/package/src/main/groovy/org/apache/bigtop/itest/packagesmoke/StateVerifierZookeeper.groovy
@@ -25,12 +25,12 @@ class StateVerifierZookeeper extends StateVerifier {
 
   public void createState() {
     shZK.exec("create /bar bar123",
-              "quit\n");
+      "quit\n");
   }
 
   public boolean verifyState() {
     shZK.exec("get /bar",
-              "quit\n");
+      "quit\n");
     return (shZK.getOut().join(' ') =~ /bar123.*zk:.*CONNECTED/).find();
   }
 }
\ No newline at end of file
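
The ZooKeeper verification is likewise one regex over the joined zkCli
transcript: the value written by createState() must appear before a
CONNECTED prompt fragment. A minimal sketch against a hypothetical
transcript line:

  def out = "bar123 [zk: localhost:2181(CONNECTED) 1]"
  assert (out =~ /bar123.*zk:.*CONNECTED/).find()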