Posted to commits@bigtop.apache.org by db...@apache.org on 2015/02/12 06:48:49 UTC
[3/5] bigtop git commit: BIGTOP-1601. cleanup whitespaces across test-artifacts
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy
index 9700b08..6b6fe7d 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy
@@ -46,9 +46,9 @@ class TestHadoopExamples {
JarContent.getJarName(HADOOP_MAPRED_HOME, 'hadoop.*examples.*.jar');
static {
assertNotNull("HADOOP_MAPRED_HOME has to be set to run this test",
- HADOOP_MAPRED_HOME);
+ HADOOP_MAPRED_HOME);
assertNotNull("HADOOP_CONF_DIR has to be set to run this test",
- HADOOP_CONF_DIR);
+ HADOOP_CONF_DIR);
assertNotNull("Can't find hadoop-examples.jar file", hadoopExamplesJar);
}
static final String HADOOP_EXAMPLES_JAR =
@@ -57,52 +57,52 @@ class TestHadoopExamples {
static Shell sh = new Shell("/bin/bash -s");
/**
- * Public so that we can run these tests as scripts
- * and the scripts can manually copy resources into DFS
- * See BIGTOP-1222 for example.
- */
- public static final String SOURCE ="bigtop-tests/test-artifacts/hadoop/src/main/resources/"
+ * Public so that we can run these tests as scripts
+ * and the scripts can manually copy resources into DFS
+ * See BIGTOP-1222 for example.
+ */
+ public static final String SOURCE = "bigtop-tests/test-artifacts/hadoop/src/main/resources/"
private static final String EXAMPLES = "examples";
private static final String EXAMPLES_OUT = "examples-output";
private static Configuration conf;
private static String mr_version = System.getProperty("mr.version", "mr2");
-
+
static final String RANDOMTEXTWRITER_TOTALBYTES = (mr_version == "mr1") ?
- "test.randomtextwrite.total_bytes" : "mapreduce.randomtextwriter.totalbytes";
+ "test.randomtextwrite.total_bytes" : "mapreduce.randomtextwriter.totalbytes";
@AfterClass
public static void tearDown() {
sh.exec("hadoop fs -rmr -skipTrash ${EXAMPLES}",
- "hadoop fs -rmr -skipTrash ${EXAMPLES_OUT}");
+ "hadoop fs -rmr -skipTrash ${EXAMPLES_OUT}");
}
@BeforeClass
static void setUp() {
conf = new Configuration();
- try{
- //copy examples/ into /user/root/ and
- //then create examples-output directory
- TestUtils.unpackTestResources(TestHadoopExamples.class, EXAMPLES, null, EXAMPLES_OUT);
+ try {
+ //copy examples/ into /user/root/ and
+ //then create examples-output directory
+ TestUtils.unpackTestResources(TestHadoopExamples.class, EXAMPLES, null, EXAMPLES_OUT);
+ }
+ catch (java.lang.Throwable t) {
+ LOG.info("Failed to unpack jar resources. Attemting to use bigtop sources");
+ def source = System.getenv("BIGTOP_HOME") + "/" + SOURCE;
+
+ assertNotNull("Can't copy test input files from bigtop source dir," +
+ "and jar specific attempt failed also", examples);
+
+ LOG.info("MAKING DIRECTORIES ..................... ${EXAMPLES} ${EXAMPLES_OUT}");
+
+ //add the files in resources/
+ sh.exec("hadoop fs -put ${source}/*.* .");
+ //add the directories under resources (like examples/)
+ sh.exec("hadoop fs -put ${source}/${EXAMPLES} ${EXAMPLES}");
+ sh.exec("hadoop fs -mkdir -p ${EXAMPLES_OUT}");
}
- catch(java.lang.Throwable t){
- LOG.info("Failed to unpack jar resources. Attemting to use bigtop sources");
- def source = System.getenv("BIGTOP_HOME")+"/"+SOURCE;
-
- assertNotNull("Can't copy test input files from bigtop source dir,"+
- "and jar specific attempt failed also", examples);
-
- LOG.info("MAKING DIRECTORIES ..................... ${EXAMPLES} ${EXAMPLES_OUT}");
-
- //add the files in resources/
- sh.exec("hadoop fs -put ${source}/*.* .");
- //add the directories under resources (like examples/)
- sh.exec("hadoop fs -put ${source}/${EXAMPLES} ${EXAMPLES}");
- sh.exec("hadoop fs -mkdir -p ${EXAMPLES_OUT}");
- }
- sh.exec("hadoop fs -ls ${EXAMPLES}");
- assertTrue("Failed asserting that 'examples' were created in the DFS", sh.getRet()==0);
+ sh.exec("hadoop fs -ls ${EXAMPLES}");
+ assertTrue("Failed asserting that 'examples' were created in the DFS", sh.getRet() == 0);
}
static long terasortid = System.currentTimeMillis();
@@ -115,17 +115,17 @@ class TestHadoopExamples {
public static String pi_samples = System.getProperty("pi_samples", "1000");
static LinkedHashMap examples =
[
- pi :"${pi_maps} ${pi_samples}",
- wordcount :"$EXAMPLES/text $EXAMPLES_OUT/wordcount",
- teragen :"${terasort_rows} teragen${terasortid}",
- terasort :"teragen${terasortid} terasort${terasortid}",
- teravalidate :"terasort${terasortid} tervalidate${terasortid}",
- multifilewc :"$EXAMPLES/text $EXAMPLES_OUT/multifilewc",
- aggregatewordcount:"$EXAMPLES/text $EXAMPLES_OUT/aggregatewordcount 2 textinputformat",
- aggregatewordhist :"$EXAMPLES/text $EXAMPLES_OUT/aggregatewordhist 2 textinputformat",
- grep :"$EXAMPLES/text $EXAMPLES_OUT/grep '[Cc]uriouser'",
- secondarysort :"$EXAMPLES/ints $EXAMPLES_OUT/secondarysort",
- randomtextwriter :"-D $RANDOMTEXTWRITER_TOTALBYTES=1073741824 $EXAMPLES_OUT/randomtextwriter"
+ pi: "${pi_maps} ${pi_samples}",
+ wordcount: "$EXAMPLES/text $EXAMPLES_OUT/wordcount",
+ teragen: "${terasort_rows} teragen${terasortid}",
+ terasort: "teragen${terasortid} terasort${terasortid}",
+ teravalidate: "terasort${terasortid} tervalidate${terasortid}",
+ multifilewc: "$EXAMPLES/text $EXAMPLES_OUT/multifilewc",
+ aggregatewordcount: "$EXAMPLES/text $EXAMPLES_OUT/aggregatewordcount 2 textinputformat",
+ aggregatewordhist: "$EXAMPLES/text $EXAMPLES_OUT/aggregatewordhist 2 textinputformat",
+ grep: "$EXAMPLES/text $EXAMPLES_OUT/grep '[Cc]uriouser'",
+ secondarysort: "$EXAMPLES/ints $EXAMPLES_OUT/secondarysort",
+ randomtextwriter: "-D $RANDOMTEXTWRITER_TOTALBYTES=1073741824 $EXAMPLES_OUT/randomtextwriter"
];
private String testName;
@@ -147,10 +147,10 @@ class TestHadoopExamples {
@Test
void testMRExample() {
- if(FailureVars.instance.getRunFailures()
- || FailureVars.instance.getServiceRestart()
- || FailureVars.instance.getServiceKill()
- || FailureVars.instance.getNetworkShutdown()) {
+ if (FailureVars.instance.getRunFailures()
+ || FailureVars.instance.getServiceRestart()
+ || FailureVars.instance.getServiceKill()
+ || FailureVars.instance.getNetworkShutdown()) {
runFailureThread();
}
sh.exec("hadoop jar $testJar $testName $testArgs");
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopSmoke.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopSmoke.groovy
index 40ad04d..7294197 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopSmoke.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopSmoke.groovy
@@ -48,13 +48,13 @@ class TestHadoopSmoke {
static String nn = (new Configuration()).get(DFSConfigKeys.FS_DEFAULT_NAME_KEY)
String cmd = "hadoop jar ${STREAMING_JAR}" +
- " -D mapred.map.tasks=1 -D mapred.reduce.tasks=1 -D mapred.job.name=Experiment"
+ " -D mapred.map.tasks=1 -D mapred.reduce.tasks=1 -D mapred.job.name=Experiment"
String cmd2 = " -input ${testDir}/cachefile/input.txt -mapper map.sh -file map.sh -reducer cat" +
- " -output ${testDir}/cachefile/out -verbose"
+ " -output ${testDir}/cachefile/out -verbose"
String arg = "${nn}/user/${System.properties['user.name']}/${testDir}/cachefile/cachedir.jar#testlink"
@BeforeClass
- static void setUp() throws IOException {
+ static void setUp() throws IOException {
String[] inputFiles = ["cachedir.jar", "input.txt"];
try {
TestUtils.unpackTestResources(TestHadoopSmoke.class, "${testDir}/cachefile", inputFiles, null);
@@ -71,7 +71,7 @@ class TestHadoopSmoke {
@Test
void testCacheArchive() {
sh.exec("hadoop fs -rmr ${testDir}/cachefile/out",
- cmd + ' -cacheArchive ' + arg + cmd2)
+ cmd + ' -cacheArchive ' + arg + cmd2)
logError(sh)
sh.exec("hadoop fs -cat ${testDir}/cachefile/out/part-00000")
logError(sh)
@@ -82,7 +82,7 @@ class TestHadoopSmoke {
@Test
void testArchives() {
sh.exec("hadoop fs -rmr ${testDir}/cachefile/out",
- cmd + ' -archives ' + arg + cmd2)
+ cmd + ' -archives ' + arg + cmd2)
logError(sh)
sh.exec("hadoop fs -cat ${testDir}/cachefile/out/part-00000")
logError(sh)
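The two tests above differ only in the flag (-cacheArchive vs. -archives)
spliced between the shared command halves. A sketch of how cmd, arg, and cmd2
compose; the NameNode URI, user, and testDir values are assumptions:

    def STREAMING_JAR = '/usr/lib/hadoop-mapreduce/hadoop-streaming.jar'  // assumed path
    def testDir = 'test.hadoopsmoke'                                      // assumed value
    String cmd = "hadoop jar ${STREAMING_JAR}" +
        " -D mapred.map.tasks=1 -D mapred.reduce.tasks=1 -D mapred.job.name=Experiment"
    String cmd2 = " -input ${testDir}/cachefile/input.txt -mapper map.sh -file map.sh -reducer cat" +
        " -output ${testDir}/cachefile/out -verbose"
    String arg = "hdfs://namenode:8020/user/root/${testDir}/cachefile/cachedir.jar#testlink"
    println cmd + ' -cacheArchive ' + arg + cmd2   // testArchives swaps in ' -archives '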
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestNode.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestNode.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestNode.groovy
index 48bb1ec..19bbd8f 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestNode.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestNode.groovy
@@ -27,14 +27,14 @@ import org.apache.bigtop.itest.JarContent;
import org.apache.bigtop.itest.shell.Shell;
public class TestNode {
-
+
// set debugging variable to true if you want error messages sent to stdout
private static Shell sh = new Shell("/bin/bash");
@BeforeClass
public static void setUp() {
// unpack resource
- JarContent.unpackJarContainer(TestNode.class, "." , null);
+ JarContent.unpackJarContainer(TestNode.class, ".", null);
System.out.println("Running Node commands:");
}
@@ -43,10 +43,10 @@ public class TestNode {
}
@Test
- public void testNodeBasic() {
+ public void testNodeBasic() {
// list
- System.out.println("-list");
- sh.exec("YARN_ROOT_LOGGER=WARN,console yarn node -list");
+ System.out.println("-list");
+ sh.exec("YARN_ROOT_LOGGER=WARN,console yarn node -list");
assertTrue("-list failed", sh.getRet() == 0);
// status
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestRmAdmin.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestRmAdmin.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestRmAdmin.groovy
index debbb16..d8fa74d 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestRmAdmin.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/yarn/TestRmAdmin.groovy
@@ -27,14 +27,14 @@ import org.apache.bigtop.itest.JarContent;
import org.apache.bigtop.itest.shell.Shell;
public class TestRmAdmin {
-
+
// set debugging variable to true if you want error messages sent to stdout
private static Shell sh = new Shell("/bin/bash");
@BeforeClass
public static void setUp() {
// unpack resource
- JarContent.unpackJarContainer(TestRmAdmin.class, "." , null);
+ JarContent.unpackJarContainer(TestRmAdmin.class, ".", null);
System.out.println("Running RmAdmin commands:");
}
@@ -43,10 +43,10 @@ public class TestRmAdmin {
}
@Test
- public void testRmAdminBasic() {
+ public void testRmAdminBasic() {
// help
- System.out.println("-help");
- sh.exec("YARN_ROOT_LOGGER=WARN,console yarn rmadmin -help");
+ System.out.println("-help");
+ sh.exec("YARN_ROOT_LOGGER=WARN,console yarn rmadmin -help");
assertTrue("-help failed", sh.getRet() == 0);
// getGroups
@@ -54,7 +54,7 @@ public class TestRmAdmin {
sh.exec("YARN_ROOT_LOGGER=WARN,console yarn rmadmin -getGroups");
assertTrue("-getGroups failed", sh.getRet() == 0);
}
-
+
@Test
public void testRmAdminRefreshcommands() {
// refreshQueues
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/IncrementalPELoad.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/IncrementalPELoad.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/IncrementalPELoad.java
index d1cb391..657de61 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/IncrementalPELoad.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/IncrementalPELoad.java
@@ -47,16 +47,16 @@ public class IncrementalPELoad extends Configured implements Tool {
private static final int ROWSPERSPLIT = 1024;
private static final byte[][] FAMILIES
- = { Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-A")),
- Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-B"))};
+ = {Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-A")),
+ Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-B"))};
private int keyLength;
- private static final int KEYLEN_DEFAULT=10;
- private static final String KEYLEN_CONF="randomkv.key.length";
+ private static final int KEYLEN_DEFAULT = 10;
+ private static final String KEYLEN_CONF = "randomkv.key.length";
private int valLength;
- private static final int VALLEN_DEFAULT=10;
- private static final String VALLEN_CONF="randomkv.val.length";
+ private static final int VALLEN_DEFAULT = 10;
+ private static final String VALLEN_CONF = "randomkv.val.length";
@Override
protected void setup(Context context)
@@ -69,10 +69,9 @@ public class IncrementalPELoad extends Configured implements Tool {
}
protected void map(NullWritable n1, NullWritable n2,
- Mapper<NullWritable, NullWritable,
- ImmutableBytesWritable,KeyValue>.Context context)
- throws java.io.IOException ,InterruptedException
- {
+ Mapper<NullWritable, NullWritable,
+ ImmutableBytesWritable, KeyValue>.Context context)
+ throws java.io.IOException, InterruptedException {
byte keyBytes[] = new byte[keyLength];
byte valBytes[] = new byte[valLength];
@@ -85,7 +84,7 @@ public class IncrementalPELoad extends Configured implements Tool {
random.nextBytes(keyBytes);
// Ensure that unique tasks generate unique keys
- keyBytes[keyLength - 1] = (byte)(taskId & 0xFF);
+ keyBytes[keyLength - 1] = (byte) (taskId & 0xFF);
random.nextBytes(valBytes);
ImmutableBytesWritable key = new ImmutableBytesWritable(keyBytes);
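The only line here that looks behavioral, keyBytes[keyLength - 1] = (byte)
(taskId & 0xFF), is still just reindented: it stamps the task id into the
last key byte so that mappers drawing from independent Randoms cannot emit
colliding keys. A standalone Groovy sketch of the idea; the taskId literal is
a stand-in for the value parsed from the task attempt id:

    int keyLength = 10
    byte[] keyBytes = new byte[keyLength]
    new Random().nextBytes(keyBytes)
    int taskId = 3                                   // stand-in for the parsed attempt id
    keyBytes[keyLength - 1] = (byte) (taskId & 0xFF)
    println keyBytes.collect { String.format('%02x', it) }.join()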
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestCopyTable.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestCopyTable.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestCopyTable.java
index 47ea810..8b859f1 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestCopyTable.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestCopyTable.java
@@ -31,9 +31,12 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.AfterClass;
import org.junit.Test;
+
import static org.junit.Assert.assertTrue;
+
import org.apache.bigtop.itest.shell.Shell;
import org.apache.bigtop.itest.hbase.util.HBaseTestUtil;
+
import static org.apache.bigtop.itest.LogErrorsUtils.logError;
public class TestCopyTable {
@@ -48,7 +51,7 @@ public class TestCopyTable {
private static HTable origTable;
private static HTable copyTable;
private static String copyTableCmd =
- "hbase org.apache.hadoop.hbase.mapreduce.CopyTable";
+ "hbase org.apache.hadoop.hbase.mapreduce.CopyTable";
private static int NUM_ROWS = 5000;
private static Configuration conf;
@@ -60,12 +63,12 @@ public class TestCopyTable {
admin = new HBaseAdmin(conf);
HTableDescriptor htd_orig =
- HBaseTestUtil.createTestTableDescriptor("orig", TEST_FAMILY);
+ HBaseTestUtil.createTestTableDescriptor("orig", TEST_FAMILY);
admin.createTable(htd_orig);
orig = htd_orig.getName();
HTableDescriptor htd_copy =
- HBaseTestUtil.createTestTableDescriptor("copy", TEST_FAMILY);
+ HBaseTestUtil.createTestTableDescriptor("copy", TEST_FAMILY);
admin.createTable(htd_copy);
copy = htd_copy.getName();
@@ -97,13 +100,13 @@ public class TestCopyTable {
@Test
public void testCopyTable() throws Exception {
sh.exec(copyTableCmd + " --new.name=" + new String(copy) +
- " " + new String(orig));
+ " " + new String(orig));
logError(sh);
assertTrue(sh.getRet() == 0);
String origDigest = HBaseTestUtil.checksumRows(origTable);
String copyDigest = HBaseTestUtil.checksumRows(copyTable);
assertTrue("Original and copy tables contain different data",
- origDigest.equals(copyDigest));
+ origDigest.equals(copyDigest));
}
}
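For context, this smoke boils down to one CLI invocation plus a row-checksum
comparison: CopyTable copies orig into copy, then checksumRows must agree on
both tables. A sketch of the shelled-out command; the table names are
placeholders for the generated test names:

    def copyTableCmd = 'hbase org.apache.hadoop.hbase.mapreduce.CopyTable'
    def orig = 'orig_1423721329000'   // placeholder generated name
    def copy = 'copy_1423721329000'   // placeholder generated name
    println "$copyTableCmd --new.name=$copy $orig"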
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseBalancer.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseBalancer.groovy b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseBalancer.groovy
index c8391ac..23d7c3b 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseBalancer.groovy
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseBalancer.groovy
@@ -56,7 +56,7 @@ class TestHBaseBalancer {
logError(sh)
assertTrue(sh.getRet() == 0)
assertTrue("balance_switch failed switching to true",
- sh.getOut().toString().indexOf("true") != -1)
+ sh.getOut().toString().indexOf("true") != -1)
// Return balancer switch to original state, and verify its
// previous state to be false.
@@ -64,6 +64,6 @@ class TestHBaseBalancer {
logError(sh)
assertTrue(sh.getRet() == 0)
assertTrue("balance_switch failed switching to false",
- sh.getOut().toString().indexOf("false") != -1)
+ sh.getOut().toString().indexOf("false") != -1)
}
}
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseCompression.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseCompression.groovy b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseCompression.groovy
index 6b63d3c..fba1d77 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseCompression.groovy
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseCompression.groovy
@@ -42,7 +42,7 @@ class TestHBaseCompression {
static void setUp() {
conf = new Configuration();
conf.addResource('mapred-site.xml');
- HADOOP_OPTIONS =
+ HADOOP_OPTIONS =
"-fs ${conf.get('fs.default.name')} -jt ${conf.get('mapred.job.tracker')}";
sh.exec("whoami");
String user = sh.out[0];
@@ -51,7 +51,7 @@ class TestHBaseCompression {
if (sh.getRet() == 0) {
sh.exec("hadoop fs $HADOOP_OPTIONS -rmr -skipTrash $OUTPUT");
assertTrue("Deletion of previous $OUTPUT from HDFS failed",
- sh.getRet() == 0);
+ sh.getRet() == 0);
}
sh.exec("hadoop fs $HADOOP_OPTIONS -mkdir $OUTPUT");
assertTrue("Could not create $OUTPUT directory", sh.getRet() == 0);
@@ -63,14 +63,14 @@ class TestHBaseCompression {
if (sh.getRet() == 0) {
sh.exec("hadoop fs $HADOOP_OPTIONS -rmr -skipTrash $OUTPUT");
assertTrue("Deletion of $OUTPUT from HDFS failed",
- sh.getRet() == 0);
+ sh.getRet() == 0);
}
}
void _testCompression(String codec) {
// workaround for hbase; set HBASE_LIBRARY_PATH
sh.exec("export HBASE_LIBRARY_PATH=$JAVA_LIBRARY_PATH",
- "hbase $TEST $HDFS_PATH/testfile.$codec $codec");
+ "hbase $TEST $HDFS_PATH/testfile.$codec $codec");
assertTrue("test failed with codec: $codec", sh.getRet() == 0);
}
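The HADOOP_OPTIONS string realigned above is just the -fs/-jt pair pulled
from the loaded configuration. A sketch with placeholder addresses standing
in for the values read from mapred-site.xml:

    def fsDefaultName = 'hdfs://namenode:8020'   // placeholder for conf.get('fs.default.name')
    def jobTracker = 'jobtracker:8021'           // placeholder for conf.get('mapred.job.tracker')
    def HADOOP_OPTIONS = "-fs ${fsDefaultName} -jt ${jobTracker}"
    println "hadoop fs $HADOOP_OPTIONS -mkdir /user/root/output"   // illustrative use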
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseImportExport.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseImportExport.groovy b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseImportExport.groovy
index 2ea01db..14c20f2 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseImportExport.groovy
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseImportExport.groovy
@@ -113,7 +113,7 @@ class TestHBaseImportExport {
String origDigest = HBaseTestUtil.checksumRows(origTable)
String exportDigest = HBaseTestUtil.checksumRows(exportTable)
assertTrue("Original and exported tables contain different data",
- origDigest.equals(exportDigest))
+ origDigest.equals(exportDigest))
}
}
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBasePigSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBasePigSmoke.groovy b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBasePigSmoke.groovy
index c542b26..3c42937 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBasePigSmoke.groovy
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBasePigSmoke.groovy
@@ -30,12 +30,12 @@ class TestHBasePigSmoke {
private static String extra_jars =
System.getProperty("org.apache.bigtop.itest.hbase.smoke.TestHBasePigSmoke.extra_jars",
- "");
+ "");
private static String register_clause = "";
private static String tmp = "TestHBasePigSmoke-${(new Date().getTime())}";
- private static String TABLE="smoke-${tmp}";
- private static String FAM1='family1';
- private static String FAM2='family2';
+ private static String TABLE = "smoke-${tmp}";
+ private static String FAM1 = 'family1';
+ private static String FAM2 = 'family2';
private static Shell shHBase = new Shell('hbase shell');
private static Shell shPig = new Shell('pig');
@@ -50,32 +50,34 @@ class TestHBasePigSmoke {
@BeforeClass
static void setUp() {
shHBase.exec("create '$TABLE', '$FAM1', '$FAM2'",
- "describe '$TABLE'",
- "quit\n");
+ "describe '$TABLE'",
+ "quit\n");
assertEquals("Creating of the ${TABLE} failed",
- 0, shHBase.ret);
+ 0, shHBase.ret);
}
@AfterClass
static void tearDown() {
shHBase.exec("disable '$TABLE'",
- "drop '$TABLE'",
- "quit\n");
+ "drop '$TABLE'",
+ "quit\n");
sh.exec("hadoop fs -rmr $TABLE");
}
@Ignore("BIGTOP-219")
- @Test(timeout=300000L)
+ @Test(timeout = 300000L)
public void Pig2HBase() {
def script = "\n";
- (1..ROW_CNT).each { script <<= String.format('%020d %d %s\n', it, it, 'localhost') }
+ (1..ROW_CNT).each {
+ script <<= String.format('%020d %d %s\n', it, it, 'localhost')
+ }
sh.exec("hadoop dfs -mkdir $TABLE",
- "hadoop dfs -put <(cat << __EOT__${script}__EOT__) ${TABLE}/data");
+ "hadoop dfs -put <(cat << __EOT__${script}__EOT__) ${TABLE}/data");
assertEquals("Can't copy data to HDFS",
- 0, sh.ret);
+ 0, sh.ret);
shPig.exec("""
${register_clause}
@@ -85,15 +87,15 @@ class TestHBasePigSmoke {
quit
""");
assertEquals("Failed loading data via PIG",
- 0, shPig.ret);
+ 0, shPig.ret);
shHBase.exec("scan '$TABLE'",
- "quit\n");
+ "quit\n");
assertTrue("Scanning the table returned wrong # of rows",
- (shHBase.out.get(shHBase.out.size() - 3) =~ "^$ROW_CNT row.s. in .* seconds").find());
+ (shHBase.out.get(shHBase.out.size() - 3) =~ "^$ROW_CNT row.s. in .* seconds").find());
}
- @Test(timeout=300000L)
+ @Test(timeout = 300000L)
@Ignore("BIGTOP-219")
public void HBase2Pig() {
def script = "\n";
@@ -116,6 +118,6 @@ class TestHBasePigSmoke {
sh.exec("hadoop fs -cat $TABLE/pig/part* | wc -l");
assertEquals("Scanning the PIG output returned wrong # of rows",
- ROW_CNT, sh.out.get(0).toInteger());
+ ROW_CNT, sh.out.get(0).toInteger());
}
}
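The most compact idiom in this file is the data load: rows are synthesized in
memory and fed to "hadoop dfs -put" through bash process substitution. A
Groovy sketch; the TABLE suffix and ROW_CNT value are assumptions:

    def TABLE = 'smoke-TestHBasePigSmoke-1423721329000'   // placeholder generated name
    def ROW_CNT = 100                                     // assumed count
    def script = '\n'
    (1..ROW_CNT).each {
        script <<= String.format('%020d %d %s\n', it, it, 'localhost')
    }
    println "hadoop dfs -put <(cat << __EOT__${script}__EOT__) ${TABLE}/data"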
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseSmoke.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseSmoke.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseSmoke.java
index 50bcf42..b32705c 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseSmoke.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHBaseSmoke.java
@@ -49,7 +49,7 @@ public class TestHBaseSmoke {
HBaseAdmin admin = new HBaseAdmin(conf);
HTableDescriptor htd =
- HBaseTestUtil.createTestTableDescriptor("testSimplePutGet", TEST_FAMILY);
+ HBaseTestUtil.createTestTableDescriptor("testSimplePutGet", TEST_FAMILY);
admin.createTable(htd);
byte[] tableName = htd.getName();
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHFileOutputFormat.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHFileOutputFormat.java
index 1a0ed46..32a7d0c 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHFileOutputFormat.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestHFileOutputFormat.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
+
import org.junit.AfterClass;
import org.junit.Test;
import org.junit.Ignore;
@@ -53,28 +54,32 @@ public class TestHFileOutputFormat {
private static final int ROWSPERSPLIT = 1024;
private static final byte[][] FAMILIES =
- { Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-A")),
- Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-B"))};
+ {Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-A")),
+ Bytes.add(PerformanceEvaluation.FAMILY_NAME, Bytes.toBytes("-B"))};
private static final String HBASE_HOME = System.getenv("HBASE_HOME");
private static final String HBASE_CONF_DIR = System.getenv("HBASE_CONF_DIR");
+
static {
assertNotNull("HBASE_HOME has to be set to run this test",
HBASE_HOME);
assertNotNull("HBASE_CONF_DIR has to be set to run this test",
HBASE_CONF_DIR);
}
+
private static String hbase_jar =
- JarContent.getJarName(HBASE_HOME, "hbase-.*(?<!tests).jar");
+ JarContent.getJarName(HBASE_HOME, "hbase-.*(?<!tests).jar");
private static String hbase_tests_jar =
- JarContent.getJarName(HBASE_HOME, "hbase-.*tests.jar");
+ JarContent.getJarName(HBASE_HOME, "hbase-.*tests.jar");
private static URL incrload_jar_url =
- JarContent.getJarURL(org.apache.bigtop.itest.hbase.smoke.IncrementalPELoad.class);
+ JarContent.getJarURL(org.apache.bigtop.itest.hbase.smoke.IncrementalPELoad.class);
+
static {
assertNotNull("Can't find hbase.jar", hbase_jar);
assertNotNull("Can't find hbase-tests.jar", hbase_tests_jar);
assertNotNull("Can't find jar containing IncrementalPELoad class", incrload_jar_url);
}
+
private static final String HBASE_JAR = HBASE_HOME + "/" + hbase_jar;
private static final String HBASE_TESTS_JAR = HBASE_HOME + "/" + hbase_tests_jar;
private static final String ZOOKEEPER_JAR = HBASE_HOME + "/lib/zookeeper.jar";
@@ -102,7 +107,7 @@ public class TestHFileOutputFormat {
doIncrementalLoadTest("testMRIncrementalLoadWithSplit", true);
}
- private byte [][] generateRandomSplitKeys(int numKeys) {
+ private byte[][] generateRandomSplitKeys(int numKeys) {
Random random = new Random();
byte[][] ret = new byte[numKeys][];
for (int i = 0; i < numKeys; i++) {
@@ -114,7 +119,7 @@ public class TestHFileOutputFormat {
private void doIncrementalLoadTest(String testName, boolean shouldChangeRegions)
throws Exception {
FileSystem fs = HBaseTestUtil.getClusterFileSystem();
- Path testDir = HBaseTestUtil.getMROutputDir(testName);
+ Path testDir = HBaseTestUtil.getMROutputDir(testName);
byte[][] splitKeys = generateRandomSplitKeys(4);
Configuration conf = HBaseConfiguration.create();
@@ -165,7 +170,7 @@ public class TestHFileOutputFormat {
// Ensure data shows up
int expectedRows = NMapInputFormat.getNumMapTasks(conf) * ROWSPERSPLIT;
assertEquals("LoadIncrementalHFiles should put expected data in table",
- expectedRows, HBaseTestUtil.countRows(table));
+ expectedRows, HBaseTestUtil.countRows(table));
Scan scan = new Scan();
ResultScanner results = table.getScanner(scan);
int count = 0;
@@ -180,12 +185,12 @@ public class TestHFileOutputFormat {
}
results.close();
String tableDigestBefore = HBaseTestUtil.checksumRows(table);
-
+
// Cause regions to reopen
admin.disableTable(TABLE_NAME);
admin.enableTable(TABLE_NAME);
assertEquals("Data should remain after reopening of regions",
- tableDigestBefore, HBaseTestUtil.checksumRows(table));
+ tableDigestBefore, HBaseTestUtil.checksumRows(table));
// cleanup
// - disable and drop table
@@ -202,7 +207,7 @@ public class TestHFileOutputFormat {
private void runIncrementalPELoad(String table, String outDir) {
sh.exec("export HADOOP_CLASSPATH=" + HBASE_CONF_DIR + ":" + HBASE_JAR + ":" + HBASE_TESTS_JAR + ":" + ZOOKEEPER_JAR,
- "hadoop jar " + INCRLOAD_JAR + " " + INCRLOAD +
+ "hadoop jar " + INCRLOAD_JAR + " " + INCRLOAD +
" -libjars " + HBASE_JAR + "," + HBASE_TESTS_JAR +
" " + table + " " + outDir);
assertEquals("MR job failed", 0, sh.getRet());
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestImportTsv.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestImportTsv.groovy b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestImportTsv.groovy
index 09cf4b8..d34ab8f 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestImportTsv.groovy
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestImportTsv.groovy
@@ -53,7 +53,7 @@ public class TestImportTsv {
private static final String HBASE_HOME = System.getenv("HBASE_HOME");
static {
assertNotNull("HBASE_HOME has to be set to run this test",
- HBASE_HOME);
+ HBASE_HOME);
}
private static String hbase_jar =
JarContent.getJarName(HBASE_HOME, "hbase-.*(?<!tests).jar");
@@ -70,17 +70,17 @@ public class TestImportTsv {
if (sh.getRet() != 0) {
sh.exec("hadoop fs -mkdir $DATADIR1");
assertTrue("Unable to create directory $DATADIR1",
- sh.getRet() == 0);
+ sh.getRet() == 0);
}
sh.exec("hadoop fs -test -e $DATADIR2");
if (sh.getRet() != 0) {
sh.exec("hadoop fs -mkdir $DATADIR2");
assertTrue("Unable to create directory $DATADIR2",
- sh.getRet() == 0);
+ sh.getRet() == 0);
}
// load data into HDFS
sh.exec("hadoop fs -put movies.tsv $DATADIR1/items",
- "hadoop fs -put movies.psv $DATADIR2/items");
+ "hadoop fs -put movies.psv $DATADIR2/items");
assertTrue("setup failed", sh.getRet() == 0);
}
@@ -88,8 +88,8 @@ public class TestImportTsv {
public static void cleanUp() {
// delete data and junk from HDFS
sh.exec("hadoop fs -rmr -skipTrash $DATADIR1",
- "hadoop fs -rmr -skipTrash $DATADIR2",
- "hadoop fs -rmr -skipTrash /user/$USER/partitions_*");
+ "hadoop fs -rmr -skipTrash $DATADIR2",
+ "hadoop fs -rmr -skipTrash /user/$USER/partitions_*");
assertTrue("teardown failed", sh.getRet() == 0);
}
@@ -126,7 +126,7 @@ public class TestImportTsv {
if (sh.getRet() == 0) {
sh.exec("hadoop fs -rmr -skipTrash $OUTDIR");
assertTrue("Deletion of $OUTDIR from HDFS failed",
- sh.getRet() == 0);
+ sh.getRet() == 0);
}
}
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestLoadIncrementalHFiles.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestLoadIncrementalHFiles.java
index 4192095..1adfebd 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestLoadIncrementalHFiles.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/smoke/TestLoadIncrementalHFiles.java
@@ -28,16 +28,18 @@ import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
+
import static org.junit.Assert.assertEquals;
+
import org.apache.bigtop.itest.hbase.util.HBaseTestUtil;
import org.apache.bigtop.itest.shell.Shell;
public class TestLoadIncrementalHFiles {
private static final byte[] FAMILY = Bytes.toBytes("f1");
private static final byte[] QUALIFIER = Bytes.toBytes("q1");
- private static final byte[][] SPLIT_KEYS = new byte[][] {
- Bytes.toBytes("ddd"),
- Bytes.toBytes("ppp")
+ private static final byte[][] SPLIT_KEYS = new byte[][]{
+ Bytes.toBytes("ddd"),
+ Bytes.toBytes("ppp")
};
private static Shell sh = new Shell("/bin/bash -s");
@@ -48,10 +50,10 @@ public class TestLoadIncrementalHFiles {
@Test
public void testSimpleLoad() throws Exception {
runTest("testSimpleLoad",
- new byte[][][] {
- new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") },
- new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") },
- });
+ new byte[][][]{
+ new byte[][]{Bytes.toBytes("aaaa"), Bytes.toBytes("cccc")},
+ new byte[][]{Bytes.toBytes("ddd"), Bytes.toBytes("ooo")},
+ });
}
/**
@@ -61,10 +63,10 @@ public class TestLoadIncrementalHFiles {
@Test
public void testRegionCrossingLoad() throws Exception {
runTest("testRegionCrossingLoad",
- new byte[][][] {
- new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
- new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
- });
+ new byte[][][]{
+ new byte[][]{Bytes.toBytes("aaaa"), Bytes.toBytes("eee")},
+ new byte[][]{Bytes.toBytes("fff"), Bytes.toBytes("zzz")},
+ });
}
private void chmod(String uri) {
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Putter.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Putter.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Putter.java
index 0aaffb3..eeaab24 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Putter.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Putter.java
@@ -41,7 +41,7 @@ public class Putter {
Put put = null;
if (result != null) {
NavigableMap<byte[], NavigableMap<byte[], byte[]>> cfmap =
- result.getNoVersionMap();
+ result.getNoVersionMap();
if (result.getRow() != null && cfmap != null) {
put = new Put(result.getRow());
@@ -70,7 +70,7 @@ public class Putter {
}
public static int doScanAndPut(HTable table, int val, boolean autoflush)
- throws IOException {
+ throws IOException {
Scan s = new Scan();
byte[] start = {};
byte[] stop = {};
@@ -78,7 +78,7 @@ public class Putter {
s.setStartRow(start);
s.setStopRow(stop);
SingleColumnValueFilter filter = new SingleColumnValueFilter(
- Bytes.toBytes("f1"), Bytes.toBytes("qual"), CompareOp.EQUAL, value);
+ Bytes.toBytes("f1"), Bytes.toBytes("qual"), CompareOp.EQUAL, value);
s.setFilter(filter);
table.setAutoFlush(autoflush);
@@ -95,7 +95,7 @@ public class Putter {
public static void main(String argv[]) throws IOException {
if (argv.length < 2) {
System.err.println("usage: " + Putter.class.getSimpleName() +
- " <table> <value>");
+ " <table> <value>");
System.err.println(" <value>: a numeric value [0,500)");
System.exit(1);
}
@@ -112,7 +112,7 @@ public class Putter {
}
Configuration conf = HBaseConfiguration.create();
- byte [] tableName = Bytes.toBytes(argv[0]);
+ byte[] tableName = Bytes.toBytes(argv[0]);
int val = Integer.parseInt(argv[1]);
HTable table = new HTable(conf, tableName);
for (int i = 0; i < loops; i++) {
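Putter doubles as a standalone driver, "hbase ... Putter <table> <value>",
with <value> numeric in [0,500) to match the %010d-formatted cells the
concurrency test writes. A sketch of the argument handling; the sample argv
is an assumption:

    String[] argv = ['mytable', '13']   // assumed sample arguments
    if (argv.length < 2) {
        System.err.println('usage: Putter <table> <value>')
        System.err.println('  <value>: a numeric value [0,500)')
        System.exit(1)
    }
    int val = Integer.parseInt(argv[1])
    println "scan-and-put with filter value ${String.format('%010d', val)}"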
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Scanner.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Scanner.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Scanner.java
index b6a7e2d..cfe5cb5 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Scanner.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/Scanner.java
@@ -39,14 +39,14 @@ import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
/**
- * This program scans a table a configurable number of times. Uses
+ * This program scans a table a configurable number of times. Uses
* the table record reader.
*/
public class Scanner {
public static final Log LOG = LogFactory.getLog(Scanner.class);
public static int doScan(HTable table, int val) throws IOException,
- InterruptedException {
+ InterruptedException {
Scan s = new Scan();
byte[] start = {};
byte[] stop = {};
@@ -54,7 +54,7 @@ public class Scanner {
s.setStartRow(start);
s.setStopRow(stop);
SingleColumnValueFilter filter = new SingleColumnValueFilter(
- Bytes.toBytes("f1"), Bytes.toBytes("qual"), CompareOp.EQUAL, value);
+ Bytes.toBytes("f1"), Bytes.toBytes("qual"), CompareOp.EQUAL, value);
s.setFilter(filter);
// Keep track of gathered elements.
@@ -70,7 +70,7 @@ public class Scanner {
}
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long,
- byte[]>>> columnFamilyMap = r.getMap();
+ byte[]>>> columnFamilyMap = r.getMap();
// Output time to show if flush related.
String k = Bytes.toStringBinary(r.getRow());
@@ -85,7 +85,7 @@ public class Scanner {
}
System.out.println("scan items counted: " + cnt + " for scan " +
- s.toString() + " with filter f1:qual == " + Bytes.toString(value));
+ s.toString() + " with filter f1:qual == " + Bytes.toString(value));
// Print out dupes.
int dupes = 0;
@@ -104,13 +104,13 @@ public class Scanner {
public static void main(String argv[]) throws IOException {
if (argv.length < 2) {
System.err.println("usage: " + Scanner.class.getSimpleName() +
- " <table> <value>");
+ " <table> <value>");
System.err.println(" <value>: a numeric value [0,500)");
System.exit(1);
}
Configuration conf = HBaseConfiguration.create();
- byte [] tableName = Bytes.toBytes(argv[0]);
+ byte[] tableName = Bytes.toBytes(argv[0]);
int val = Integer.parseInt(argv[1]);
int loops = 1;
for (int i = 1; i < argv.length; i++) {
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestConcurrentScanAndPut.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestConcurrentScanAndPut.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestConcurrentScanAndPut.java
index fec28be..468b702 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestConcurrentScanAndPut.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestConcurrentScanAndPut.java
@@ -49,11 +49,11 @@ import org.apache.bigtop.itest.hbase.util.HBaseTestUtil;
* multiple column families sometimes get split into two rows.
*/
public class TestConcurrentScanAndPut {
- public static Shell scanSh = new Shell( "/bin/bash -s" );
- public static Shell putSh = new Shell( "/bin/bash -s" );
+ public static Shell scanSh = new Shell("/bin/bash -s");
+ public static Shell putSh = new Shell("/bin/bash -s");
public static HBaseAdmin admin;
- public static byte [] tableName;
+ public static byte[] tableName;
public static String putter_pid;
public static int scannerLoops;
@@ -61,10 +61,10 @@ public class TestConcurrentScanAndPut {
@BeforeClass
public static void setUp() throws ClassNotFoundException,
- InterruptedException, IOException {
+ InterruptedException, IOException {
System.out.println("Unpacking resources");
- JarContent.unpackJarContainer(Scanner.class, "." , null);
- JarContent.unpackJarContainer(Putter.class, "." , null);
+ JarContent.unpackJarContainer(Scanner.class, ".", null);
+ JarContent.unpackJarContainer(Putter.class, ".", null);
Configuration conf = HBaseConfiguration.create();
try {
@@ -73,9 +73,9 @@ public class TestConcurrentScanAndPut {
System.err.println("Hbase is not up. Bailing out.");
System.exit(1);
}
-
+
tableName =
- Bytes.toBytes(new String(HBaseTestUtil.getTestTableName("concurrentScanAndPut")));
+ Bytes.toBytes(new String(HBaseTestUtil.getTestTableName("concurrentScanAndPut")));
HTableDescriptor htd = new HTableDescriptor(tableName);
for (int i = 0; i < 10; i++) {
htd.addFamily(new HColumnDescriptor("f" + i));
@@ -97,14 +97,14 @@ public class TestConcurrentScanAndPut {
for (int j = 0; j < 10; j++) {
String value = String.format("%010d", rnd.nextInt(500));
p.add(Bytes.toBytes("f" + j),
- Bytes.toBytes("qual"),
- Bytes.toBytes(value));
+ Bytes.toBytes("qual"),
+ Bytes.toBytes(value));
String bigvalue = String.format("%0100d%0100d%0100d%0100d%0100d" +
- "%0100d%0100d%0100d%0100d%0100d",
- i, i, i, i, i, i, i, i, i, i);
+ "%0100d%0100d%0100d%0100d%0100d",
+ i, i, i, i, i, i, i, i, i, i);
p.add(Bytes.toBytes("f" + j),
- Bytes.toBytes("data"),
- Bytes.toBytes(bigvalue));
+ Bytes.toBytes("data"),
+ Bytes.toBytes(bigvalue));
}
puts.add(p);
if (i % batch == (batch - 1)) {
@@ -119,14 +119,14 @@ public class TestConcurrentScanAndPut {
try {
scannerLoops = Integer.parseInt(System.getProperty(
- "concurrentScanAndPut.scanner.loops"));
+ "concurrentScanAndPut.scanner.loops"));
} catch (NumberFormatException e) {
scannerLoops = 100;
}
try {
putterLoops = Integer.parseInt(System.getProperty(
- "concurrentScanAndPut.putter.loops"));
+ "concurrentScanAndPut.putter.loops"));
} catch (NumberFormatException e) {
putterLoops = 100;
}
@@ -147,20 +147,20 @@ public class TestConcurrentScanAndPut {
String tableNameStr = Bytes.toString(tableName);
System.out.println("Starting puts to test table " + tableNameStr);
putSh.exec("(HBASE_CLASSPATH=. " +
- "hbase org.apache.bigtop.itest.hbase.system.Putter " +
- tableNameStr + " 13 -l " + putterLoops +
- " > /dev/null 2>&1 & echo $! ) 2> /dev/null");
+ "hbase org.apache.bigtop.itest.hbase.system.Putter " +
+ tableNameStr + " 13 -l " + putterLoops +
+ " > /dev/null 2>&1 & echo $! ) 2> /dev/null");
putter_pid = putSh.getOut().get(0);
System.out.println("Starting concurrent scans of test table " +
- tableNameStr);
+ tableNameStr);
scanSh.exec("HBASE_CLASSPATH=. hbase " +
- "org.apache.bigtop.itest.hbase.system.Scanner " +
- tableNameStr + " 13 -l " + scannerLoops + " 2>/dev/null");
+ "org.apache.bigtop.itest.hbase.system.Scanner " +
+ tableNameStr + " 13 -l " + scannerLoops + " 2>/dev/null");
int splitRows = scanSh.getRet();
System.out.println("Split rows: " + splitRows);
assertTrue("Rows were split when scanning table with concurrent writes",
- splitRows == 0);
+ splitRows == 0);
}
}
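The concurrency harness here is shell-level: the Putter is backgrounded with
its pid captured via "echo $!", then the Scanner runs in the foreground and
its exit code reports the number of split rows seen. A sketch of the two
command strings; the table name and loop counts are placeholders:

    def tableNameStr = 'concurrentScanAndPut_1423721329000'   // placeholder generated name
    def putterLoops = 100
    def scannerLoops = 100
    println '(HBASE_CLASSPATH=. ' +
        'hbase org.apache.bigtop.itest.hbase.system.Putter ' +
        tableNameStr + ' 13 -l ' + putterLoops +
        ' > /dev/null 2>&1 & echo $! ) 2> /dev/null'
    println 'HBASE_CLASSPATH=. hbase ' +
        'org.apache.bigtop.itest.hbase.system.Scanner ' +
        tableNameStr + ' 13 -l ' + scannerLoops + ' 2>/dev/null'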
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
index b15b20d..947f59e 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
@@ -60,14 +60,14 @@ import org.junit.Test;
import com.google.common.collect.Lists;
-public class TestLoadAndVerify extends Configured implements Tool {
+public class TestLoadAndVerify extends Configured implements Tool {
private static final String TEST_NAME = "TestLoadAndVerify";
private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
private static final byte[] TEST_QUALIFIER = Bytes.toBytes("q1");
private static final String NUM_TO_WRITE_KEY =
- "loadmapper.num_to_write";
- private static final long NUM_TO_WRITE_DEFAULT = 100*1000;
+ "loadmapper.num_to_write";
+ private static final long NUM_TO_WRITE_DEFAULT = 100 * 1000;
private static final String TABLE_NAME_KEY = "loadmapper.table";
private static final String TABLE_NAME_DEFAULT = "table";
@@ -89,25 +89,24 @@ public class TestLoadAndVerify extends Configured implements Tool {
/**
* Converts a "long" value between endian systems.
* Borrowed from Apache Commons IO
+ *
* @param value value to convert
* @return the converted value
*/
- public static long swapLong(long value)
- {
+ public static long swapLong(long value) {
return
- ( ( ( value >> 0 ) & 0xff ) << 56 ) +
- ( ( ( value >> 8 ) & 0xff ) << 48 ) +
- ( ( ( value >> 16 ) & 0xff ) << 40 ) +
- ( ( ( value >> 24 ) & 0xff ) << 32 ) +
- ( ( ( value >> 32 ) & 0xff ) << 24 ) +
- ( ( ( value >> 40 ) & 0xff ) << 16 ) +
- ( ( ( value >> 48 ) & 0xff ) << 8 ) +
- ( ( ( value >> 56 ) & 0xff ) << 0 );
+ (((value >> 0) & 0xff) << 56) +
+ (((value >> 8) & 0xff) << 48) +
+ (((value >> 16) & 0xff) << 40) +
+ (((value >> 24) & 0xff) << 32) +
+ (((value >> 32) & 0xff) << 24) +
+ (((value >> 40) & 0xff) << 16) +
+ (((value >> 48) & 0xff) << 8) +
+ (((value >> 56) & 0xff) << 0);
}
public static class LoadMapper
- extends Mapper<NullWritable, NullWritable, NullWritable, NullWritable>
- {
+ extends Mapper<NullWritable, NullWritable, NullWritable, NullWritable> {
private long recordsToWrite;
private HTable table;
private Configuration conf;
@@ -122,10 +121,10 @@ public class TestLoadAndVerify extends Configured implements Tool {
public void setup(Context context) throws IOException {
conf = context.getConfiguration();
recordsToWrite = conf.getLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT);
- byte [] tableName = Bytes.toBytes(conf.get(TABLE_NAME_KEY, TABLE_NAME_DEFAULT));
+ byte[] tableName = Bytes.toBytes(conf.get(TABLE_NAME_KEY, TABLE_NAME_DEFAULT));
numBackReferencesPerRow = conf.getInt(NUM_BACKREFS_KEY, NUM_BACKREFS_DEFAULT);
table = new HTable(conf, tableName);
- table.setWriteBufferSize(4*1024*1024);
+ table.setWriteBufferSize(4 * 1024 * 1024);
table.setAutoFlush(false);
String taskId = conf.get("mapred.task.id");
@@ -146,15 +145,15 @@ public class TestLoadAndVerify extends Configured implements Tool {
}
@Override
- protected void map(NullWritable key, NullWritable value,
- Context context) throws IOException, InterruptedException {
+ protected void map(NullWritable key, NullWritable value,
+ Context context) throws IOException, InterruptedException {
String suffix = "/" + shortTaskId;
byte[] row = Bytes.add(new byte[8], Bytes.toBytes(suffix));
- int BLOCK_SIZE = (int)(recordsToWrite / 100);
+ int BLOCK_SIZE = (int) (recordsToWrite / 100);
- for (long i = 0; i < recordsToWrite;) {
+ for (long i = 0; i < recordsToWrite; ) {
long blockStart = i;
for (long idxInBlock = 0;
idxInBlock < BLOCK_SIZE && i < recordsToWrite;
@@ -198,8 +197,8 @@ public class TestLoadAndVerify extends Configured implements Tool {
BytesWritable bwVal = new BytesWritable();
for (KeyValue kv : value.list()) {
if (Bytes.compareTo(TEST_QUALIFIER, 0, TEST_QUALIFIER.length,
- kv.getBuffer(), kv.getQualifierOffset(), kv.getQualifierLength()) == 0) {
- context.write(bwKey, EMPTY);
+ kv.getBuffer(), kv.getQualifierOffset(), kv.getQualifierLength()) == 0) {
+ context.write(bwKey, EMPTY);
} else {
bwVal.set(kv.getBuffer(), kv.getQualifierOffset(), kv.getQualifierLength());
context.write(bwVal, bwKey);
@@ -218,7 +217,7 @@ public class TestLoadAndVerify extends Configured implements Tool {
@Override
protected void reduce(BytesWritable referredRow, Iterable<BytesWritable> referrers,
- VerifyReducer.Context ctx) throws IOException, InterruptedException {
+ VerifyReducer.Context ctx) throws IOException, InterruptedException {
boolean gotOriginalRow = false;
int refCount = 0;
@@ -234,7 +233,7 @@ public class TestLoadAndVerify extends Configured implements Tool {
if (!gotOriginalRow) {
String parsedRow = makeRowReadable(referredRow.getBytes(), referredRow.getLength());
- String binRow = Bytes.toStringBinary(referredRow.getBytes(), 0, referredRow.getLength());
+ String binRow = Bytes.toStringBinary(referredRow.getBytes(), 0, referredRow.getLength());
ctx.write(new Text(binRow), new Text(parsedRow));
}
}
@@ -248,9 +247,9 @@ public class TestLoadAndVerify extends Configured implements Tool {
}
private void doLoad(Configuration conf, HTableDescriptor htd) throws Exception {
- Path outputDir =
- new Path(HBaseTestUtil.getMROutputDir(TEST_NAME),
- "load-output");
+ Path outputDir =
+ new Path(HBaseTestUtil.getMROutputDir(TEST_NAME),
+ "load-output");
NMapInputFormat.setNumMapTasks(conf, NUM_TASKS);
conf.set(TABLE_NAME_KEY, htd.getNameAsString());
@@ -271,9 +270,9 @@ public class TestLoadAndVerify extends Configured implements Tool {
}
private void doVerify(Configuration conf, HTableDescriptor htd) throws Exception {
- Path outputDir =
- new Path(HBaseTestUtil.getMROutputDir(TEST_NAME),
- "verify-output");
+ Path outputDir =
+ new Path(HBaseTestUtil.getMROutputDir(TEST_NAME),
+ "verify-output");
Job job = new Job(conf);
job.setJarByClass(this.getClass());
@@ -299,7 +298,7 @@ public class TestLoadAndVerify extends Configured implements Tool {
@Test
public void testLoadAndVerify() throws Exception {
HTableDescriptor htd =
- HBaseTestUtil.createTestTableDescriptor(TEST_NAME, TEST_FAMILY);
+ HBaseTestUtil.createTestTableDescriptor(TEST_NAME, TEST_FAMILY);
HBaseAdmin admin = HBaseTestUtil.getAdmin();
assertNotNull("HBaseAdmin shouldn't be null", admin);
int numPreCreate = 40;
@@ -315,12 +314,12 @@ public class TestLoadAndVerify extends Configured implements Tool {
deleteTable(admin, htd);
}
- private void deleteTable(HBaseAdmin admin, HTableDescriptor htd)
- throws IOException, InterruptedException {
+ private void deleteTable(HBaseAdmin admin, HTableDescriptor htd)
+ throws IOException, InterruptedException {
// Use disableTestAsync because disable can take a long time to complete
- System.out.print("Disabling table " + htd.getNameAsString() +" ");
+ System.out.print("Disabling table " + htd.getNameAsString() + " ");
admin.disableTableAsync(htd.getName());
-
+
long start = System.currentTimeMillis();
// NOTE tables can be both admin.isTableEnabled=false and
// isTableDisabled=false, when disabling must use isTableDisabled!
@@ -329,11 +328,11 @@ public class TestLoadAndVerify extends Configured implements Tool {
Thread.sleep(1000);
}
long delta = System.currentTimeMillis() - start;
- System.out.println(" " + delta +" ms");
- System.out.println("Deleting table " + htd.getNameAsString() +" ");
+ System.out.println(" " + delta + " ms");
+ System.out.println("Deleting table " + htd.getNameAsString() + " ");
admin.deleteTable(htd.getName());
}
-
+
public void usage() {
System.err.println(this.getClass().getSimpleName() + " [-Doptions] <load|verify|loadAndVerify>");
System.err.println(" Loads a table with row dependencies and verifies the dependency chains");
@@ -345,7 +344,7 @@ public class TestLoadAndVerify extends Configured implements Tool {
System.err.println(" -Dloadmapper.numPresplits=<n> Number of presplit regions to start with (default 40)");
System.err.println(" -Dverify.scannercaching=<n> Number hbase scanner caching rows to read (default 50)");
}
-
+
public int run(String argv[]) throws Exception {
if (argv.length < 1 || argv.length > 1) {
usage();
@@ -354,16 +353,16 @@ public class TestLoadAndVerify extends Configured implements Tool {
boolean doLoad = false;
boolean doVerify = false;
- boolean doDelete = getConf().getBoolean("loadmapper.deleteAfter",true);
+ boolean doDelete = getConf().getBoolean("loadmapper.deleteAfter", true);
int numPresplits = getConf().getInt("loadmapper.numPresplits", 40);
if (argv[0].equals("load")) {
doLoad = true;
} else if (argv[0].equals("verify")) {
- doVerify= true;
+ doVerify = true;
} else if (argv[0].equals("loadAndVerify")) {
- doLoad=true;
- doVerify= true;
+ doLoad = true;
+ doVerify = true;
} else {
System.err.println("Invalid argument " + argv[0]);
usage();
@@ -372,7 +371,7 @@ public class TestLoadAndVerify extends Configured implements Tool {
// create HTableDescriptor for specified table
String table = getConf().get(TABLE_NAME_KEY, "");
- HTableDescriptor htd ;
+ HTableDescriptor htd;
if ("".equals(table)) {
// Just like the unit test.
htd = HBaseTestUtil.createTestTableDescriptor(TEST_NAME, TEST_FAMILY);
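The densest reformat in this file is swapLong; since a byte-order swap is its
own inverse, it is easy to sanity-check. A Groovy sketch of the same
expression with round-trip assertions; the test values are illustrative:

    long swapLong(long value) {
        (((value >> 0)  & 0xff) << 56) +
        (((value >> 8)  & 0xff) << 48) +
        (((value >> 16) & 0xff) << 40) +
        (((value >> 24) & 0xff) << 32) +
        (((value >> 32) & 0xff) << 24) +
        (((value >> 40) & 0xff) << 16) +
        (((value >> 48) & 0xff) << 8) +
        (((value >> 56) & 0xff) << 0)
    }
    assert swapLong(1L) == 0x0100000000000000L
    assert swapLong(swapLong(0x0123456789ABCDEFL)) == 0x0123456789ABCDEFL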
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestMoveRootMetaRegions.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestMoveRootMetaRegions.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestMoveRootMetaRegions.java
index dcad25d..2132a43 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestMoveRootMetaRegions.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestMoveRootMetaRegions.java
@@ -35,39 +35,40 @@ import org.junit.BeforeClass;
import org.junit.Test;
/**
- This test contains 3 tests:
-
- 1) Unload the region server hosting -ROOT-. Check that -ROOT- region
- is accessible after a configurable period of time.
-
- 2) The test above for the .META. region.
-
- 3) Stop the region server(s) hosting the -ROOT- and .META. tables
- and verify the regions are moved to other region server(s).
-
- This test does not restart the stopped region server, so users will
- have to manually restart the region server.
-
- The third test is designed for clusters with more than two region servers.
-
- Optional arguments:
- -Droot.timeout.ms=<milliseconds to wait while trying to find -ROOT->
- -Dmeta.timeout.ms=<milliseconds to wait while trying to find .META.>
- -Dwait.after.move.ms=<milliseconds to wait after moving -ROOT- or .META.>
-*/
+ * This test contains 3 tests:
+ * <p/>
+ * 1) Unload the region server hosting -ROOT-. Check that -ROOT- region
+ * is accessible after a configurable period of time.
+ * <p/>
+ * 2) The test above for the .META. region.
+ * <p/>
+ * 3) Stop the region server(s) hosting the -ROOT- and .META. tables
+ * and verify the regions are moved to other region server(s).
+ * <p/>
+ * This test does not restart the stopped region server, so users will
+ * have to manually restart the region server.
+ * <p/>
+ * The third test is designed for clusters with more than two region servers.
+ * <p/>
+ * Optional arguments:
+ * -Droot.timeout.ms=<milliseconds to wait while trying to find -ROOT->
+ * -Dmeta.timeout.ms=<milliseconds to wait while trying to find .META.>
+ * -Dwait.after.move.ms=<milliseconds to wait after moving -ROOT- or .META.>
+ */
public class TestMoveRootMetaRegions {
private static final String HBASE_HOME = System.getenv("HBASE_HOME");
+
static {
Assert.assertNotNull("HBASE_HOME has to be set to run this test", HBASE_HOME);
}
- private static Shell sh = new Shell( "/bin/bash -s" );
+ private static Shell sh = new Shell("/bin/bash -s");
private static String load_regionserver =
- "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb load ";
+ "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb load ";
private static String unload_regionserver =
- "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb unload ";
+ "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb unload ";
private static long meta_timeout_ms;
private static long root_timeout_ms;
@@ -78,18 +79,18 @@ public class TestMoveRootMetaRegions {
private static CatalogTracker ct;
private static String meta_table =
- Bytes.toStringBinary(TableName.META_TABLE_NAME.getName());
+ Bytes.toStringBinary(TableName.META_TABLE_NAME.getName());
@BeforeClass
public static void setUp() throws Exception {
// Default timeout is 3 minutes.
root_timeout_ms =
- Integer.parseInt(System.getProperty("root.timeout.ms", "180000"));
+ Integer.parseInt(System.getProperty("root.timeout.ms", "180000"));
meta_timeout_ms =
- Integer.parseInt(System.getProperty("meta.timeout.ms", "180000"));
+ Integer.parseInt(System.getProperty("meta.timeout.ms", "180000"));
// Default to 20 seconds.
wait_after_move_ms =
- Integer.parseInt(System.getProperty("wait.after.move.ms", "20000"));
+ Integer.parseInt(System.getProperty("wait.after.move.ms", "20000"));
conf = HBaseConfiguration.create();
admin = new HBaseAdmin(conf);
@@ -107,12 +108,12 @@ public class TestMoveRootMetaRegions {
ct.stop();
}
- public static ServerName getMetaAddress() throws Exception{
+ public static ServerName getMetaAddress() throws Exception {
return ct.waitForMeta(meta_timeout_ms);
}
@Test
- public void unloadMetaRegionServer() throws Exception{
+ public void unloadMetaRegionServer() throws Exception {
ServerName meta_address = getMetaAddress();
String cmd = unload_regionserver + meta_address.getHostname();
System.out.println("Unloading the region server hosting " + meta_table);
@@ -143,7 +144,7 @@ public class TestMoveRootMetaRegions {
ServerName new_meta_address = getMetaAddress();
- System.out.println(meta_table + " server address: " + new_meta_address);
+ System.out.println(meta_table + " server address: " + new_meta_address);
Assert.assertThat(meta_address, not(equalTo(new_meta_address)));
}
}
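A minimal sketch of how the optional timeouts documented above can be supplied
when the test is launched programmatically through JUnit 4's JUnitCore. The
driver class itself is invented and not part of this patch; HBASE_HOME must
still be set in the environment, as the static block enforces.

import org.junit.runner.JUnitCore;
import org.junit.runner.Result;

// Hypothetical driver, not part of this patch.
public class MoveRootMetaDriver {
  public static void main(String[] args) {
    // Property names and defaults come from setUp() above.
    System.setProperty("root.timeout.ms", "300000");   // 5 minutes instead of 3
    System.setProperty("meta.timeout.ms", "300000");
    System.setProperty("wait.after.move.ms", "30000"); // 30 seconds instead of 20
    Result result = JUnitCore.runClasses(TestMoveRootMetaRegions.class);
    System.exit(result.wasSuccessful() ? 0 : 1);
  }
}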
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestRegionMover.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestRegionMover.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestRegionMover.java
index 427b198..c8a7617 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestRegionMover.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestRegionMover.java
@@ -33,25 +33,25 @@ import org.apache.bigtop.itest.shell.Shell;
* This program unloads and reloads region servers and checks that
* regions do not get stuck in transition for too long. The region
* servers are specified by hostname.
- *
+ * <p/>
* Required arguments:
* -Dregionservers=<regionserver1>,<regionserver2>,...
- *
+ * <p/>
* Optional arguments:
* -Dload.iterations=<number of times to unload and load the region servers>
* -Dtimeout.intervals=<number of times to wait for no regions in transition>
* -Dtimeout.ms=<milliseconds to wait before checking for regions in transition>
*/
public class TestRegionMover {
- private static Shell sh = new Shell( "/bin/bash -s" );
+ private static Shell sh = new Shell("/bin/bash -s");
// Commands to execute the region mover and get the detailed HBase status.
private static String load_regionserver =
- "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb load ";
+ "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb load ";
private static String unload_regionserver =
- "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb unload ";
+ "$HBASE_HOME/bin/hbase org.jruby.Main $HBASE_HOME/bin/region_mover.rb unload ";
private static String hbase_status_detailed =
- "echo \"status \'detailed\'\" | $HBASE_HOME/bin/hbase shell";
+ "echo \"status \'detailed\'\" | $HBASE_HOME/bin/hbase shell";
// Number of times we unload/load the region servers.
private static int load_iterations;
@@ -63,6 +63,7 @@ public class TestRegionMover {
private static ArrayList<String> regionservers = new ArrayList<String>();
private static final String HBASE_HOME = System.getenv("HBASE_HOME");
+
static {
assertNotNull("HBASE_HOME has to be set to run this test", HBASE_HOME);
}
@@ -71,7 +72,7 @@ public class TestRegionMover {
public static void setUp() throws InterruptedException {
String region_servers = System.getProperty("regionservers", null);
assertNotNull("Region server(s) must be specified to run this test",
- region_servers);
+ region_servers);
StringTokenizer st = new StringTokenizer(region_servers, ",");
while (st.hasMoreTokens()) {
regionservers.add(st.nextToken());
@@ -79,8 +80,8 @@ public class TestRegionMover {
System.out.println("Region servers to load/unload:\n" + regionservers);
load_iterations = Integer.parseInt(System.getProperty("load.iterations", "10"));
- timeout_intervals = Integer.parseInt(System.getProperty("timeout.intervals","20"));
- timeout_ms = Integer.parseInt(System.getProperty("timeout.ms","20000"));
+ timeout_intervals = Integer.parseInt(System.getProperty("timeout.intervals", "20"));
+ timeout_ms = Integer.parseInt(System.getProperty("timeout.ms", "20000"));
}
@AfterClass
@@ -94,7 +95,7 @@ public class TestRegionMover {
System.out.println("Wait interval: " + i);
sh.exec(hbase_status_detailed);
String status = sh.getOut().toString();
- if(status.indexOf(" 0 regionsInTransition") != -1) {
+ if (status.indexOf(" 0 regionsInTransition") != -1) {
System.out.println(" 0 regionsInTransition.");
return;
} else {
@@ -107,7 +108,7 @@ public class TestRegionMover {
@Test
public void testRegionMover() throws InterruptedException {
System.out.println("Beginning unloading and loading of region servers " +
- load_iterations + " times each");
+ load_iterations + " times each");
String cmd;
for (int i = 0; i < load_iterations; i++) {
for (String rs : regionservers) {
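The required -Dregionservers argument documented above is a plain
comma-separated list. A self-contained sketch of the split performed in
setUp(); hostnames are placeholders and the demo class is not part of this
patch.

import java.util.ArrayList;
import java.util.StringTokenizer;

// Hypothetical demo, not part of this patch.
public class RegionServerListDemo {
  public static void main(String[] args) {
    // Falls back to placeholder hostnames when the property is unset.
    String regionServers = System.getProperty("regionservers",
        "rs1.example.com,rs2.example.com,rs3.example.com");
    ArrayList<String> regionservers = new ArrayList<String>();
    StringTokenizer st = new StringTokenizer(regionServers, ",");
    while (st.hasMoreTokens()) {
      regionservers.add(st.nextToken());
    }
    System.out.println("Region servers to load/unload:\n" + regionservers);
  }
}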
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/util/HBaseTestUtil.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/util/HBaseTestUtil.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/util/HBaseTestUtil.java
index fd2f398..4d718f6 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/util/HBaseTestUtil.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/util/HBaseTestUtil.java
@@ -43,9 +43,9 @@ import org.apache.hadoop.hbase.util.ChecksumType;
public class HBaseTestUtil {
- public static int BLOCKSIZE = 64*1024;
+ public static int BLOCKSIZE = 64 * 1024;
public static String COMPRESSION =
- Compression.Algorithm.NONE.getName();
+ Compression.Algorithm.NONE.getName();
private static String getTestPrefix() {
return String.valueOf(System.currentTimeMillis());
@@ -56,7 +56,7 @@ public class HBaseTestUtil {
}
public static HTableDescriptor createTestTableDescriptor(String testName,
- byte[] familyName) {
+ byte[] familyName) {
byte[] tableName = getTestTableName(testName);
HTableDescriptor htd = new HTableDescriptor(tableName);
htd.addFamily(new HColumnDescriptor(familyName));
@@ -91,18 +91,17 @@ public class HBaseTestUtil {
Configuration conf,
FileSystem fs, Path path,
byte[] family, byte[] qualifier,
- byte[] startKey, byte[] endKey, int numRows) throws IOException
- {
- HFile.WriterFactory wf = HFile.getWriterFactory(conf, new CacheConfig(conf));
- HFileContext hFileContext = new HFileContext();
- wf.withFileContext(hFileContext);
- wf.withComparator(KeyValue.COMPARATOR);
- wf.withPath(fs, path);
+ byte[] startKey, byte[] endKey, int numRows) throws IOException {
+ HFile.WriterFactory wf = HFile.getWriterFactory(conf, new CacheConfig(conf));
+ HFileContext hFileContext = new HFileContext();
+ wf.withFileContext(hFileContext);
+ wf.withComparator(KeyValue.COMPARATOR);
+ wf.withPath(fs, path);
HFile.Writer writer = wf.create();
long now = System.currentTimeMillis();
try {
// subtract 2 since iterateOnSplits doesn't include boundary keys
- for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, numRows-2)) {
+ for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, numRows - 2)) {
KeyValue kv = new KeyValue(key, family, qualifier, now, key);
writer.append(kv);
}
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hcatalog/src/main/groovy/org/apache/bigtop/itest/hcatalogsmoke/TestHcatalogBasic.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hcatalog/src/main/groovy/org/apache/bigtop/itest/hcatalogsmoke/TestHcatalogBasic.groovy b/bigtop-tests/test-artifacts/hcatalog/src/main/groovy/org/apache/bigtop/itest/hcatalogsmoke/TestHcatalogBasic.groovy
index ca609c7..2a143cb 100644
--- a/bigtop-tests/test-artifacts/hcatalog/src/main/groovy/org/apache/bigtop/itest/hcatalogsmoke/TestHcatalogBasic.groovy
+++ b/bigtop-tests/test-artifacts/hcatalog/src/main/groovy/org/apache/bigtop/itest/hcatalogsmoke/TestHcatalogBasic.groovy
@@ -58,7 +58,6 @@ public class TestHcatalogBasic {
sh.exec("hadoop fs -rmr -skipTrash /user/hive/warehouse")
}
-
/**
* Validate that the table created via hcat exists from Hive's world view
*/
@@ -76,14 +75,14 @@ public class TestHcatalogBasic {
diff -u hcat_basic_describe.expected hive_hcat_basic_verify.actual
""")
assertEquals("hive couldn't detect the table created via hcat, return code: " + sh.ret,
- 0, sh.ret);
+ 0, sh.ret);
sh.exec("""
hcat -e "DESCRIBE hcat_basic" > hcat_hcat_basic_verify.actual
diff -u hcat_basic_describe.expected hcat_hcat_basic_verify.actual
""")
assertEquals("hcat couldn't detect the table created via hcat, return code: " + sh.ret,
- 0, sh.ret);
+ 0, sh.ret);
// Add a partition via hive
sh.exec("hive -e \"ALTER TABLE hcat_basic ADD PARTITION (dt='2013-01-01')\"")
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy b/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
index 9cd40eb..78d7a7a 100644
--- a/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
+++ b/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
@@ -29,125 +29,125 @@ import org.apache.commons.logging.Log
import org.apache.commons.logging.LogFactory
public class TestHttpFs {
- private static Log LOG = LogFactory.getLog(TestHttpFs.class)
-
- private static final String USERNAME = System.getProperty("user.name");
- private static final String HTTPFS_PROXY = System.getenv('HTTPFS_PROXY');
- static {
- assertNotNull("HTTPFS_PROXY has to be set to run this test",
- HTTPFS_PROXY);
- }
-
- private static final String HTTPFS_PREFIX = "http://$HTTPFS_PROXY/webhdfs/v1";
- private static final String HTTPFS_SUCCESS = "{\"boolean\":true}";
-
- private static final String DATA_DIR = System.getProperty("data.dir", "text-files");
-
- private static String testHttpFsFolder = "/tmp/httpfssmoke-" + (new Date().getTime());
- private static String testHttpFsFolderRenamed = "$testHttpFsFolder-renamed";
-
- private static Shell sh = new Shell("/bin/bash");
- // it will used to cleanup directories, as they are created with via curl with user.name=$USERNAME
- private static Shell shUSERNAME = new Shell("/bin/bash", USERNAME);
-
- @BeforeClass
- public static void setUp() {
- }
+ private static Log LOG = LogFactory.getLog(TestHttpFs.class)
- @AfterClass
- public static void tearDown() {
- // clean up of existing folders using USERNAME of user who created them via curl
- shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolder");
- if (shUSERNAME.getRet() == 0) {
- shUSERNAME.exec("hadoop fs -rmr -skipTrash $testHttpFsFolder");
- assertTrue("Deletion of previous testHttpFsFolder from HDFS failed",
- shUSERNAME.getRet() == 0);
- }
- shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolderRenamed");
- if (shUSERNAME.getRet() == 0) {
- shUSERNAME.exec("hadoop fs -rmr -skipTrash $testHttpFsFolderRenamed");
- assertTrue("Deletion of previous testHttpFsFolderRenamed from HDFS failed",
- shUSERNAME.getRet() == 0);
- }
- }
+ private static final String USERNAME = System.getProperty("user.name");
+ private static final String HTTPFS_PROXY = System.getenv('HTTPFS_PROXY');
+ static {
+ assertNotNull("HTTPFS_PROXY has to be set to run this test",
+ HTTPFS_PROXY);
+ }
+
+ private static final String HTTPFS_PREFIX = "http://$HTTPFS_PROXY/webhdfs/v1";
+ private static final String HTTPFS_SUCCESS = "{\"boolean\":true}";
+
+ private static final String DATA_DIR = System.getProperty("data.dir", "text-files");
+
+ private static String testHttpFsFolder = "/tmp/httpfssmoke-" + (new Date().getTime());
+ private static String testHttpFsFolderRenamed = "$testHttpFsFolder-renamed";
+
+ private static Shell sh = new Shell("/bin/bash");
+ // it will be used to clean up directories, as they are created via curl with user.name=$USERNAME
+ private static Shell shUSERNAME = new Shell("/bin/bash", USERNAME);
+
+ @BeforeClass
+ public static void setUp() {
+ }
- public void assertValueExists(List<String> values, String expected) {
- boolean exists = false;
- for (String value: values) {
- if (expected.startsWith(value)) {
- exists = true;
- }
- }
- assertTrue(expected + " NOT found!", exists == true);
+ @AfterClass
+ public static void tearDown() {
+ // clean up of existing folders using USERNAME of user who created them via curl
+ shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolder");
+ if (shUSERNAME.getRet() == 0) {
+ shUSERNAME.exec("hadoop fs -rmr -skipTrash $testHttpFsFolder");
+ assertTrue("Deletion of previous testHttpFsFolder from HDFS failed",
+ shUSERNAME.getRet() == 0);
}
-
- private void createDir(String dirname) {
- sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$dirname?user.name=$USERNAME&op=MKDIRS'");
+ shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolderRenamed");
+ if (shUSERNAME.getRet() == 0) {
+ shUSERNAME.exec("hadoop fs -rmr -skipTrash $testHttpFsFolderRenamed");
+ assertTrue("Deletion of previous testHttpFsFolderRenamed from HDFS failed",
+ shUSERNAME.getRet() == 0);
}
+ }
- @Test
- public void testCreateDir() {
- createDir(testHttpFsFolder)
- assertTrue("curl command to create a dir failed", sh.getRet() == 0);
- assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+ public void assertValueExists(List<String> values, String expected) {
+ boolean exists = false;
+ for (String value : values) {
+ if (expected.startsWith(value)) {
+ exists = true;
+ }
}
+ assertTrue(expected + " NOT found!", exists == true);
+ }
- @Test
- public void testRenameDir() {
- createDir(testHttpFsFolder);
- assertTrue("curl command to create a dir failed", sh.getRet() == 0);
- assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
- sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=RENAME&destination=$testHttpFsFolderRenamed'");
- assertTrue("curl command to rename a dir failed", sh.getRet() == 0);
- assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
- }
+ private void createDir(String dirname) {
+ sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$dirname?user.name=$USERNAME&op=MKDIRS'");
+ }
- @Test
- public void testDeleteDir() {
- createDir(testHttpFsFolder);
- assertTrue("curl command to create a dir failed", sh.getRet() == 0);
- assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
- sh.exec("curl -i -X DELETE '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=DELETE'");
- assertTrue("curl command to delete a dir failed", sh.getRet() == 0);
- assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
- }
-
- @Test
- public void testStatusDir() {
- createDir(testHttpFsFolder);
- assertTrue("curl command to create a dir failed", sh.getRet() == 0);
- assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
- sh.exec("curl -i '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=GETFILESTATUS'");
- assertTrue("curl command to create a dir failed", sh.getRet() == 0);
- assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
- assertValueExists(sh.getOut(), "DIRECTORY");
- }
+ @Test
+ public void testCreateDir() {
+ createDir(testHttpFsFolder)
+ assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+ assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+ }
+
+ @Test
+ public void testRenameDir() {
+ createDir(testHttpFsFolder);
+ assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+ assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+ sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=RENAME&destination=$testHttpFsFolderRenamed'");
+ assertTrue("curl command to rename a dir failed", sh.getRet() == 0);
+ assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+ }
+
+ @Test
+ public void testDeleteDir() {
+ createDir(testHttpFsFolder);
+ assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+ assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+ sh.exec("curl -i -X DELETE '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=DELETE'");
+ assertTrue("curl command to delete a dir failed", sh.getRet() == 0);
+ assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+ }
+
+ @Test
+ public void testStatusDir() {
+ createDir(testHttpFsFolder);
+ assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+ assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+ sh.exec("curl -i '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=GETFILESTATUS'");
+ assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+ assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+ assertValueExists(sh.getOut(), "DIRECTORY");
+ }
+
+ @Test
+ public void testCreateFile() {
+ String filename = "helloworld.txt";
+ String filenameContent = 'Hello World!';
- @Test
- public void testCreateFile() {
- String filename = "helloworld.txt";
- String filenameContent = 'Hello World!';
-
- createDir(testHttpFsFolder);
- assertTrue("curl command to create a dir failed", sh.getRet() == 0);
- sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=CREATE'");
- assertTrue("curl command to create a file failed", sh.getRet() == 0);
- String datanodeLocation = null;
- sh.getOut().each {
- if (it.startsWith("Location:")) {
- datanodeLocation = it.split(' ')[1];
- return true;
- }
- }
- LOG.debug("Datanode location: $datanodeLocation");
- assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
- sh.exec("curl -i -T $DATA_DIR/$filename '$datanodeLocation' --header 'Content-Type:application/octet-stream'");
- assertTrue("curl command to create a file failed", sh.getRet() == 0);
- assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
- sh.exec("curl -i -L '$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=OPEN'");
- assertTrue("curl command to create a file failed", sh.getRet() == 0);
- assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
- assertValueExists(sh.getOut(), filenameContent);
+ createDir(testHttpFsFolder);
+ assertTrue("curl command to create a dir failed", sh.getRet() == 0);
+ sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=CREATE'");
+ assertTrue("curl command to create a file failed", sh.getRet() == 0);
+ String datanodeLocation = null;
+ sh.getOut().each {
+ if (it.startsWith("Location:")) {
+ datanodeLocation = it.split(' ')[1];
+ return true;
+ }
}
+ LOG.debug("Datanode location: $datanodeLocation");
+ assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+ sh.exec("curl -i -T $DATA_DIR/$filename '$datanodeLocation' --header 'Content-Type:application/octet-stream'");
+ assertTrue("curl command to create a file failed", sh.getRet() == 0);
+ assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+ sh.exec("curl -i -L '$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=OPEN'");
+ assertTrue("curl command to create a file failed", sh.getRet() == 0);
+ assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
+ assertValueExists(sh.getOut(), filenameContent);
+ }
}
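Each of the Groovy tests above drives the HttpFS REST endpoint with curl; the
same MKDIRS call can be made from plain Java, which may help when debugging a
failure by hand. A minimal sketch, assuming HTTPFS_PROXY is set to host:port
as in the test and using a placeholder target directory; the demo class is not
part of this patch.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

// Hypothetical demo, not part of this patch.
public class WebHdfsMkdirsDemo {
  public static void main(String[] args) throws Exception {
    String proxy = System.getenv("HTTPFS_PROXY");   // host:port of the HttpFS proxy
    String user = System.getProperty("user.name");
    URL url = new URL("http://" + proxy + "/webhdfs/v1/tmp/httpfssmoke-demo"
        + "?user.name=" + user + "&op=MKDIRS");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");                   // MKDIRS is a PUT with no body
    BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream()));
    String body = in.readLine();                    // expect {"boolean":true} on success
    in.close();
    System.out.println(conn.getResponseCode() + " " + body);
  }
}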
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/hue/src/main/groovy/org/apache/bigtop/itest/huesmoke/TestHueSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hue/src/main/groovy/org/apache/bigtop/itest/huesmoke/TestHueSmoke.groovy b/bigtop-tests/test-artifacts/hue/src/main/groovy/org/apache/bigtop/itest/huesmoke/TestHueSmoke.groovy
index 50c0a46..4b5a46d 100644
--- a/bigtop-tests/test-artifacts/hue/src/main/groovy/org/apache/bigtop/itest/huesmoke/TestHueSmoke.groovy
+++ b/bigtop-tests/test-artifacts/hue/src/main/groovy/org/apache/bigtop/itest/huesmoke/TestHueSmoke.groovy
@@ -29,14 +29,14 @@ public class TestHueSmoke {
final static String loginURL = "${hueServer}/accounts/login/";
final static String checkURL = "${hueServer}/debug/check_config";
final static String creds = "username=admin&password=admin";
- final static Map checkApps = [ "about" : "<title>About Hue</title>",
- "filebrowser/view" : "<title>File Browser</title>",
- "help" : "<title>Hue Help</title>",
- // FIXME: HUE-10 "jobbrowser" : "<title>Error</title>",
- "jobsub" : "<title>Job Designer</title>",
- "useradmin" : "<title>Hue Users</title>",
- "beeswax" : "<title>Hive Query</title>",
- "oozie" : "<title>Oozie App</title>" ];
+ final static Map checkApps = ["about": "<title>About Hue</title>",
+ "filebrowser/view": "<title>File Browser</title>",
+ "help": "<title>Hue Help</title>",
+ // FIXME: HUE-10 "jobbrowser" : "<title>Error</title>",
+ "jobsub": "<title>Job Designer</title>",
+ "useradmin": "<title>Hue Users</title>",
+ "beeswax": "<title>Hive Query</title>",
+ "oozie": "<title>Oozie App</title>"];
Shell sh = new Shell();
@@ -53,7 +53,7 @@ public class TestHueSmoke {
sh.exec("curl -m 60 -b '${sessionId}' ${checkURL}");
assertTrue("Global configuration check failed",
- sh.getOut().grep( ~/.*All ok. Configuration check passed.*/ ).size() > 0);
+ sh.getOut().grep(~/.*All ok. Configuration check passed.*/).size() > 0);
checkApps.each { app, expected ->
sh.exec("curl -m 60 -b '${sessionId}' ${hueServer}/${app}/");
if (sh.getOut().join(' ').indexOf(expected) == -1) {
@@ -61,6 +61,6 @@ public class TestHueSmoke {
}
}
assertEquals("Application(s) ${failedApps} failed to respond",
- failedApps.size(), 0);
+ failedApps.size(), 0);
}
}
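The smoke test above scripts its login-and-check flow with curl; here are the
same two requests in plain Java, as a minimal sketch. The Hue address is a
placeholder, the demo class is not part of this patch, and newer Hue releases
may additionally require a CSRF token on login.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

// Hypothetical demo, not part of this patch.
public class HueConfigCheckDemo {
  public static void main(String[] args) throws Exception {
    String hueServer = "http://localhost:8888";     // placeholder address
    // Log in with the same admin/admin credentials the test uses.
    HttpURLConnection login = (HttpURLConnection)
        new URL(hueServer + "/accounts/login/").openConnection();
    login.setInstanceFollowRedirects(false);        // keep the login response's Set-Cookie
    login.setRequestMethod("POST");
    login.setDoOutput(true);
    OutputStream out = login.getOutputStream();
    out.write("username=admin&password=admin".getBytes("UTF-8"));
    out.close();
    String sessionId = login.getHeaderField("Set-Cookie");
    login.getInputStream().close();
    // Replay the session cookie against the configuration check page.
    HttpURLConnection check = (HttpURLConnection)
        new URL(hueServer + "/debug/check_config").openConnection();
    check.setRequestProperty("Cookie", sessionId);
    BufferedReader in = new BufferedReader(
        new InputStreamReader(check.getInputStream()));
    StringBuilder page = new StringBuilder();
    for (String line; (line = in.readLine()) != null; ) {
      page.append(line);
    }
    in.close();
    System.out.println(page.indexOf("All ok. Configuration check passed") != -1
        ? "configuration check passed" : "configuration check FAILED");
  }
}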
http://git-wip-us.apache.org/repos/asf/bigtop/blob/3e17db89/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestDFSIO.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestDFSIO.groovy b/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestDFSIO.groovy
index caf2dbc..0772a76 100644
--- a/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestDFSIO.groovy
+++ b/bigtop-tests/test-artifacts/longevity/src/main/groovy/org/apache/bigtop/itest/iolongevity/TestDFSIO.groovy
@@ -72,10 +72,10 @@ public class TestDFSIO {
@Test
public void testDFSIO() {
- if(FailureVars.instance.getRunFailures()
- || FailureVars.instance.getServiceRestart()
- || FailureVars.instance.getServiceKill()
- || FailureVars.instance.getNetworkShutdown()) {
+ if (FailureVars.instance.getRunFailures()
+ || FailureVars.instance.getServiceRestart()
+ || FailureVars.instance.getServiceKill()
+ || FailureVars.instance.getNetworkShutdown()) {
runFailureThread();
}
@@ -95,7 +95,7 @@ public class TestDFSIO {
}
}
- private void executeCmd(String cmd, String expectedFile){
+ private void executeCmd(String cmd, String expectedFile) {
sh.exec(cmd);
logError(sh);
assertTrue("Command " + cmd + " is unsuccessful", sh.getRet() == 0);