Posted to commits@bigtop.apache.org by rv...@apache.org on 2013/04/25 21:24:45 UTC

[1/3] git commit: BIGTOP-625. Add HDFS Append Test (Sujay Rau via rvs)

Updated Branches:
  refs/heads/master ea1779258 -> 0f1525d14


BIGTOP-625. Add HDFS Append Test (Sujay Rau via rvs)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/4313c623
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/4313c623
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/4313c623

Branch: refs/heads/master
Commit: 4313c6236a8a8551681a0242f59aa28fe5c13263
Parents: ea17792
Author: Roman Shaposhnik <rv...@cloudera.com>
Authored: Thu Apr 25 12:18:52 2013 -0700
Committer: Roman Shaposhnik <rv...@cloudera.com>
Committed: Thu Apr 25 12:18:52 2013 -0700

----------------------------------------------------------------------
 .../bigtop/itest/hadoop/hdfs/TestFileAppend.groovy |  253 +++++++++++++++
 1 files changed, 253 insertions(+), 0 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/4313c623/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy
new file mode 100644
index 0000000..8b58685
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestFileAppend.groovy
@@ -0,0 +1,253 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.Configuration;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class TestFileAppend {
+ 
+  private static Shell sh = new Shell("/bin/bash -s");
+  private static Shell shHDFS = new Shell("/bin/bash", "hdfs");
+  private static final String HADOOP_HOME = System.getenv('HADOOP_HOME');
+  private static final String HADOOP_CONF_DIR = System.getenv('HADOOP_CONF_DIR');
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+  private static String testAppendInput = "testAppendInput$date";
+  private static String testAppendOutput = "testAppendOutput$date";
+  private static String namenode;
+  private static Configuration conf;
+
+  @BeforeClass
+  public static void setUp() {
+    conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    // creating test directory and test files
+    sh.exec("hadoop fs -mkdir $testAppendInput");
+    sh.exec("echo \"-----TEST INPUT1-----\" > appendinput1.txt$date");
+    sh.exec("echo \"-----TEST INPUT2-----\" > appendinput2.txt$date");
+    sh.exec("echo \"-----TEST INPUT1-----\" > appendCorrect.txt$date");
+    sh.exec("echo \"-----TEST INPUT2-----\" >> appendCorrect.txt$date");
+    sh.exec("hadoop fs -put append* $testAppendInput");
+
+    System.out.println("Running File Append Test:");
+
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    // deletion of test folder
+    sh.exec("hadoop fs -test -e $testAppendInput");
+    if (sh.getRet() == 0) {
+      sh.exec("hadoop fs -rmr -skipTrash $testAppendInput");
+      assertTrue("Deletion of previous testAppendInputs from HDFS failed",
+          sh.getRet() == 0);
+    }
+
+  }
+
+  @Test
+  public void testAppendOnPreExistingFile() { 
+    FileSystem fs = FileSystem.get(conf);
+    
+    // setting paths for I/O stream creation
+    String myInputPath = namenode + "/user/$USERNAME/$testAppendInput/appendinput2.txt$date";
+    Path inFile = new Path(myInputPath);
+    assertTrue("Input file not found", fs.exists(inFile));
+    String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/appendinput1.txt$date";
+    Path outFile = new Path(myOutputPath);
+    assertTrue("Output file not found", fs.exists(outFile));
+    
+    FSDataInputStream input1 = fs.open(inFile);
+    FSDataOutputStream output1 = fs.append(outFile);
+
+    // append
+    IOUtils.copyBytes(input1, output1, 4096, true);
+
+    sh.exec("hadoop fs -cat $testAppendInput/appendinput1.txt$date > $testAppendOutput");
+    sh.exec("if diff $testAppendOutput appendCorrect.txt$date >/dev/null; then echo \"success\"; else echo \"failure\"; fi");
+    assertTrue("Append did not work", sh.getOut().get(0).equals("success"));
+    sh.exec("rm -rf appendinput1.txt$date", "rm -rf appendinput2.txt$date");
+
+  }
+
+  @Test
+  public void testAppendOnCreatedFile() {
+    FileSystem fs = FileSystem.get(conf);
+    
+    // setting paths for I/O stream creation
+    String myOutputCreate = namenode + "/user/$USERNAME/$testAppendInput/appendinput3.txt$date";
+    Path outCreate = new Path(myOutputCreate);
+    FSDataOutputStream outputTemp = fs.create(outCreate);
+    String myString = "-----TEST INPUT1-----\n";
+    InputStream is = new ByteArrayInputStream(myString.getBytes());
+    IOUtils.copyBytes(is, outputTemp, 4096, true);
+ 
+    String myInputPath = namenode + "/user/$USERNAME/$testAppendInput/appendinput2.txt$date";
+    Path inFile = new Path(myInputPath);
+    assertTrue("Input file not found", fs.exists(inFile));
+    String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/appendinput3.txt$date";
+    Path outFile = new Path(myOutputPath);
+    assertTrue("Output file not found", fs.exists(outFile));
+
+    FSDataInputStream input1 = fs.open(inFile);
+    FSDataOutputStream output1 = fs.append(outFile);
+
+    //append
+    IOUtils.copyBytes(input1, output1, 4096, true);
+
+    sh.exec("hadoop fs -cat $testAppendInput/appendinput3.txt$date > $testAppendOutput");
+    sh.exec("if diff $testAppendOutput appendCorrect.txt$date >/dev/null; then echo \"success\"; else echo \"failure\"; fi");
+    assertTrue("Append did not work", sh.getOut().get(0).equals("success"));
+    sh.exec("rm -rf $testAppendOutput", "rm -rf appendinput1.txt$date", "rm -rf appendinput2.txt$date");
+    sh.exec("rm -rf appendCorrect.txt$date");
+    sh.exec("rm -rf appendinput3.txt$date"); 
+ }
+
+
+  @Test
+  public void testAppendFilesGreaterThanBlockSize() {
+    FileSystem fs = FileSystem.get(conf);
+
+    // creating test files that exceed block size; putting them on hdfs
+    sh.exec("dd if=/dev/urandom of=3mboutput.file$date count=3 bs=1048576");
+    assertTrue("File creation error", sh.getRet() == 0);
+    sh.exec("dd if=/dev/urandom of=3mbinput.file$date count=3 bs=1048576");
+    assertTrue("File creation error", sh.getRet() == 0);
+    sh.exec("hadoop fs -Ddfs.block.size=2097152 -put 3mb* $testAppendInput");
+    assertTrue("Could not put test files onto hdfs", sh.getRet() == 0);
+    sh.exec("cat 3mbinput.file$date >> 3mboutput.file$date");
+
+    // setting paths for I/O stream creation    
+    String myInputPath = namenode + "/user/$USERNAME/$testAppendInput/3mbinput.file$date";
+    Path inFile = new Path(myInputPath);
+    assertTrue("Input file not found", fs.exists(inFile));
+    String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/3mboutput.file$date";
+    Path outFile = new Path(myOutputPath);
+    assertTrue("Output file not found", fs.exists(outFile));  
+
+    FSDataInputStream input1 = fs.open(inFile);
+    FSDataOutputStream output1 = fs.append(outFile);
+    
+    // append
+    IOUtils.copyBytes(input1, output1, 4096, true);
+
+    sh.exec("hadoop fs -cat $testAppendInput/3mboutput.file$date > $testAppendOutput");
+    sh.exec("if diff $testAppendOutput 3mboutput.file$date >/dev/null; then echo \"success\"; else echo \"failure\"; fi");
+    assertTrue("Append result is not what is expected", sh.getOut().get(0).equals("success"));
+    sh.exec("rm -rf $testAppendOutput", "rm -rf 3mboutput.file$date", "rm -rf 3mbinput.file$date");
+  }
+
+  @Test
+  public void testFsckSanity() {
+    FileSystem fs = FileSystem.get(conf);
+
+    // test file creation
+    sh.exec("dd if=/dev/zero of=test1.file$date count=1 bs=1048576");
+    assertTrue("File creation error", sh.getRet() == 0);
+    sh.exec("dd if=/dev/zero of=test2.file$date count=1 bs=1048576");
+    assertTrue("File creation error", sh.getRet() == 0);
+    sh.exec("hadoop fs -put test1.file$date $testAppendInput", "hadoop fs -put test2.file$date $testAppendInput");
+    assertTrue("Could not put test files onto hdfs", sh.getRet() == 0);
+
+    // setting paths for I/O stream creation        
+    String myInputPath = namenode + "/user/$USERNAME/$testAppendInput/test1.file$date";
+    Path inFile = new Path(myInputPath);
+    assertTrue("Input file not found", fs.exists(inFile));
+    String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/test2.file$date";
+    Path outFile = new Path(myOutputPath);
+    assertTrue("Output file not found", fs.exists(outFile));  
+
+    FSDataInputStream input1 = fs.open(inFile);
+    FSDataOutputStream output1 = fs.append(outFile);
+    
+    // append
+    IOUtils.copyBytes(input1, output1, 4096, true); 
+  
+    // running fsck
+    shHDFS.exec("hadoop fsck /user/$USERNAME/$testAppendInput/test2.file$date");
+    Boolean success = shHDFS.getOut().get(shHDFS.getOut().size() - 1).contains("is HEALTHY");
+    assertTrue("Append made file unhealthy", success);
+
+    sh.exec("rm -rf test1.file$date", "rm -rf test2.file$date");
+  }
+
+  @Test
+  public void testMultipleOutputStreamFailure() {
+    FileSystem fs = FileSystem.get(conf);
+
+    // test file creation
+    sh.exec("dd if=/dev/zero of=test3.file$date count=1 bs=1048576");
+    assertTrue("File creation error", sh.getRet() == 0);
+    sh.exec("dd if=/dev/zero of=test4.file$date count=1 bs=1048576");
+    assertTrue("File creation error", sh.getRet() == 0);
+    sh.exec("hadoop fs -put test3.file$date $testAppendInput", "hadoop fs -put test4.file$date $testAppendInput");
+    assertTrue("Could not put test files onto hdfs", sh.getRet() == 0);
+
+    // setting paths for I/O stream creation        
+    String myInputPath = namenode + "/user/$USERNAME/$testAppendInput/test3.file$date";
+    Path inFile = new Path(myInputPath);
+    assertTrue("Input file not found", fs.exists(inFile));
+    String myOutputPath = namenode + "/user/$USERNAME/$testAppendInput/test4.file$date";
+    Path outFile = new Path(myOutputPath);
+    assertTrue("Output file not found", fs.exists(outFile));  
+
+    FSDataInputStream input1 = fs.open(inFile);
+    FSDataOutputStream output1 = fs.append(outFile);
+
+    // append
+    IOUtils.copyBytes(input1, output1, 4096, false);
+
+    // attempting second output stream
+    try {
+      FSDataOutputStream output2 = fs.append(outFile);
+      assertTrue("Should not have been able to open second output stream", false);
+      IOUtils.closeStream(output2); 
+    }
+    catch (Exception e) {
+    }
+
+    // attempting second output stream after first stream is closed
+    IOUtils.closeStream(output1);
+    FSDataOutputStream output3 = fs.append(outFile);
+
+    IOUtils.closeStream(output3);
+    IOUtils.closeStream(input1);
+    sh.exec("rm -rf test3.file$date", "rm -rf test4.file$date");
+    assertTrue("Could not remove test files", sh.getRet() == 0);
+
+  }
+
+}
+
+
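
Editor's note: the pattern every test above exercises is the FileSystem append API — open the source for reading, open the existing target with fs.append(), and pipe bytes across. A minimal Groovy sketch, assuming a reachable cluster and an existing target file (paths are illustrative, not taken from the test; on some older Hadoop 1.x releases append also has to be enabled via dfs.support.append):

  import org.apache.hadoop.conf.Configuration
  import org.apache.hadoop.fs.*
  import org.apache.hadoop.io.IOUtils

  Configuration conf = new Configuration()
  FileSystem fs = FileSystem.get(conf)
  Path src = new Path("/tmp/append-src.txt")    // illustrative source path
  Path dst = new Path("/tmp/append-dst.txt")    // must already exist; append() does not create it
  FSDataInputStream input = fs.open(src)
  FSDataOutputStream output = fs.append(dst)    // HDFS allows only a single writer per file
  IOUtils.copyBytes(input, output, 4096, true)  // 'true' closes both streams when the copy finishes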


[2/3] git commit: BIGTOP-621. Add test for distcp - intra cluster (Sujay Rau via rvs)

Posted by rv...@apache.org.
BIGTOP-621. Add test for distcp - intra cluster (Sujay Rau via rvs)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/b41bf501
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/b41bf501
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/b41bf501

Branch: refs/heads/master
Commit: b41bf5012f8b1f9030fe6a0c36dcf072c134ae32
Parents: 4313c62
Author: Roman Shaposhnik <rv...@cloudera.com>
Authored: Thu Apr 25 12:21:36 2013 -0700
Committer: Roman Shaposhnik <rv...@cloudera.com>
Committed: Thu Apr 25 12:21:36 2013 -0700

----------------------------------------------------------------------
 .../itest/hadoop/hdfs/TestDistCpIntra.groovy       |  165 +++++++++++++++
 1 files changed, 165 insertions(+), 0 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/b41bf501/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy
new file mode 100644
index 0000000..d14d664
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDistCpIntra.groovy
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+
+
+public class TestDistCpIntra {
+ 
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for distcp absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testDistcpInputs = "testDistcpInputs" + date;
+  private static String testDistcpOutputs = "testDistcpOutputs" + date;
+  private static String dcpfile = "dcpfile" + date;
+  private static String testDistcpIn = "testDistcpIn" + date;
+  private static String testDistcpOut = "testDistcpOut" + date;
+
+  @BeforeClass
+  public static void setUp() {   
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hadoop fs -mkdir $testDistcpInputs");
+    assertTrue("Could not create input directory", sh.getRet() == 0);
+
+    for (int i = 4; i <= 7; i++) {
+      sh.exec("hadoop fs -mkdir $testDistcpInputs$i");
+      assertTrue("Could not create input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hadoop fs -mkdir $testDistcpOutputs");
+    assertTrue("Could not create output directory", sh.getRet() == 0);
+
+    // create sample input files
+    for (int i = 1; i <= 2; i++) {
+      String dcpfile_i = "$dcpfile" + "$i" + ".txt";
+      sh.exec("echo \"test$i\" > $dcpfile_i");
+    }
+    
+    // copy sample input files to hdfs
+    sh.exec("hadoop fs -put $dcpfile* $testDistcpInputs");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    // create and copy sample input files - multiple sources, source file
+    for (int i = 4; i <= 7; i++) {
+      String dcpfile_i = "$dcpfile" + "$i" + ".txt";
+      sh.exec("echo \"test$i\" > $dcpfile_i");
+      sh.exec("hadoop fs -put $dcpfile_i $testDistcpInputs$i");
+      assertTrue("Could not copy file to HDFS", sh.getRet() == 0);
+    }
+
+    // do clean up of local dcpfiles 
+    sh.exec("rm -rf $dcpfile*");
+
+    System.out.println("Running Distcp:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    // clean up of existing folders
+    sh.exec("hadoop fs -test -e $testDistcpInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("hadoop fs -rmr -skipTrash $testDistcpInputs");
+      assertTrue("Deletion of previous testDistcpInputs from HDFS failed",
+          sh.getRet() == 0);
+    }
+
+    for (int i = 4; i <= 7; i++) {
+      sh.exec("hadoop fs -test -e $testDistcpInputs$i");
+      if (sh.getRet() == 0) {
+       sh.exec("hadoop fs -rmr -skipTrash $testDistcpInputs$i");
+        assertTrue("Deletion of previous testDistcpInputs from HDFS failed",
+            sh.getRet() == 0);
+      }
+    }
+
+    sh.exec("hadoop fs -test -e $testDistcpOutputs");
+    if (sh.getRet() == 0) {
+      sh.exec("hadoop fs -rmr -skipTrash $testDistcpOutputs");
+      assertTrue("Deletion of previous testDistcpOutputs from HDFS failed",
+          sh.getRet() == 0);
+    }
+
+  }
+
+  @Test
+  public void testDistcpIntra() { 
+    for (int i = 1; i <= 2; i++) {
+      String dcpfile_i = "$dcpfile" + "$i" + ".txt";
+      // running distcp from namenode/src to namenode/dest
+      sh.exec("hadoop distcp $namenode/user/$USERNAME/$testDistcpInputs/$dcpfile_i $namenode/user/$USERNAME/$testDistcpOutputs");
+      assertTrue("Distcp $i failed", sh.getRet() == 0);
+      
+      // confirm that copied file is the same as original file
+      sh.exec("hadoop fs -cat $namenode/user/$USERNAME/$testDistcpInputs/$dcpfile_i > $testDistcpIn");
+      sh.exec("hadoop fs -cat $namenode/user/$USERNAME/$testDistcpOutputs/$dcpfile_i > $testDistcpOut");
+      sh.exec("if diff $testDistcpIn $testDistcpOut >/dev/null; then echo \"success\"; else echo \"failure\"; fi");
+      assertTrue("Files corrupted while being copied", sh.getOut().get(0) == "success");
+
+      // clean up
+      sh.exec("rm -rf $testDistcpIn", "rm -rf $testDistcpOut");
+    }
+  } 
+
+  @Test
+  public void testDistcpIntra_MultipleSources() { 
+    String distcp_sources = "distcp_sources" + date;
+    String dcpfile4 = "$testDistcpInputs" + "4/$dcpfile" + "4.txt"
+    String dcpfile5 = "$testDistcpInputs" + "5/$dcpfile" + "5.txt"
+    String dcpfile6 = "$testDistcpInputs" + "6/$dcpfile" + "6.txt"
+    String dcpfile7 = "$testDistcpInputs" + "7/$dcpfile" + "7.txt"
+    // distcp multiple sources
+    sh.exec("hadoop distcp $namenode/user/$USERNAME/$dcpfile4 $namenode/user/$USERNAME/$dcpfile5 $namenode/user/$USERNAME/$testDistcpOutputs");  
+    assertTrue("Distcp multiple sources failed", sh.getRet() == 0);
+
+    // distcp source file (-f option)
+    sh.exec("echo \"$namenode/user/$USERNAME/$dcpfile6\" > $distcp_sources", "echo \"$namenode/user/$USERNAME/$dcpfile7\" >> $distcp_sources");
+    sh.exec("hadoop fs -put $distcp_sources $namenode/user/$USERNAME/$testDistcpInputs");
+    sh.exec("rm -rf $distcp_sources");
+    sh.exec("hadoop distcp -f $namenode/user/$USERNAME/$testDistcpInputs/$distcp_sources $namenode/user/$USERNAME/$testDistcpOutputs"); 
+    assertTrue("Distcp with a source file failed", sh.getRet() == 0);
+
+    // confirm that copied files are the same as original files for multiple sources and source file
+    for (int i = 4; i <= 7; i++) {
+      String dcpfile_i = "$dcpfile" + "$i" + ".txt";
+      sh.exec("hadoop fs -cat $namenode/user/$USERNAME/$testDistcpInputs$i/$dcpfile_i > $testDistcpIn");
+      sh.exec("hadoop fs -cat $namenode/user/$USERNAME/$testDistcpOutputs/$dcpfile_i > $testDistcpOut");
+      sh.exec("if diff $testDistcpIn $testDistcpOut >/dev/null; then echo \"success\"; else echo \"failure\"; fi");
+      assertTrue("Files corrupted while being copied", sh.getOut().get(0) == "success");
+      // clean up
+      sh.exec("rm -rf $testDistcpIn", "rm -rf $testDistcpOut");
+    }
+
+  }
+
+}
+
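
Editor's note: the multiple-sources test above also exercises distcp's -f option, which reads its list of source URIs from a file stored on HDFS, one fully qualified URI per line. A minimal sketch of that invocation using the same Shell helper, with an illustrative namenode URI and paths (not taken from the test):

  import org.apache.bigtop.itest.shell.Shell

  Shell sh = new Shell("/bin/bash -s")
  // one fully qualified source URI per line
  sh.exec('echo "hdfs://nn:8020/user/me/in1/a.txt" > sources.txt',
          'echo "hdfs://nn:8020/user/me/in2/b.txt" >> sources.txt')
  sh.exec("hadoop fs -put sources.txt /user/me/sources.txt")
  // -f tells distcp to read the source list from the given file
  sh.exec("hadoop distcp -f hdfs://nn:8020/user/me/sources.txt hdfs://nn:8020/user/me/out")
  assert sh.getRet() == 0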


[3/3] git commit: BIGTOP-853. HBase test, TestLoadAndVerify does not work on secure clusters (Enis Soztutar via rvs)

Posted by rv...@apache.org.
BIGTOP-853. HBase test, TestLoadAndVerify does not work on secure clusters (Enis Soztutar via rvs)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/0f1525d1
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/0f1525d1
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/0f1525d1

Branch: refs/heads/master
Commit: 0f1525d14a02d4aeb3d5c2118473b4c22b3789f6
Parents: b41bf50
Author: Roman Shaposhnik <rv...@cloudera.com>
Authored: Thu Apr 25 12:24:14 2013 -0700
Committer: Roman Shaposhnik <rv...@cloudera.com>
Committed: Thu Apr 25 12:24:14 2013 -0700

----------------------------------------------------------------------
 .../itest/hbase/system/TestLoadAndVerify.java      |    1 +
 1 files changed, 1 insertions(+), 0 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f1525d1/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
index 3bf403e..6280e45 100644
--- a/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
+++ b/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
@@ -262,6 +262,7 @@ public class TestLoadAndVerify  extends Configured implements Tool {
     job.setNumReduceTasks(0);
     FileOutputFormat.setOutputPath(job, outputDir);
 
+    TableMapReduceUtil.initCredentials(job);
     TableMapReduceUtil.addDependencyJars(job);
     TableMapReduceUtil.addDependencyJars(
         job.getConfiguration(), HTable.class, Lists.class);
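
Editor's note: TableMapReduceUtil.initCredentials(job) obtains an HBase authentication token and attaches it to the job's credentials, which is what lets the launched map tasks authenticate against a Kerberos-secured cluster; without it the job fails only when security is enabled, matching the symptom in BIGTOP-853. A sketch of where the call sits in a typical driver, in Groovy (job name and structure are illustrative, not taken from TestLoadAndVerify):

  import org.apache.hadoop.conf.Configuration
  import org.apache.hadoop.hbase.HBaseConfiguration
  import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil
  import org.apache.hadoop.mapreduce.Job

  Configuration conf = HBaseConfiguration.create()
  Job job = new Job(conf, "load-and-verify")   // Job.getInstance(conf, ...) on newer Hadoop
  job.setNumReduceTasks(0)
  // Obtain an HBase token before submission so tasks can talk to HBase on secure clusters.
  TableMapReduceUtil.initCredentials(job)
  TableMapReduceUtil.addDependencyJars(job)
  job.waitForCompletion(true)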