Posted to commits@bigtop.apache.org by rv...@apache.org on 2013/02/10 07:04:01 UTC

[19/50] [abbrv] git commit: BIGTOP-799, create an itest method for unpacking all the resources into the HDFS or local filesystem

BIGTOP-799, create an itest method for unpacking all the resources into the HDFS or local filesystem


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/aa354502
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/aa354502
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/aa354502

Branch: refs/heads/RCs
Commit: aa354502af1c930d60f15abc986bf0ed4336be5e
Parents: d5acec5
Author: Johnny Zhang <xi...@cloudera.com>
Authored: Tue Dec 4 08:10:12 2012 -0500
Committer: Roman Shaposhnik <rv...@cloudera.com>
Committed: Tue Dec 4 12:29:24 2012 -0800

----------------------------------------------------------------------
 .../org/apache/bigtop/itest/TestUtils.groovy       |   80 +++++++++++++++
 .../itest/hadoopexamples/TestHadoopExamples.groovy |   22 +----
 .../itest/hadoopsmoke/TestHadoopSmoke.groovy       |   11 +--
 3 files changed, 85 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/aa354502/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestUtils.groovy
----------------------------------------------------------------------
diff --git a/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestUtils.groovy b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestUtils.groovy
new file mode 100644
index 0000000..3020303
--- /dev/null
+++ b/bigtop-test-framework/src/main/groovy/org/apache/bigtop/itest/TestUtils.groovy
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.bigtop.itest
+
+import static org.junit.Assert.assertTrue
+
+import org.apache.bigtop.itest.shell.Shell
+
+public class TestUtils {
+  private static Shell sh = new Shell("/bin/bash -s");
+
+  /** Helper method to unpack test input files or an input folder into HDFS.
+   * If both inputDir and inputFiles are non-null,
+   * create the test resources folder under /user/<USER_NAME>/ in HDFS
+   * and copy the individual files into that folder;
+   * if inputDir is non-null but inputFiles is null,
+   * copy the inputDir folder under /user/<USER_NAME>/ in HDFS.
+   * If outputDir is non-null,
+   * create the output folder under /user/<USER_NAME>/ in HDFS.
+   * @param ref        class whose enclosing jar contains the test resources
+   * @param inputDir   HDFS input directory to (re)create
+   * @param inputFiles individual files to copy into inputDir, or null
+   * @param outputDir  HDFS output directory to (re)create, or null
+   */
+  public static void unpackTestResources(Class ref, String inputDir, String[] inputFiles, String outputDir) {
+    // Unpack resource
+    JarContent.unpackJarContainer(ref, '.' , null);
+
+    // create input dir in HDFS
+    if (inputDir != null) {
+      sh.exec("hadoop fs -test -e ${inputDir}");
+      if (sh.getRet() == 0) {
+        sh.exec("hadoop fs -rmr -skipTrash ${inputDir}");
+        assertTrue("Deletion of previous $inputDir from HDFS failed",
+            sh.getRet() == 0);
+      }
+      if (inputFiles != null) {
+        sh.exec("hadoop fs -mkdir -p ${inputDir}");
+        assertTrue("Could not create input directory to HDFS", sh.getRet() == 0);
+        // copy additional files into HDFS input folder
+        inputFiles.each {
+          sh.exec("hadoop fs -put ${it} ${inputDir}");
+          assertTrue("Could not copy input files into input folder in HDFS", sh.getRet() == 0);
+        }
+      } else {
+        // copy the entire resource folder into HDFS
+        sh.exec("hadoop fs -put ${inputDir} ${inputDir}");
+        assertTrue("Could not copy input directory to HDFS", sh.getRet() == 0);
+      }
+    }
+
+    // create output dir in HDFS
+    if (outputDir != null) {
+      sh.exec("hadoop fs -test -e ${outputDir}");
+      if (sh.getRet() == 0) {
+        sh.exec("hadoop fs -rmr -skipTrash ${outputDir}");
+        assertTrue("Deletion of previous examples output from HDFS failed",
+            sh.getRet() == 0);
+      }
+      sh.exec("hadoop fs -mkdir -p ${outputDir}");
+      assertTrue("Could not create output directory in HDFS", sh.getRet() == 0);
+    }
+  }
+}
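
A minimal usage sketch of the new helper (the test class name and HDFS paths below are hypothetical, not part of this commit; the class passed as ref must be packaged in a jar that also contains the test resources):

  import org.junit.BeforeClass
  import org.apache.bigtop.itest.TestUtils

  class MyHdfsTest {
    @BeforeClass
    static void setUp() {
      // Copy the unpacked "examples" resource folder to HDFS and create an
      // "examples-output" directory, as TestHadoopExamples does below.
      TestUtils.unpackTestResources(MyHdfsTest.class, "examples", null, "examples-output")

      // Create the input directory and copy individual files into it, with no
      // output directory, as TestHadoopSmoke does below.
      TestUtils.unpackTestResources(MyHdfsTest.class, "mytest/cachefile",
          ["cachedir.jar", "input.txt"] as String[], null)
    }
  }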

http://git-wip-us.apache.org/repos/asf/bigtop/blob/aa354502/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopexamples/TestHadoopExamples.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopexamples/TestHadoopExamples.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopexamples/TestHadoopExamples.groovy
index 80bc7b5..6f56f1d 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopexamples/TestHadoopExamples.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopexamples/TestHadoopExamples.groovy
@@ -25,6 +25,7 @@ import static org.junit.Assert.assertTrue
 import org.junit.Test
 import org.apache.hadoop.conf.Configuration
 import org.apache.bigtop.itest.JarContent
+import org.apache.bigtop.itest.TestUtils
 import org.apache.commons.logging.LogFactory
 import org.apache.commons.logging.Log
 
@@ -62,26 +63,7 @@ class TestHadoopExamples {
   @BeforeClass
   static void setUp() {
     conf = new Configuration();
-    // Unpack resource
-    JarContent.unpackJarContainer(TestHadoopExamples.class, '.' , null)
-
-    sh.exec("hadoop fs -test -e $EXAMPLES");
-    if (sh.getRet() == 0) {
-      sh.exec("hadoop fs -rmr -skipTrash $EXAMPLES");
-      assertTrue("Deletion of previous $EXAMPLES from HDFS failed",
-          sh.getRet() == 0);
-    }
-    sh.exec("hadoop fs -test -e $EXAMPLES_OUT");
-    if (sh.getRet() == 0) {
-      sh.exec("hadoop fs -rmr -skipTrash $EXAMPLES_OUT");
-      assertTrue("Deletion of previous examples output from HDFS failed",
-          sh.getRet() == 0);
-    }
-
-    // copy test files to HDFS
-    sh.exec("hadoop fs -put $EXAMPLES $EXAMPLES",
-        "hadoop fs -mkdir $EXAMPLES_OUT");
-    assertTrue("Could not create output directory", sh.getRet() == 0);
+    TestUtils.unpackTestResources(TestHadoopExamples.class, EXAMPLES, null, EXAMPLES_OUT);
   }
 
   static Map examples =

http://git-wip-us.apache.org/repos/asf/bigtop/blob/aa354502/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopsmoke/TestHadoopSmoke.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopsmoke/TestHadoopSmoke.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopsmoke/TestHadoopSmoke.groovy
index a63c2c0..e024fea 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopsmoke/TestHadoopSmoke.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopsmoke/TestHadoopSmoke.groovy
@@ -19,6 +19,7 @@
 package org.apache.bigtop.itest.hadoopsmoke
 
 import org.apache.bigtop.itest.JarContent
+import org.apache.bigtop.itest.TestUtils
 import org.apache.bigtop.itest.shell.Shell
 import org.junit.AfterClass
 import org.junit.BeforeClass
@@ -54,14 +55,8 @@ class TestHadoopSmoke {
 
   @BeforeClass
   static void  setUp() throws IOException {
-    JarContent.unpackJarContainer(TestHadoopSmoke.class, '.' , null)
-
-    sh.exec(
-    "hadoop fs  -mkdir ${testDir}/cachefile",
-    "hadoop dfs -put   cachedir.jar ${testDir}/cachefile",
-    "hadoop dfs -put   input.txt ${testDir}/cachefile",
-    )
-    logError(sh)
+    String[] inputFiles = ["cachedir.jar", "input.txt"];
+    TestUtils.unpackTestResources(TestHadoopSmoke.class, "${testDir}/cachefile", inputFiles, null);
   }
 
   @AfterClass