Posted to commits@pig.apache.org by ch...@apache.org on 2012/11/28 23:28:23 UTC

svn commit: r1414968 - in /pig/trunk: CHANGES.txt test/org/apache/pig/TestLoadStoreFuncLifeCycle.java

Author: cheolsoo
Date: Wed Nov 28 22:28:22 2012
New Revision: 1414968

URL: http://svn.apache.org/viewvc?rev=1414968&view=rev
Log:
PIG-2978: TestLoadStoreFuncLifeCycle fails with hadoop-2.0.x (cheolsoo)

Modified:
    pig/trunk/CHANGES.txt
    pig/trunk/test/org/apache/pig/TestLoadStoreFuncLifeCycle.java

Modified: pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/trunk/CHANGES.txt?rev=1414968&r1=1414967&r2=1414968&view=diff
==============================================================================
--- pig/trunk/CHANGES.txt (original)
+++ pig/trunk/CHANGES.txt Wed Nov 28 22:28:22 2012
@@ -376,6 +376,8 @@ OPTIMIZATIONS
 
 BUG FIXES
 
+PIG-2978: TestLoadStoreFuncLifeCycle fails with hadoop-2.0.x (cheolsoo)
+
 PIG-3039: Not possible to use custom version of jackson jars (rohini)
 
 PIG-3045: Specifying sorting field(s) at nightly.conf - fix sortArgs (rohini via cheolsoo)

Modified: pig/trunk/test/org/apache/pig/TestLoadStoreFuncLifeCycle.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/TestLoadStoreFuncLifeCycle.java?rev=1414968&r1=1414967&r2=1414968&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/TestLoadStoreFuncLifeCycle.java (original)
+++ pig/trunk/test/org/apache/pig/TestLoadStoreFuncLifeCycle.java Wed Nov 28 22:28:22 2012
@@ -36,6 +36,7 @@ import org.apache.pig.builtin.mock.Stora
 import org.apache.pig.builtin.mock.Storage.Data;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.logicalLayer.FrontendException;
+import org.apache.pig.test.Util;
 import org.junit.Test;
 
 public class TestLoadStoreFuncLifeCycle {
@@ -267,7 +268,12 @@ public class TestLoadStoreFuncLifeCycle 
 
         String paramsString = null;
         for (Object param : params) {
-            String paramString = String.valueOf(param);
+            String paramString = null;
+            if (param instanceof Job) {
+                paramString = ((Job)param).getJobName();
+            } else {
+                paramString = String.valueOf(param);
+            }
             if (paramString.length() > MAX_PARAM_SIZE || paramString.contains("\n")) {
                 int end = paramString.indexOf('\n');
                 if (end == -1 || end > MAX_PARAM_SIZE) {
@@ -288,7 +294,7 @@ public class TestLoadStoreFuncLifeCycle 
             paramsString += ")";
         }
         String call = calledClass + "[" + id + "]." + called.getMethodName();
-        calls.add(call + paramsString /*+ " called by " + findSalient(stackTrace)*/);
+        calls.add(call + paramsString);
         if (called.getMethodName().equals("<init>")) {
             constructorCallers.add(call + " called by " + findSalient(stackTrace));
         }
@@ -340,7 +346,12 @@ public class TestLoadStoreFuncLifeCycle 
         assertEquals("c", out.get(2).get(0));
 
         assertTrue("loader instanciation count increasing: " + Loader.count, Loader.count <= 3);
-        assertTrue("storer instanciation count increasing: " + Storer.count, Storer.count <= 4);
+        // LocalJobRunner gets the outputcommitter to call setupJob in Hadoop
+        // 2.0.x which was not done in Hadoop 1.0.x. (MAPREDUCE-3563) As a
+        // result, the number of StoreFunc instances is greater by 1 in
+        // Hadoop-2.0.x.
+        assertTrue("storer instanciation count increasing: " + Storer.count,
+                Storer.count <= (Util.isHadoop2_0() ? 5 : 4));
 
     }
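
Note on the first hunk above: the test records proxied LoadFunc/StoreFunc method calls as strings, and String.valueOf() on a Hadoop Job produces output that varies between Hadoop 1.x and 2.x, so the fix summarizes a Job argument by its job name instead. The following is a minimal sketch of that idea, not the committed code; the MAX_PARAM_SIZE value here is illustrative, and the truncation logic is only approximated from the visible part of the hunk.

    import org.apache.hadoop.mapreduce.Job;

    public class ParamStringifySketch {
        // Illustrative limit; the real constant lives in TestLoadStoreFuncLifeCycle.
        static final int MAX_PARAM_SIZE = 60;

        static String stringify(Object param) {
            // Summarize a Job by its name rather than its full toString(),
            // which is verbose and version-dependent.
            String s = (param instanceof Job) ? ((Job) param).getJobName()
                                              : String.valueOf(param);
            // Truncate long or multi-line values so recorded calls stay comparable.
            if (s.length() > MAX_PARAM_SIZE || s.contains("\n")) {
                int end = s.indexOf('\n');
                if (end == -1 || end > MAX_PARAM_SIZE) {
                    end = Math.min(s.length(), MAX_PARAM_SIZE);
                }
                s = s.substring(0, end);
            }
            return s;
        }
    }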
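Note on the last hunk: per MAPREDUCE-3563, LocalJobRunner in Hadoop 2.0.x has the OutputCommitter call setupJob, which was not done in Hadoop 1.0.x, so one extra StoreFunc instance is created and the allowed Storer count becomes version-dependent. The commit uses org.apache.pig.test.Util.isHadoop2_0() for the check; the sketch below only assumes that helper is implemented roughly as a version-string test and shows how the expected count is chosen.

    import org.apache.hadoop.util.VersionInfo;

    public class HadoopVersionSketch {
        // Assumed approximation of Util.isHadoop2_0(); Hadoop reports its
        // version as e.g. "1.0.4" or "2.0.2-alpha".
        static boolean isHadoop2_0() {
            return VersionInfo.getVersion().startsWith("2.0");
        }

        public static void main(String[] args) {
            // Extra OutputCommitter.setupJob() call in Hadoop 2.0.x constructs
            // one more StoreFunc, so the ceiling is raised from 4 to 5 there.
            int maxStorerInstances = isHadoop2_0() ? 5 : 4;
            System.out.println("max StoreFunc instances allowed: " + maxStorerInstances);
        }
    }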