You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@sqoop.apache.org by bl...@apache.org on 2011/11/08 02:44:17 UTC

svn commit: r1199050 - in /incubator/sqoop/branches/branch-1.4.0: lib/ lib/apache/ lib/cloudera/ src/java/com/cloudera/sqoop/shims/ src/shims/ src/test/com/cloudera/sqoop/lib/ src/test/com/cloudera/sqoop/mapreduce/db/ src/test/com/cloudera/sqoop/testutil/

Author: blee
Date: Tue Nov  8 01:44:17 2011
New Revision: 1199050

URL: http://svn.apache.org/viewvc?rev=1199050&view=rev
Log:
SQOOP-354 SQOOP needs to be made compatible with Hadoop 0.23 release

Removed:
    incubator/sqoop/branches/branch-1.4.0/lib/LICENSE-ant-eclipse-1.0-jvm1.2.txt
    incubator/sqoop/branches/branch-1.4.0/lib/LICENSE-hadoop-mrunit-0.20.2-CDH3b2-SNAPSHOT.txt
    incubator/sqoop/branches/branch-1.4.0/lib/apache/
    incubator/sqoop/branches/branch-1.4.0/lib/cloudera/
    incubator/sqoop/branches/branch-1.4.0/src/java/com/cloudera/sqoop/shims/
    incubator/sqoop/branches/branch-1.4.0/src/shims/
    incubator/sqoop/branches/branch-1.4.0/src/test/com/cloudera/sqoop/testutil/MockObjectFactory.java
Modified:
    incubator/sqoop/branches/branch-1.4.0/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java
    incubator/sqoop/branches/branch-1.4.0/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java

Modified: incubator/sqoop/branches/branch-1.4.0/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/branches/branch-1.4.0/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java?rev=1199050&r1=1199049&r2=1199050&view=diff
==============================================================================
--- incubator/sqoop/branches/branch-1.4.0/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java (original)
+++ incubator/sqoop/branches/branch-1.4.0/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java Tue Nov  8 01:44:17 2011
@@ -31,10 +31,7 @@ import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.MapContext;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 
-import com.cloudera.sqoop.testutil.MockObjectFactory;
 import com.cloudera.sqoop.testutil.MockResultSet;
 
 /**
@@ -45,7 +42,6 @@ public class TestLargeObjectLoader exten
   protected Configuration conf;
   protected LargeObjectLoader loader;
   protected Path outDir;
-  protected MapContext mapContext;
 
   public void setUp() throws IOException, InterruptedException {
     conf = new Configuration();
@@ -60,17 +56,7 @@ public class TestLargeObjectLoader exten
     }
     fs.mkdirs(outDir);
 
-    /* A mock MapContext that uses FileOutputCommitter.
-     * This MapContext is actually serving two roles here; when writing the
-     * CLOB files, its OutputCommitter is used to determine where to write
-     * the CLOB data, as these are placed in the task output work directory.
-     * When reading the CLOB data back for verification, we use the
-     * getInputSplit() to determine where to read our source data from--the same
-     * directory. We are repurposing the same context for both output and input.
-     */
-    mapContext = MockObjectFactory.getMapContextForIOPath(conf, outDir);
-    loader = new LargeObjectLoader(mapContext.getConfiguration(),
-        FileOutputFormat.getWorkOutputPath(mapContext));
+    loader = new LargeObjectLoader(conf, outDir);
   }
 
   public void testReadClobRef()
@@ -88,7 +74,6 @@ public class TestLargeObjectLoader exten
     assertNotNull(clob);
     assertTrue(clob.isExternal());
     loader.close();
-    mapContext.getOutputCommitter().commitTask(mapContext);
     Reader r = clob.getDataStream(conf, outDir);
     char [] buf = new char[4096];
     int chars = r.read(buf, 0, 4096);
@@ -117,7 +102,6 @@ public class TestLargeObjectLoader exten
     assertNotNull(blob);
     assertTrue(blob.isExternal());
     loader.close();
-    mapContext.getOutputCommitter().commitTask(mapContext);
     InputStream is = blob.getDataStream(conf, outDir);
     byte [] buf = new byte[4096];
     int bytes = is.read(buf, 0, 4096);

Modified: incubator/sqoop/branches/branch-1.4.0/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/branches/branch-1.4.0/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java?rev=1199050&r1=1199049&r2=1199050&view=diff
==============================================================================
--- incubator/sqoop/branches/branch-1.4.0/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java (original)
+++ incubator/sqoop/branches/branch-1.4.0/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java Tue Nov  8 01:44:17 2011
@@ -23,13 +23,14 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.db.*;
 import org.apache.hadoop.mapreduce.lib.output.*;
@@ -38,7 +39,7 @@ import org.apache.hadoop.util.StringUtil
 /**
  * Test aspects of DataDrivenDBInputFormat.
  */
-public class TestDataDrivenDBInputFormat extends HadoopTestCase {
+public class TestDataDrivenDBInputFormat extends TestCase {
 
   private static final Log LOG = LogFactory.getLog(
       TestDataDrivenDBInputFormat.class);
@@ -52,10 +53,6 @@ public class TestDataDrivenDBInputFormat
 
   private static final String OUT_DIR;
 
-  public TestDataDrivenDBInputFormat() throws IOException {
-    super(LOCAL_MR, LOCAL_FS, 1, 1);
-  }
-
   static {
     OUT_DIR = System.getProperty("test.build.data", "/tmp") + "/dddbifout";
   }