You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@pig.apache.org by ya...@apache.org on 2010/03/09 17:17:43 UTC

svn commit: r920956 [1/3] - in /hadoop/pig/trunk/contrib/zebra: ./ src/test/org/apache/hadoop/zebra/mapred/ src/test/org/apache/hadoop/zebra/mapreduce/

Author: yanz
Date: Tue Mar  9 16:17:42 2010
New Revision: 920956

URL: http://svn.apache.org/viewvc?rev=920956&view=rev
Log:
PIG-1253: make map/reduce test cases run on real cluster (chaow via yanz)

Modified:
    hadoop/pig/trunk/contrib/zebra/CHANGES.txt
    hadoop/pig/trunk/contrib/zebra/build-contrib.xml
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TableMapReduceExample.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs2.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs2TypedApi.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs3.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs3TypedApi.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs4.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs4TypedApi.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputsTypeApi.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputsTypedApiNeg.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestTypedApi.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestTypedApi2.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapreduce/TestMultipleOutputs.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapreduce/TestMultipleOutputs2.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapreduce/TestMultipleOutputs2TypedApi.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapreduce/TestMultipleOutputs3.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapreduce/TestMultipleOutputs3TypedApi.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapreduce/TestMultipleOutputs4.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapreduce/TestMultipleOutputs4TypedApi.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapreduce/TestMultipleOutputsTypeApi.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapreduce/TestMultipleOutputsTypedApiNeg.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapreduce/TestTypedApi.java
    hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapreduce/TestTypedApi2.java

Modified: hadoop/pig/trunk/contrib/zebra/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/CHANGES.txt?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/CHANGES.txt (original)
+++ hadoop/pig/trunk/contrib/zebra/CHANGES.txt Tue Mar  9 16:17:42 2010
@@ -62,6 +62,8 @@ Trunk (unreleased changes)
 
   BUG FIXES
 
+    PIG-1253: make map/reduce test cases run on real cluster (chaow via yanz)
+
     PIG-1276: Pig resource schema interface changed, so Zebra needs to catch exception thrown from the new interfaces. (xuefuz via yanz)
 
     PIG-1256: Bag field should always contain a tuple type as the field schema in ResourceSchema object converted from Zebra Schema (xuefuz via yanz)

Modified: hadoop/pig/trunk/contrib/zebra/build-contrib.xml
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/build-contrib.xml?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/build-contrib.xml (original)
+++ hadoop/pig/trunk/contrib/zebra/build-contrib.xml Tue Mar  9 16:17:42 2010
@@ -208,6 +208,7 @@
      encoding="${build.encoding}"
      srcdir="${src.test}"
      includes="**/*.java"
+     excludes="**/TestColumnSecurity.java"
      destdir="${build.test}"
      debug="${javac.debug}">
       <classpath refid="test.classpath"/>

Modified: hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TableMapReduceExample.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TableMapReduceExample.java?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TableMapReduceExample.java (original)
+++ hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TableMapReduceExample.java Tue Mar  9 16:17:42 2010
@@ -20,6 +20,7 @@ package org.apache.hadoop.zebra.mapred;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -70,8 +71,9 @@ import java.util.Iterator;
  */
 
 public class TableMapReduceExample extends Configured implements Tool {
+  private static Configuration conf = null;
 
-  static class Map extends MapReduceBase implements
+  static class MyMap extends MapReduceBase implements
       Mapper<LongWritable, Text, BytesWritable, Tuple> {
     private BytesWritable bytesKey;
     private Tuple tupleRow;
@@ -159,6 +161,7 @@ public class TableMapReduceExample exten
    * @param args
    *          arguments with exception of Tools understandable ones.
    */
+  @Override
   public int run(String[] args) throws Exception {
     if (args == null || args.length != 3) {
       System.out
@@ -169,18 +172,23 @@ public class TableMapReduceExample exten
     /*
      * First MR Job creating a Table with two columns
      */
-    JobConf jobConf = new JobConf();
+    //Configuration conf = getConf();
+    JobConf jobConf = new JobConf(conf);
+    jobConf.setJarByClass(TableMapReduceExample.class);
+
     jobConf.setJobName("TableMapReduceExample");
     jobConf.set("table.output.tfile.compression", "none");
 
     // Input settings
     jobConf.setInputFormat(TextInputFormat.class);
-    jobConf.setMapperClass(Map.class);
+    jobConf.setMapperClass(MyMap.class);
     FileInputFormat.setInputPaths(jobConf, new Path(args[0]));
-
+    //FileInputFormat.setInputPaths(jobConf, new Path("/tmp/input.txt"));
+    
     // Output settings
     jobConf.setOutputFormat(BasicTableOutputFormat.class);
     BasicTableOutputFormat.setOutputPath(jobConf, new Path(args[1]));
+    //BasicTableOutputFormat.setOutputPath(jobConf, new Path("/tmp/t1"));
 
     // set the logical schema with 2 columns
     BasicTableOutputFormat.setSchema(jobConf, "word:string, count:int");
@@ -193,14 +201,17 @@ public class TableMapReduceExample exten
 
     // Run Job
     JobClient.runJob(jobConf);
+    
 
     /*
      * Second MR Job for Table Projection of count column
      */
-    JobConf projectionJobConf = new JobConf();
+    //JobConf projectionJobConf = new JobConf();
+    JobConf projectionJobConf = new JobConf(conf);
     projectionJobConf.setJobName("TableProjectionMapReduceExample");
 
     // Input settings
+    projectionJobConf.setJarByClass(TableMapReduceExample.class);
     projectionJobConf.setMapperClass(ProjectionMap.class);
     projectionJobConf.setInputFormat(TableInputFormat.class);
     TableInputFormat.setProjection(projectionJobConf, "count");
@@ -210,7 +221,13 @@ public class TableMapReduceExample exten
 
     // Output settings
     projectionJobConf.setOutputFormat(TextOutputFormat.class);
-    FileOutputFormat.setOutputPath(projectionJobConf, new Path(args[2]));
+    Path p2 = new Path(args[2]);
+    FileSystem fs = p2.getFileSystem(conf);
+    if (fs.exists(p2)) {
+      fs.delete(p2, true);
+    }
+    FileOutputFormat.setOutputPath(projectionJobConf, p2);
+    
     projectionJobConf.setReducerClass(ProjectionReduce.class);
     projectionJobConf.setCombinerClass(ProjectionReduce.class);
 
@@ -221,8 +238,13 @@ public class TableMapReduceExample exten
   }
 
   public static void main(String[] args) throws Exception {
-    int res = ToolRunner.run(new Configuration(), new TableMapReduceExample(),
-        args);
+    System.out.println("*******************  this is new now");
+
+    conf = new Configuration();
+    
+    //int res = ToolRunner.run(new Configuration(), new TableMapReduceExample(), args);
+    int res = ToolRunner.run(conf, new TableMapReduceExample(), args);
+    
     System.exit(res);
   }
 }

Modified: hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs.java?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs.java (original)
+++ hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs.java Tue Mar  9 16:17:42 2010
@@ -33,6 +33,7 @@ import java.util.TreeMap;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -51,6 +52,8 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.MultipleOutputs;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.zebra.mapred.BasicTableOutputFormat;
 import org.apache.hadoop.zebra.mapred.TestBasicTableIOFormatLocalFS.InvIndex;
 import org.apache.hadoop.zebra.parser.ParseException;
@@ -87,15 +90,14 @@ import org.junit.Test;
  * 
  * 
  */
-public class TestMultipleOutputs {
+public class TestMultipleOutputs extends Configured implements Tool {
 
   static String inputPath;
   static String inputFileName = "multi-input.txt";
-  //protected static ExecType execType = ExecType.MAPREDUCE;
   protected static ExecType execType = ExecType.LOCAL;
   private static MiniCluster cluster;
   protected static PigServer pigServer;
-  private static Configuration conf;
+  private static Configuration conf = null;
   public static String sortKey = null;
 
   private static FileSystem fs;
@@ -114,16 +116,19 @@ public class TestMultipleOutputs {
       System.setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
     }
 
-    /* By default, we use miniCluster */
-    if (System.getProperty("whichCluster") == null) {
+    // By default, we use miniCluster
+    if (System.getenv("whichCluster") == null) {
       whichCluster = "miniCluster";
-      System.setProperty("whichCluster", "miniCluster");
     } else {
-      whichCluster = System.getProperty("whichCluster");
+      whichCluster = System.getenv("whichCluster");
     }
 
     System.out.println("cluster: " + whichCluster);
     
+    if (conf == null) {
+      conf = new Configuration();
+    }
+    
     if (whichCluster.equals("realCluster")) {
       System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
       System.out.println(" get env user name: " + System.getenv("USER"));
@@ -132,7 +137,7 @@ public class TestMultipleOutputs {
         System.out.println("Please set HADOOP_HOME for realCluster testing mode");
         System.exit(0);        
       }
-
+      
       if (System.getenv("USER") == null) {
         System.out.println("Please set USER for realCluster testing mode");
         System.exit(0);        
@@ -145,9 +150,7 @@ public class TestMultipleOutputs {
         System.out.println("Please place zebra.jar at $HADOOP_HOME/lib");
         System.exit(0);
       }
-    }    
-
-    conf = new Configuration();
+    }
     
     // set inputPath and output path
     String workingDir = null;
@@ -185,8 +188,7 @@ public class TestMultipleOutputs {
     }
 
     if (whichCluster.equals("realCluster")) {
-      pigServer = new PigServer(ExecType.MAPREDUCE, ConfigurationUtil 
-          .toProperties(conf));
+      pigServer = new PigServer(ExecType.MAPREDUCE, ConfigurationUtil.toProperties(conf));
       pigServer.registerJar(zebraJar);
 
     }
@@ -196,18 +198,19 @@ public class TestMultipleOutputs {
         cluster = MiniCluster.buildCluster();
         pigServer = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
         fs = cluster.getFileSystem();
-
       } else {
         pigServer = new PigServer(ExecType.LOCAL);
       }
     }
   }
+  
   @AfterClass
   public static void tearDown() throws Exception {
     if (whichCluster.equalsIgnoreCase("miniCluster")) {
-    pigServer.shutdown();
+      pigServer.shutdown();
     }
   }
+  
   public String getCurrentMethodName() {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     PrintWriter pw = new PrintWriter(baos);
@@ -611,7 +614,6 @@ public class TestMultipleOutputs {
     private BytesWritable bytesKey;
     private Tuple tupleRow;
     private Object javaObj;
-    private JobConf conf;
 
     @Override
     public void map(LongWritable key, Text value,
@@ -670,7 +672,7 @@ public class TestMultipleOutputs {
     @Override
     public void configure(JobConf job) {
       bytesKey = new BytesWritable();
-      conf = job;
+      //conf = job;
       sortKey = job.get("sortKey");
       try {
         Schema outSchema = BasicTableOutputFormat.getSchema(job);
@@ -738,16 +740,18 @@ public class TestMultipleOutputs {
 
       return 0;
     }
-
   }
 
   public void runMR(String myMultiLocs, String sortKey) throws ParseException,
       IOException, Exception, org.apache.hadoop.zebra.parser.ParseException {
-
-    JobConf jobConf = new JobConf();
-    jobConf.setJobName("tableMRSample");
+    
+    JobConf jobConf = new JobConf(conf);
+    
+    jobConf.setJobName("TestMultipleOutputs");
+    jobConf.setJarByClass(TestMultipleOutputs.class);
     jobConf.set("table.output.tfile.compression", "gz");
     jobConf.set("sortKey", sortKey);
+    
     // input settings
     jobConf.setInputFormat(TextInputFormat.class);
     jobConf.setMapperClass(TestMultipleOutputs.MapClass.class);
@@ -758,10 +762,8 @@ public class TestMultipleOutputs {
     jobConf.setNumMapTasks(1);
 
     // output settings
-
     jobConf.setOutputFormat(BasicTableOutputFormat.class);
-    BasicTableOutputFormat.setMultipleOutputs(jobConf, myMultiLocs,
-        TestMultipleOutputs.OutputPartitionerClass.class);
+    BasicTableOutputFormat.setMultipleOutputs(jobConf, myMultiLocs, TestMultipleOutputs.OutputPartitionerClass.class);
 
     // set the logical schema with 2 columns
     BasicTableOutputFormat.setSchema(jobConf, "word:string, count:int");
@@ -769,20 +771,31 @@ public class TestMultipleOutputs {
     BasicTableOutputFormat.setStorageHint(jobConf, "[word];[count]");
     BasicTableOutputFormat.setSortInfo(jobConf, sortKey);
     System.out.println("in runMR, sortkey: " + sortKey);
+    
     // set map-only job.
     jobConf.setNumReduceTasks(1);
     JobClient.runJob(jobConf);
     BasicTableOutputFormat.close(jobConf);
   }
-
-  public static void main(String[] args) throws ParseException,
-      org.apache.hadoop.zebra.parser.ParseException, Exception {
+  
+  @Override
+  public int run(String[] args) throws Exception {
     TestMultipleOutputs test = new TestMultipleOutputs();
     TestMultipleOutputs.setUpOnce();
-    System.out.println("after setup");
     test.test1();
     test.test2();
     test.test3();
+    
+    return 0;
+  }
+  
+  public static void main(String[] args) throws Exception {
+    System.out.println("*******************  this is new today");
 
+    conf = new Configuration();
+    
+    int res = ToolRunner.run(conf, new TestMultipleOutputs(), args);
+    
+    System.exit(res);
   }
 }

Modified: hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs2.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs2.java?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs2.java (original)
+++ hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs2.java Tue Mar  9 16:17:42 2010
@@ -33,6 +33,7 @@ import java.util.TreeMap;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -51,6 +52,8 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.MultipleOutputs;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.zebra.mapred.BasicTableOutputFormat;
 import org.apache.hadoop.zebra.mapred.TestBasicTableIOFormatLocalFS.InvIndex;
 import org.apache.hadoop.zebra.parser.ParseException;
@@ -86,7 +89,7 @@ import org.junit.Test;
  * 
  * 
  */
-public class TestMultipleOutputs2 {
+public class TestMultipleOutputs2 extends Configured implements Tool {
 
   static String inputPath;
   static String inputFileName = "multi-input.txt";
@@ -112,39 +115,41 @@ public class TestMultipleOutputs2 {
   public static void setUpOnce() throws IOException {
     if (System.getenv("hadoop.log.dir") == null) {
       String base = new File(".").getPath(); // getAbsolutePath();
-      System
-          .setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
+      System.setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
     }
 
-    if (System.getProperty("whichCluster") == null) {
-      System.setProperty("whichCluster", "miniCluster");
-      System.out.println("should be called");
-      whichCluster = System.getProperty("whichCluster");
+    // by default we use miniCluster
+    if (System.getenv("whichCluster") == null) {
+      whichCluster = "miniCluster";
     } else {
-      whichCluster = System.getProperty("whichCluster");
+      whichCluster = System.getenv("whichCluster");
     }
 
-    System.out.println("clusterddddd: " + whichCluster);
-    System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
-    System.out.println(" get env user name: " + System.getenv("USER"));
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System
-        .getenv("HADOOP_HOME") == null)) {
-      System.out.println("Please set HADOOP_HOME");
-      System.exit(0);
+    if (conf == null) {
+      conf = new Configuration();
     }
+    
+    if (whichCluster.equals("realCluster")) {
+      System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
+      System.out.println(" get env user name: " + System.getenv("USER"));
+      
+      if (System.getenv("HADOOP_HOME") == null) {
+        System.out.println("Please set HADOOP_HOME for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      if (System.getenv("USER") == null) {
+        System.out.println("Please set USER for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
 
-    conf = new Configuration();
-
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System.getenv("USER") == null)) {
-      System.out.println("Please set USER");
-      System.exit(0);
-    }
-    zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
-
-    File file = new File(zebraJar);
-    if (!file.exists() && whichCluster.equalsIgnoreCase("realCluster")) {
-      System.out.println("Please put zebra.jar at hadoop_home/lib");
-      System.exit(0);
+      File file = new File(zebraJar);
+      if (!file.exists()) {
+        System.out.println("Please place zebra.jar at $HADOOP_HOME/lib");
+        System.exit(0);
+      }
     }
 
     // set inputPath and output path
@@ -167,6 +172,7 @@ public class TestMultipleOutputs2 {
       multiLocs = new String(workingDir + "/" + "us" + "," + workingDir + "/"
           + "india" + "," + workingDir + "/" + "japan");
     }
+    
     writeToFile(inputPath);
     // check inputPath existence
     File inputFile = new File(inputPath);
@@ -181,19 +187,18 @@ public class TestMultipleOutputs2 {
       System.exit(0);
     }
 
-    if (whichCluster.equalsIgnoreCase("realCluster")) {
+    if (whichCluster.equals("realCluster")) {
       pigServer = new PigServer(ExecType.MAPREDUCE, ConfigurationUtil
           .toProperties(conf));
       pigServer.registerJar(zebraJar);
 
     }
 
-    if (whichCluster.equalsIgnoreCase("miniCluster")) {
+    if (whichCluster.equals("miniCluster")) {
       if (execType == ExecType.MAPREDUCE) {
         cluster = MiniCluster.buildCluster();
         pigServer = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
         fs = cluster.getFileSystem();
-
       } else {
         pigServer = new PigServer(ExecType.LOCAL);
       }
@@ -601,7 +606,7 @@ public class TestMultipleOutputs2 {
     private BytesWritable bytesKey;
     private Tuple tupleRow;
     private Object javaObj;
-    private JobConf conf;
+    //private JobConf conf;
 
     @Override
     public void map(LongWritable key, Text value,
@@ -660,7 +665,7 @@ public class TestMultipleOutputs2 {
     @Override
     public void configure(JobConf job) {
       bytesKey = new BytesWritable();
-      conf = job;
+      //conf = job;
       sortKey = job.get("sortKey");
       try {
         Schema outSchema = BasicTableOutputFormat.getSchema(job);
@@ -741,8 +746,9 @@ System.out.println("value0 : "+value.get
   public void runMR(String myMultiLocs, String sortKey) throws ParseException,
       IOException, Exception, org.apache.hadoop.zebra.parser.ParseException {
 
-    JobConf jobConf = new JobConf();
-    jobConf.setJobName("tableMRSample");
+    JobConf jobConf = new JobConf(conf);
+    jobConf.setJobName("TestMultipleOutput2");
+    jobConf.setJarByClass(TestMultipleOutputs2.class);
     jobConf.set("table.output.tfile.compression", "gz");
     jobConf.set("sortKey", sortKey);
     // input settings
@@ -772,13 +778,25 @@ System.out.println("value0 : "+value.get
     BasicTableOutputFormat.close(jobConf);
   }
 
-  public static void main(String[] args) throws ParseException,
-      org.apache.hadoop.zebra.parser.ParseException, Exception {
+  @Override
+  public int run(String[] args) throws Exception {
     TestMultipleOutputs2 test = new TestMultipleOutputs2();
     TestMultipleOutputs2.setUpOnce();
     test.test1();
     test.test2();
     test.test3();
    
+    return 0;
+  }
+  
+  
+  public static void main(String[] args) throws Exception {
+    System.out.println("*******************  this is new today");
+
+    conf = new Configuration();
+    
+    int res = ToolRunner.run(conf, new TestMultipleOutputs2(), args);
+    
+    System.exit(res);
   }
 }

Modified: hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs2TypedApi.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs2TypedApi.java?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs2TypedApi.java (original)
+++ hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs2TypedApi.java Tue Mar  9 16:17:42 2010
@@ -34,6 +34,7 @@ import java.util.TreeMap;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -52,6 +53,8 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.MultipleOutputs;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.zebra.mapred.BasicTableOutputFormat;
 import org.apache.hadoop.zebra.mapred.TestBasicTableIOFormatLocalFS.InvIndex;
 import org.apache.hadoop.zebra.parser.ParseException;
@@ -88,11 +91,10 @@ import org.junit.Test;
  * 
  * 
  */
-public class TestMultipleOutputs2TypedApi {
+public class TestMultipleOutputs2TypedApi extends Configured implements Tool {
 
   static String inputPath;
   static String inputFileName = "multi-input.txt";
-  //protected static ExecType execType = ExecType.MAPREDUCE;
   protected static ExecType execType = ExecType.LOCAL;
   private static MiniCluster cluster;
   protected static PigServer pigServer;
@@ -114,39 +116,41 @@ public class TestMultipleOutputs2TypedAp
   public static void setUpOnce() throws IOException {
     if (System.getenv("hadoop.log.dir") == null) {
       String base = new File(".").getPath(); // getAbsolutePath();
-      System
-          .setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
+      System.setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
     }
-
-    if (System.getProperty("whichCluster") == null) {
-      System.setProperty("whichCluster", "miniCluster");
-      System.out.println("should be called");
-      whichCluster = System.getProperty("whichCluster");
+    
+    // by default we use miniCluster
+    if (System.getenv("whichCluster") == null) {
+      whichCluster = "miniCluster";
     } else {
-      whichCluster = System.getProperty("whichCluster");
-    }
-
-    System.out.println("clusterddddd: " + whichCluster);
-    System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
-    System.out.println(" get env user name: " + System.getenv("USER"));
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System
-        .getenv("HADOOP_HOME") == null)) {
-      System.out.println("Please set HADOOP_HOME");
-      System.exit(0);
+      whichCluster = System.getenv("whichCluster");
     }
 
-    conf = new Configuration();
-
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System.getenv("USER") == null)) {
-      System.out.println("Please set USER");
-      System.exit(0);
+    if (conf == null) {
+      conf = new Configuration();
     }
-    zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
-
-    File file = new File(zebraJar);
-    if (!file.exists() && whichCluster.equalsIgnoreCase("realCluster")) {
-      System.out.println("Please put zebra.jar at hadoop_home/lib");
-      System.exit(0);
+    
+    if (whichCluster.equals("realCluster")) {
+      System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
+      System.out.println(" get env user name: " + System.getenv("USER"));
+      
+      if (System.getenv("HADOOP_HOME") == null) {
+        System.out.println("Please set HADOOP_HOME for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      if (System.getenv("USER") == null) {
+        System.out.println("Please set USER for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
+
+      File file = new File(zebraJar);
+      if (!file.exists()) {
+        System.out.println("Please place zebra.jar at $HADOOP_HOME/lib");
+        System.exit(0);
+      }
     }
 
     // set inputPath and output path
@@ -159,7 +163,6 @@ public class TestMultipleOutputs2TypedAp
           + "," + "/user/" + System.getenv("USER") + "/" + "india" + ","
           + "/user/" + System.getenv("USER") + "/" + "japan");
       fs = new Path(inputPath).getFileSystem(conf);
-
     } else {
       RawLocalFileSystem rawLFS = new RawLocalFileSystem();
       fs = new LocalFileSystem(rawLFS);
@@ -639,7 +642,7 @@ public class TestMultipleOutputs2TypedAp
     private BytesWritable bytesKey;
     private Tuple tupleRow;
     private Object javaObj;
-    private JobConf conf;
+    //private JobConf conf;
 
     @Override
     public void map(LongWritable key, Text value,
@@ -698,7 +701,7 @@ public class TestMultipleOutputs2TypedAp
     @Override
     public void configure(JobConf job) {
       bytesKey = new BytesWritable();
-      conf = job;
+      //conf = job;
       sortKey = job.get("sortKey");
       try {
         Schema outSchema = BasicTableOutputFormat.getSchema(job);
@@ -774,8 +777,9 @@ public class TestMultipleOutputs2TypedAp
   public void runMR(String sortKey, Path... paths) throws ParseException,
       IOException, Exception, org.apache.hadoop.zebra.parser.ParseException {
 
-    JobConf jobConf = new JobConf();
-    jobConf.setJobName("tableMRSample");
+    JobConf jobConf = new JobConf(conf);
+    jobConf.setJobName("TestMultipleOutputs2TypedApi");
+    jobConf.setJarByClass(TestMultipleOutputs2TypedApi.class);
     jobConf.set("table.output.tfile.compression", "gz");
     jobConf.set("sortKey", sortKey);
     // input settings
@@ -809,13 +813,24 @@ public class TestMultipleOutputs2TypedAp
     BasicTableOutputFormat.close(jobConf);
   }
 
-  public static void main(String[] args) throws ParseException,
-      org.apache.hadoop.zebra.parser.ParseException, Exception {
+  @Override
+  public int run(String[] args) throws Exception {
     TestMultipleOutputs2TypedApi test = new TestMultipleOutputs2TypedApi();
     TestMultipleOutputs2TypedApi.setUpOnce();
     test.test1();
     test.test2();
     test.test3();
 
+    return 0;
   }
+  
+  public static void main(String[] args) throws Exception {
+    System.out.println("*******************  this is new today");
+
+    conf = new Configuration();
+    
+    int res = ToolRunner.run(conf, new TestMultipleOutputs2TypedApi(), args);
+    
+    System.exit(res);
+  }  
 }

Modified: hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs3.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs3.java?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs3.java (original)
+++ hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs3.java Tue Mar  9 16:17:42 2010
@@ -33,6 +33,7 @@ import java.util.TreeMap;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -51,6 +52,8 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.MultipleOutputs;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.zebra.mapred.BasicTableOutputFormat;
 import org.apache.hadoop.zebra.mapred.TestBasicTableIOFormatLocalFS.InvIndex;
 import org.apache.hadoop.zebra.parser.ParseException;
@@ -86,11 +89,10 @@ import org.junit.Test;
  * 
  * 
  */
-public class TestMultipleOutputs3 {
+public class TestMultipleOutputs3 extends Configured implements Tool {
 
   static String inputPath;
   static String inputFileName = "multi-input.txt";
-  //protected static ExecType execType = ExecType.MAPREDUCE;
   protected static ExecType execType = ExecType.LOCAL;
   private static MiniCluster cluster;
   protected static PigServer pigServer;
@@ -112,39 +114,41 @@ public class TestMultipleOutputs3 {
   public static void setUpOnce() throws IOException {
     if (System.getenv("hadoop.log.dir") == null) {
       String base = new File(".").getPath(); // getAbsolutePath();
-      System
-          .setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
+      System.setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
     }
 
-    if (System.getProperty("whichCluster") == null) {
-      System.setProperty("whichCluster", "miniCluster");
-      System.out.println("should be called");
-      whichCluster = System.getProperty("whichCluster");
+    // by default we use miniCluster
+    if (System.getenv("whichCluster") == null) {
+      whichCluster = "miniCluster";
     } else {
-      whichCluster = System.getProperty("whichCluster");
+      whichCluster = System.getenv("whichCluster");
     }
 
-    System.out.println("clusterddddd: " + whichCluster);
-    System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
-    System.out.println(" get env user name: " + System.getenv("USER"));
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System
-        .getenv("HADOOP_HOME") == null)) {
-      System.out.println("Please set HADOOP_HOME");
-      System.exit(0);
-    }
-
-    conf = new Configuration();
-
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System.getenv("USER") == null)) {
-      System.out.println("Please set USER");
-      System.exit(0);
+    if (conf == null) {
+      conf = new Configuration();
     }
-    zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
-
-    File file = new File(zebraJar);
-    if (!file.exists() && whichCluster.equalsIgnoreCase("realCluster")) {
-      System.out.println("Please put zebra.jar at hadoop_home/lib");
-      System.exit(0);
+    
+    if (whichCluster.equals("realCluster")) {
+      System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
+      System.out.println(" get env user name: " + System.getenv("USER"));
+      
+      if (System.getenv("HADOOP_HOME") == null) {
+        System.out.println("Please set HADOOP_HOME for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      if (System.getenv("USER") == null) {
+        System.out.println("Please set USER for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
+
+      File file = new File(zebraJar);
+      if (!file.exists()) {
+        System.out.println("Please place zebra.jar at $HADOOP_HOME/lib");
+        System.exit(0);
+      }
     }
 
     // set inputPath and output path
@@ -465,7 +469,8 @@ public class TestMultipleOutputs3 {
 
   }
  
-  @Test(expected = IOException.class)
+  //@Test(expected = IOException.class)
+  @Test
   public void test1() throws ParseException, IOException,
       org.apache.hadoop.zebra.parser.ParseException, Exception {
     /*
@@ -494,16 +499,15 @@ public class TestMultipleOutputs3 {
   //  checkTable(myMultiLocs);
   //  Assert.fail("test 1 ,should have thrown IOExcepiton");
     System.out.println("DONE test " + getCurrentMethodName());
-
   }
-
+  
 
   static class MapClass implements
       Mapper<LongWritable, Text, BytesWritable, Tuple> {
     private BytesWritable bytesKey;
     private Tuple tupleRow;
     private Object javaObj;
-    private JobConf conf;
+    //private JobConf conf;
 
     @Override
     public void map(LongWritable key, Text value,
@@ -562,7 +566,7 @@ public class TestMultipleOutputs3 {
     @Override
     public void configure(JobConf job) {
       bytesKey = new BytesWritable();
-      conf = job;
+      //conf = job;
       sortKey = job.get("sortKey");
       try {
         Schema outSchema = BasicTableOutputFormat.getSchema(job);
@@ -608,23 +612,27 @@ public class TestMultipleOutputs3 {
   }
 
   static class OutputPartitionerClass extends ZebraOutputPartition {
-
     @Override
-    public int getOutputPartition(BytesWritable key, Tuple value) throws IndexOutOfBoundsException, ExecException{
-
+    public int getOutputPartition(BytesWritable key, Tuple value) throws IndexOutOfBoundsException, ExecException {
       // System.out.println(this.jobConf);
-       value.get(2);
-       Assert.fail("int try, should have thrown exception");
-       return 0;
+      try {
+        value.get(2);
+      } catch (IndexOutOfBoundsException e) {
+        return 0;
+      }
+      
+      // should not reach here
+      Assert.fail("in try, should have thrown exception");
+      return 0;
     }
-
   }
 
   public void runMR(String myMultiLocs, String sortKey) throws ParseException,
       IOException, Exception, org.apache.hadoop.zebra.parser.ParseException {
 
-    JobConf jobConf = new JobConf();
-    jobConf.setJobName("tableMRSample");
+    JobConf jobConf = new JobConf(conf);
+    jobConf.setJobName("TestMultipleOutputs3");
+    jobConf.setJarByClass(TestMultipleOutputs3.class);
     jobConf.set("table.output.tfile.compression", "gz");
     jobConf.set("sortKey", sortKey);
     // input settings
@@ -654,11 +662,22 @@ public class TestMultipleOutputs3 {
     BasicTableOutputFormat.close(jobConf);
   }
 
-  public static void main(String[] args) throws ParseException,
-      org.apache.hadoop.zebra.parser.ParseException, Exception {
+  @Override
+  public int run(String[] args) throws Exception {
     TestMultipleOutputs3 test = new TestMultipleOutputs3();
     TestMultipleOutputs3.setUpOnce();
     test.test1();
 
+    return 0;
+  }
+  
+  public static void main(String[] args) throws Exception {
+    System.out.println("*******************  this is new today");
+
+    conf = new Configuration();
+    
+    int res = ToolRunner.run(conf, new TestMultipleOutputs3(), args);
+    
+    System.exit(res);
   }
 }

Modified: hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs3TypedApi.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs3TypedApi.java?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs3TypedApi.java (original)
+++ hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs3TypedApi.java Tue Mar  9 16:17:42 2010
@@ -34,6 +34,7 @@ import java.util.TreeMap;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -52,6 +53,8 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.MultipleOutputs;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.zebra.mapred.BasicTableOutputFormat;
 import org.apache.hadoop.zebra.mapred.TestBasicTableIOFormatLocalFS.InvIndex;
 import org.apache.hadoop.zebra.parser.ParseException;
@@ -88,7 +91,7 @@ import org.junit.Test;
  * 
  * 
  */
-public class TestMultipleOutputs3TypedApi {
+public class TestMultipleOutputs3TypedApi extends Configured implements Tool {
 
   static String inputPath;
   static String inputFileName = "multi-input.txt";
@@ -114,39 +117,41 @@ public class TestMultipleOutputs3TypedAp
   public static void setUpOnce() throws IOException {
     if (System.getenv("hadoop.log.dir") == null) {
       String base = new File(".").getPath(); // getAbsolutePath();
-      System
-          .setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
+      System.setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
     }
 
-    if (System.getProperty("whichCluster") == null) {
-      System.setProperty("whichCluster", "miniCluster");
-      System.out.println("should be called");
-      whichCluster = System.getProperty("whichCluster");
+    // by default we use miniCluster
+    if (System.getenv("whichCluster") == null) {
+      whichCluster = "miniCluster";
     } else {
-      whichCluster = System.getProperty("whichCluster");
+      whichCluster = System.getenv("whichCluster");
     }
 
-    System.out.println("clusterddddd: " + whichCluster);
-    System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
-    System.out.println(" get env user name: " + System.getenv("USER"));
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System
-        .getenv("HADOOP_HOME") == null)) {
-      System.out.println("Please set HADOOP_HOME");
-      System.exit(0);
+    if (conf == null) {
+      conf = new Configuration();
     }
+    
+    if (whichCluster.equals("realCluster")) {
+      System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
+      System.out.println(" get env user name: " + System.getenv("USER"));
+      
+      if (System.getenv("HADOOP_HOME") == null) {
+        System.out.println("Please set HADOOP_HOME for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      if (System.getenv("USER") == null) {
+        System.out.println("Please set USER for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
 
-    conf = new Configuration();
-
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System.getenv("USER") == null)) {
-      System.out.println("Please set USER");
-      System.exit(0);
-    }
-    zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
-
-    File file = new File(zebraJar);
-    if (!file.exists() && whichCluster.equalsIgnoreCase("realCluster")) {
-      System.out.println("Please put zebra.jar at hadoop_home/lib");
-      System.exit(0);
+      File file = new File(zebraJar);
+      if (!file.exists()) {
+        System.out.println("Please place zebra.jar at $HADOOP_HOME/lib");
+        System.exit(0);
+      }
     }
 
     // set inputPath and output path
@@ -186,7 +191,6 @@ public class TestMultipleOutputs3TypedAp
       pigServer = new PigServer(ExecType.MAPREDUCE, ConfigurationUtil
           .toProperties(conf));
       pigServer.registerJar(zebraJar);
-
     }
 
     if (whichCluster.equalsIgnoreCase("miniCluster")) {
@@ -467,7 +471,7 @@ public class TestMultipleOutputs3TypedAp
 
   }
  
-  @Test(expected = IOException.class)
+  @Test
   public void test1() throws ParseException, IOException,
       org.apache.hadoop.zebra.parser.ParseException, Exception {
     /*
@@ -513,7 +517,7 @@ public class TestMultipleOutputs3TypedAp
     private BytesWritable bytesKey;
     private Tuple tupleRow;
     private Object javaObj;
-    private JobConf conf;
+    //private JobConf conf;
 
     @Override
     public void map(LongWritable key, Text value,
@@ -572,7 +576,7 @@ public class TestMultipleOutputs3TypedAp
     @Override
     public void configure(JobConf job) {
       bytesKey = new BytesWritable();
-      conf = job;
+      //conf = job;
       sortKey = job.get("sortKey");
       try {
         Schema outSchema = BasicTableOutputFormat.getSchema(job);
@@ -621,20 +625,24 @@ public class TestMultipleOutputs3TypedAp
 
     @Override
     public int getOutputPartition(BytesWritable key, Tuple value) throws IndexOutOfBoundsException, ExecException{
-
-      // System.out.println(this.jobConf);
-       value.get(2);
-       Assert.fail("int try, should have thrown exception");
-       return 0;
+      try {
+        value.get(2);
+      } catch (IndexOutOfBoundsException e) {
+        return 0;
+      }
+      
+      // should not reach here
+      Assert.fail("in try, should have thrown exception");
+      return 0;
     }
-
   }
 
   public void runMR(String sortKey, Path...paths) throws ParseException,
       IOException, Exception, org.apache.hadoop.zebra.parser.ParseException {
 
-    JobConf jobConf = new JobConf();
-    jobConf.setJobName("tableMRSample");
+    JobConf jobConf = new JobConf(conf);
+    jobConf.setJobName("TestMultipleOutputs3TypedApi");
+    jobConf.setJarByClass(TestMultipleOutputs3TypedApi.class);
     jobConf.set("table.output.tfile.compression", "gz");
     jobConf.set("sortKey", sortKey);
     // input settings
@@ -666,11 +674,22 @@ public class TestMultipleOutputs3TypedAp
     BasicTableOutputFormat.close(jobConf);
   }
 
-  public static void main(String[] args) throws ParseException,
-      org.apache.hadoop.zebra.parser.ParseException, Exception {
+  @Override
+  public int run(String[] args) throws Exception {
     TestMultipleOutputs3TypedApi test = new TestMultipleOutputs3TypedApi();
     TestMultipleOutputs3TypedApi.setUpOnce();
     test.test1();
 
+    return 0;
+  }
+  
+  public static void main(String[] args) throws Exception {
+    System.out.println("*******************  this is new today");
+
+    conf = new Configuration();
+    
+    int res = ToolRunner.run(conf, new TestMultipleOutputs3TypedApi(), args);
+    
+    System.exit(res);
   }
 }

Modified: hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs4.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs4.java?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs4.java (original)
+++ hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs4.java Tue Mar  9 16:17:42 2010
@@ -33,6 +33,7 @@ import java.util.TreeMap;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -51,6 +52,8 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.MultipleOutputs;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.zebra.mapred.BasicTableOutputFormat;
 import org.apache.hadoop.zebra.mapred.TestBasicTableIOFormatLocalFS.InvIndex;
 import org.apache.hadoop.zebra.parser.ParseException;
@@ -86,7 +89,7 @@ import org.junit.Test;
  * 
  * 
  */
-public class TestMultipleOutputs4 {
+public class TestMultipleOutputs4 extends Configured implements Tool {
 
   static String inputPath;
   static String inputFileName = "multi-input.txt";
@@ -112,39 +115,41 @@ public class TestMultipleOutputs4 {
   public static void setUpOnce() throws IOException {
     if (System.getenv("hadoop.log.dir") == null) {
       String base = new File(".").getPath(); // getAbsolutePath();
-      System
-          .setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
+      System.setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
     }
 
-    if (System.getProperty("whichCluster") == null) {
-      System.setProperty("whichCluster", "miniCluster");
-      System.out.println("should be called");
-      whichCluster = System.getProperty("whichCluster");
+    // by default we use miniCluster
+    if (System.getenv("whichCluster") == null) {
+      whichCluster = "miniCluster";
     } else {
-      whichCluster = System.getProperty("whichCluster");
+      whichCluster = System.getenv("whichCluster");
     }
 
-    System.out.println("clusterddddd: " + whichCluster);
-    System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
-    System.out.println(" get env user name: " + System.getenv("USER"));
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System
-        .getenv("HADOOP_HOME") == null)) {
-      System.out.println("Please set HADOOP_HOME");
-      System.exit(0);
-    }
-
-    conf = new Configuration();
-
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System.getenv("USER") == null)) {
-      System.out.println("Please set USER");
-      System.exit(0);
+    if (conf == null) {
+      conf = new Configuration();
     }
-    zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
+    
+    if (whichCluster.equals("realCluster")) {
+      System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
+      System.out.println(" get env user name: " + System.getenv("USER"));
+      
+      if (System.getenv("HADOOP_HOME") == null) {
+        System.out.println("Please set HADOOP_HOME for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      if (System.getenv("USER") == null) {
+        System.out.println("Please set USER for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
 
-    File file = new File(zebraJar);
-    if (!file.exists() && whichCluster.equalsIgnoreCase("realCluster")) {
-      System.out.println("Please put zebra.jar at hadoop_home/lib");
-      System.exit(0);
+      File file = new File(zebraJar);
+      if (!file.exists()) {
+        System.out.println("Please place zebra.jar at $HADOOP_HOME/lib");
+        System.exit(0);
+      }
     }
 
     // set inputPath and output path
@@ -157,7 +162,6 @@ public class TestMultipleOutputs4 {
           + "," + "/user/" + System.getenv("USER") + "/" + "india" + ","
           + "/user/" + System.getenv("USER") + "/" + "japan");
       fs = new Path(inputPath).getFileSystem(conf);
-
     } else {
       RawLocalFileSystem rawLFS = new RawLocalFileSystem();
       fs = new LocalFileSystem(rawLFS);
@@ -530,7 +534,6 @@ public class TestMultipleOutputs4 {
     private BytesWritable bytesKey;
     private Tuple tupleRow;
     private Object javaObj;
-    private JobConf conf;
 
     @Override
     public void map(LongWritable key, Text value,
@@ -589,7 +592,6 @@ public class TestMultipleOutputs4 {
     @Override
     public void configure(JobConf job) {
       bytesKey = new BytesWritable();
-      conf = job;
       sortKey = job.get("sortKey");
       try {
         Schema outSchema = BasicTableOutputFormat.getSchema(job);
@@ -647,8 +649,9 @@ public class TestMultipleOutputs4 {
   public void runMR(String myMultiLocs, String sortKey) throws ParseException,
       IOException, Exception, org.apache.hadoop.zebra.parser.ParseException {
 
-    JobConf jobConf = new JobConf();
-    jobConf.setJobName("tableMRSample");
+    JobConf jobConf = new JobConf(conf);
+    jobConf.setJobName("TestMultipleOutputs4");
+    jobConf.setJarByClass(TestMultipleOutputs4.class);
     jobConf.set("table.output.tfile.compression", "gz");
     jobConf.set("sortKey", sortKey);
     // input settings
@@ -678,11 +681,24 @@ public class TestMultipleOutputs4 {
     BasicTableOutputFormat.close(jobConf);
   }
 
-  public static void main(String[] args) throws ParseException,
-      org.apache.hadoop.zebra.parser.ParseException, Exception {
+  @Override
+  public int run(String[] args) throws Exception {
     TestMultipleOutputs4 test = new TestMultipleOutputs4();
     TestMultipleOutputs4.setUpOnce();
    
-   test.test1();
+    test.test1();
+   
+    return 0;
+  }
+  
+  public static void main(String[] args) throws Exception {
+    //XXX
+    System.out.println("*******************  this is new today");
+
+    conf = new Configuration();
+    
+    int res = ToolRunner.run(conf, new TestMultipleOutputs4(), args);
+    
+    System.exit(res);
   }
 }

Modified: hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs4TypedApi.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs4TypedApi.java?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs4TypedApi.java (original)
+++ hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputs4TypedApi.java Tue Mar  9 16:17:42 2010
@@ -34,6 +34,7 @@ import java.util.TreeMap;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -52,6 +53,8 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.MultipleOutputs;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.zebra.mapred.BasicTableOutputFormat;
 import org.apache.hadoop.zebra.mapred.TestBasicTableIOFormatLocalFS.InvIndex;
 import org.apache.hadoop.zebra.parser.ParseException;
@@ -88,7 +91,7 @@ import org.junit.Test;
  * 
  * 
  */
-public class TestMultipleOutputs4TypedApi {
+public class TestMultipleOutputs4TypedApi extends Configured implements Tool {
 
   static String inputPath;
   static String inputFileName = "multi-input.txt";
@@ -113,41 +116,43 @@ public class TestMultipleOutputs4TypedAp
   public static void setUpOnce() throws IOException {
     if (System.getenv("hadoop.log.dir") == null) {
       String base = new File(".").getPath(); // getAbsolutePath();
-      System
-          .setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
+      System.setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
     }
 
-    if (System.getProperty("whichCluster") == null) {
-      System.setProperty("whichCluster", "miniCluster");
-      System.out.println("should be called");
-      whichCluster = System.getProperty("whichCluster");
+    // by default we use miniCluster
+    if (System.getenv("whichCluster") == null) {
+      whichCluster = "miniCluster";
     } else {
-      whichCluster = System.getProperty("whichCluster");
+      whichCluster = System.getenv("whichCluster");
     }
 
-    System.out.println("clusterddddd: " + whichCluster);
-    System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
-    System.out.println(" get env user name: " + System.getenv("USER"));
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System
-        .getenv("HADOOP_HOME") == null)) {
-      System.out.println("Please set HADOOP_HOME");
-      System.exit(0);
-    }
-
-    conf = new Configuration();
-
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System.getenv("USER") == null)) {
-      System.out.println("Please set USER");
-      System.exit(0);
+    if (conf == null) {
+      conf = new Configuration();
     }
-    zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
+    
+    if (whichCluster.equals("realCluster")) {
+      System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
+      System.out.println(" get env user name: " + System.getenv("USER"));
+      
+      if (System.getenv("HADOOP_HOME") == null) {
+        System.out.println("Please set HADOOP_HOME for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      if (System.getenv("USER") == null) {
+        System.out.println("Please set USER for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
 
-    File file = new File(zebraJar);
-    if (!file.exists() && whichCluster.equalsIgnoreCase("realCluster")) {
-      System.out.println("Please put zebra.jar at hadoop_home/lib");
-      System.exit(0);
+      File file = new File(zebraJar);
+      if (!file.exists()) {
+        System.out.println("Please place zebra.jar at $HADOOP_HOME/lib");
+        System.exit(0);
+      }
     }
-
+    
     // set inputPath and output path
     String workingDir = null;
     if (whichCluster.equalsIgnoreCase("realCluster")) {
@@ -541,7 +546,6 @@ public class TestMultipleOutputs4TypedAp
     private BytesWritable bytesKey;
     private Tuple tupleRow;
     private Object javaObj;
-    private JobConf conf;
 
     @Override
     public void map(LongWritable key, Text value,
@@ -600,7 +604,6 @@ public class TestMultipleOutputs4TypedAp
     @Override
     public void configure(JobConf job) {
       bytesKey = new BytesWritable();
-      conf = job;
       sortKey = job.get("sortKey");
       try {
         Schema outSchema = BasicTableOutputFormat.getSchema(job);
@@ -659,8 +662,9 @@ public class TestMultipleOutputs4TypedAp
   public void runMR(String sortKey, Path... paths) throws ParseException,
       IOException, Exception, org.apache.hadoop.zebra.parser.ParseException {
 
-    JobConf jobConf = new JobConf();
-    jobConf.setJobName("tableMRSample");
+    JobConf jobConf = new JobConf(conf);
+    jobConf.setJobName("TestMultipleOutputs4TypedApi");
+    jobConf.setJarByClass(TestMultipleOutputs4TypedApi.class);
     jobConf.set("table.output.tfile.compression", "gz");
     jobConf.set("sortKey", sortKey);
     // input settings
@@ -693,11 +697,24 @@ public class TestMultipleOutputs4TypedAp
     BasicTableOutputFormat.close(jobConf);
   }
 
-  public static void main(String[] args) throws ParseException,
-      org.apache.hadoop.zebra.parser.ParseException, Exception {
+  @Override
+  public int run(String[] args) throws Exception {
     TestMultipleOutputs4TypedApi test = new TestMultipleOutputs4TypedApi();
     TestMultipleOutputs4TypedApi.setUpOnce();
 
     test.test1();
+    
+    return 0;
+  }
+  
+  public static void main(String[] args) throws Exception {
+    //XXX
+    System.out.println("*******************  this is new today");
+
+    conf = new Configuration();
+    
+    int res = ToolRunner.run(conf, new TestMultipleOutputs4TypedApi(), args);
+    
+    System.exit(res);
   }
 }

Modified: hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputsTypeApi.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputsTypeApi.java?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputsTypeApi.java (original)
+++ hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputsTypeApi.java Tue Mar  9 16:17:42 2010
@@ -34,6 +34,7 @@ import java.util.TreeMap;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -52,6 +53,8 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.MultipleOutputs;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.zebra.mapred.BasicTableOutputFormat;
 import org.apache.hadoop.zebra.mapred.TestBasicTableIOFormatLocalFS.InvIndex;
 import org.apache.hadoop.zebra.parser.ParseException;
@@ -89,7 +92,7 @@ import org.junit.Test;
  * 
  * 
  */
-public class TestMultipleOutputsTypeApi {
+public class TestMultipleOutputsTypeApi extends Configured implements Tool {
 
   static String inputPath;
   static String inputFileName = "multi-input.txt";
@@ -115,41 +118,43 @@ public class TestMultipleOutputsTypeApi 
   public static void setUpOnce() throws IOException {
     if (System.getenv("hadoop.log.dir") == null) {
       String base = new File(".").getPath(); // getAbsolutePath();
-      System
-          .setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
+      System.setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
     }
 
-    if (System.getProperty("whichCluster") == null) {
-      System.setProperty("whichCluster", "miniCluster");
-      System.out.println("should be called");
-      whichCluster = System.getProperty("whichCluster");
+    // by default we use miniCluster
+    if (System.getenv("whichCluster") == null) {
+      whichCluster = "miniCluster";
     } else {
-      whichCluster = System.getProperty("whichCluster");
+      whichCluster = System.getenv("whichCluster");
     }
 
-    System.out.println("clusterddddd: " + whichCluster);
-    System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
-    System.out.println(" get env user name: " + System.getenv("USER"));
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System
-        .getenv("HADOOP_HOME") == null)) {
-      System.out.println("Please set HADOOP_HOME");
-      System.exit(0);
+    if (conf == null) {
+      conf = new Configuration();
     }
+    
+    if (whichCluster.equals("realCluster")) {
+      System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
+      System.out.println(" get env user name: " + System.getenv("USER"));
+      
+      if (System.getenv("HADOOP_HOME") == null) {
+        System.out.println("Please set HADOOP_HOME for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      if (System.getenv("USER") == null) {
+        System.out.println("Please set USER for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
 
-    conf = new Configuration();
-
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System.getenv("USER") == null)) {
-      System.out.println("Please set USER");
-      System.exit(0);
-    }
-    zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
-
-    File file = new File(zebraJar);
-    if (!file.exists() && whichCluster.equalsIgnoreCase("realCluster")) {
-      System.out.println("Please put zebra.jar at hadoop_home/lib");
-      System.exit(0);
+      File file = new File(zebraJar);
+      if (!file.exists()) {
+        System.out.println("Please place zebra.jar at $HADOOP_HOME/lib");
+        System.exit(0);
+      }
     }
-
+    
     // set inputPath and output path
     String workingDir = null;
     if (whichCluster.equalsIgnoreCase("realCluster")) {
@@ -655,7 +660,6 @@ public class TestMultipleOutputsTypeApi 
     private BytesWritable bytesKey;
     private Tuple tupleRow;
     private Object javaObj;
-    private JobConf conf;
 
     @Override
     public void map(LongWritable key, Text value,
@@ -714,7 +718,6 @@ public class TestMultipleOutputsTypeApi 
     @Override
     public void configure(JobConf job) {
       bytesKey = new BytesWritable();
-      conf = job;
       sortKey = job.get("sortKey");
       try {
         Schema outSchema = BasicTableOutputFormat.getSchema(job);
@@ -788,8 +791,10 @@ public class TestMultipleOutputsTypeApi 
   public void runMR(String sortKey, Path... paths) throws ParseException,
       IOException, Exception, org.apache.hadoop.zebra.parser.ParseException {
 
-    JobConf jobConf = new JobConf();
-    jobConf.setJobName("tableMRSample");
+    JobConf jobConf = new JobConf(conf);
+    //XXX
+    jobConf.setJobName("TestMultipleOutputsTypeApi");
+    jobConf.setJarByClass(TestMultipleOutputsTypeApi.class);
     jobConf.set("table.output.tfile.compression", "gz");
     jobConf.set("sortKey", sortKey);
     // input settings
@@ -821,14 +826,27 @@ public class TestMultipleOutputsTypeApi 
     BasicTableOutputFormat.close(jobConf);
   }
 
-  public static void main(String[] args) throws ParseException,
-      org.apache.hadoop.zebra.parser.ParseException, Exception {
+  @Override
+  public int run(String[] args) throws Exception {
     TestMultipleOutputsTypeApi test = new TestMultipleOutputsTypeApi();
     TestMultipleOutputsTypeApi.setUpOnce();
     System.out.println("after setup");
+    
     test.test1();
     test.test2();
     test.test3();
+    
+    return 0;
+  }
+  
+  public static void main(String[] args) throws Exception {
+    //XXX
+    System.out.println("*******************  this is new today");
 
+    conf = new Configuration();
+    
+    int res = ToolRunner.run(conf, new TestMultipleOutputsTypeApi(), args);
+    
+    System.exit(res);
   }
 }

Modified: hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputsTypedApiNeg.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputsTypedApiNeg.java?rev=920956&r1=920955&r2=920956&view=diff
==============================================================================
--- hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputsTypedApiNeg.java (original)
+++ hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/mapred/TestMultipleOutputsTypedApiNeg.java Tue Mar  9 16:17:42 2010
@@ -34,6 +34,7 @@ import java.util.TreeMap;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -52,6 +53,8 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.MultipleOutputs;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.zebra.mapred.BasicTableOutputFormat;
 import org.apache.hadoop.zebra.mapred.TestBasicTableIOFormatLocalFS.InvIndex;
 import org.apache.hadoop.zebra.parser.ParseException;
@@ -88,7 +91,7 @@ import org.junit.Test;
  * 
  * 
  */
-public class TestMultipleOutputsTypedApiNeg {
+public class TestMultipleOutputsTypedApiNeg extends Configured implements Tool {
 
   static String inputPath;
   static String inputFileName = "multi-input.txt";
@@ -114,39 +117,41 @@ public class TestMultipleOutputsTypedApi
   public static void setUpOnce() throws IOException {
     if (System.getenv("hadoop.log.dir") == null) {
       String base = new File(".").getPath(); // getAbsolutePath();
-      System
-          .setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
+      System.setProperty("hadoop.log.dir", new Path(base).toString() + "./logs");
     }
 
-    if (System.getProperty("whichCluster") == null) {
-      System.setProperty("whichCluster", "miniCluster");
-      System.out.println("should be called");
-      whichCluster = System.getProperty("whichCluster");
+    // by default we use miniCluster
+    if (System.getenv("whichCluster") == null) {
+      whichCluster = "miniCluster";
     } else {
-      whichCluster = System.getProperty("whichCluster");
+      whichCluster = System.getenv("whichCluster");
     }
 
-    System.out.println("clusterddddd: " + whichCluster);
-    System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
-    System.out.println(" get env user name: " + System.getenv("USER"));
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System
-        .getenv("HADOOP_HOME") == null)) {
-      System.out.println("Please set HADOOP_HOME");
-      System.exit(0);
-    }
-
-    conf = new Configuration();
-
-    if ((whichCluster.equalsIgnoreCase("realCluster") && System.getenv("USER") == null)) {
-      System.out.println("Please set USER");
-      System.exit(0);
+    if (conf == null) {
+      conf = new Configuration();
     }
-    zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
-
-    File file = new File(zebraJar);
-    if (!file.exists() && whichCluster.equalsIgnoreCase("realCluster")) {
-      System.out.println("Please put zebra.jar at hadoop_home/lib");
-      System.exit(0);
+    
+    if (whichCluster.equals("realCluster")) {
+      System.out.println(" get env hadoop home: " + System.getenv("HADOOP_HOME"));
+      System.out.println(" get env user name: " + System.getenv("USER"));
+      
+      if (System.getenv("HADOOP_HOME") == null) {
+        System.out.println("Please set HADOOP_HOME for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      if (System.getenv("USER") == null) {
+        System.out.println("Please set USER for realCluster testing mode");
+        System.exit(0);        
+      }
+      
+      zebraJar = System.getenv("HADOOP_HOME") + "/lib/zebra.jar";
+
+      File file = new File(zebraJar);
+      if (!file.exists()) {
+        System.out.println("Please place zebra.jar at $HADOOP_HOME/lib");
+        System.exit(0);
+      }
     }
 
     // set inputPath and output path
@@ -525,9 +530,20 @@ public class TestMultipleOutputsTypedApi
           + methodName + "," + "/user/" + System.getenv("USER") + "/" + ""
           + "," + "/user/" + System.getenv("USER") + "/" + "b" + methodName);
 
+      
       paths.add(new Path(new String("/user/" + System.getenv("USER") + "/"
           + "a" + methodName)));
-      paths.add(new Path(""));
+      
+      try {
+        paths.add(new Path(""));
+      } catch (IllegalArgumentException e) {
+        System.out.println(e.getMessage());
+        System.exit(0);
+      }
+      
+      // should not reach here
+      Assert.fail("Should have seen exception already");
+      
       paths.add(new Path(new String("/user/" + System.getenv("USER") + "/"
           + "b" + methodName)));
     } else {
@@ -669,7 +685,6 @@ public class TestMultipleOutputsTypedApi
     private BytesWritable bytesKey;
     private Tuple tupleRow;
     private Object javaObj;
-    private JobConf conf;
 
     @Override
     public void map(LongWritable key, Text value,
@@ -728,7 +743,6 @@ public class TestMultipleOutputsTypedApi
     @Override
     public void configure(JobConf job) {
       bytesKey = new BytesWritable();
-      conf = job;
       sortKey = job.get("sortKey");
       try {
         Schema outSchema = BasicTableOutputFormat.getSchema(job);
@@ -797,8 +811,9 @@ public class TestMultipleOutputsTypedApi
   public void runMR(String sortKey, Path... paths) throws ParseException,
       IOException, Exception, org.apache.hadoop.zebra.parser.ParseException {
 
-    JobConf jobConf = new JobConf();
-    jobConf.setJobName("tableMRSample");
+    JobConf jobConf = new JobConf(conf);
+    jobConf.setJobName("TestMultipleOutputsTypedApiNeg");
+    jobConf.setJarByClass(TestMultipleOutputsTypedApiNeg.class);
     jobConf.set("table.output.tfile.compression", "gz");
     jobConf.set("sortKey", sortKey);
     // input settings
@@ -829,8 +844,8 @@ public class TestMultipleOutputsTypedApi
     BasicTableOutputFormat.close(jobConf);
   }
 
-  public static void main(String[] args) throws ParseException,
-      org.apache.hadoop.zebra.parser.ParseException, Exception {
+  @Override
+  public int run(String[] args) throws Exception {
     TestMultipleOutputsTypedApiNeg test = new TestMultipleOutputsTypedApiNeg();
     TestMultipleOutputsTypedApiNeg.setUpOnce();
 
@@ -838,5 +853,18 @@ public class TestMultipleOutputsTypedApi
     test.test2();
     test.test3();
     test.test4();
+    
+    return 0;
   }
+
+  public static void main(String[] args) throws Exception {
+    //XXX
+    System.out.println("*******************  this is new today");
+
+    conf = new Configuration();
+    
+    int res = ToolRunner.run(conf, new TestMultipleOutputsTypedApiNeg(), args);
+    
+    System.exit(res);
+  } 
 }