Posted to common-user@hadoop.apache.org by ch huang <ju...@gmail.com> on 2013/11/11 10:05:34 UTC

The code runs in one cluster but fails in another cluster

Here is my Java code. It compiles and runs fine in the test environment, but in the production environment I get the error shown below.

package com.hadoop.export;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PrefixFilter;

public class ExportMe {

    private static final Configuration conf;

    static {
        // Build the HBase client configuration once for the whole class.
        Configuration c = HBaseConfiguration.create();
        c.set("hbase.cluster.distributed", "true");
        c.set("hbase.zookeeper.quorum", "ch11,ch12,ch13");
        c.set("hbase.zookeeper.property.clientPort", "2181");
        conf = c;
    }

    /**
     * Scans the table for rows whose key starts with mykey and writes the
     * cell values, comma separated, to the given writer.
     */
    public static void queryAll(String tablename, String mykey, FileWriter fw) {
        try {
            HTable table = new HTable(conf, tablename);
            try {
                Scan scan = new Scan();
                FilterList fl = new FilterList();
                fl.addFilter(new PrefixFilter(mykey.getBytes()));
                scan.setFilter(fl);
                ResultScanner rs = table.getScanner(scan);
                try {
                    for (Result r : rs) {
                        StringBuilder output = new StringBuilder();
                        for (KeyValue keyValue : r.raw()) {
                            output.append(new String(keyValue.getValue())).append(',');
                        }
                        fw.write(output.toString());
                    }
                } finally {
                    rs.close();
                }
            } finally {
                table.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Reads the input file line by line and runs a prefix scan for each line,
     * appending all results to /tmp/myout.txt.
     */
    public static void readFileByLines(String fileName) {
        BufferedReader reader = null;
        FileWriter fw = null;
        try {
            reader = new BufferedReader(new FileReader(new File(fileName)));
            fw = new FileWriter("/tmp/myout.txt");
            String line;
            while ((line = reader.readLine()) != null) {
                queryAll("dmp_pageview", line, fw);
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Close both resources even if the scan fails partway through.
            if (fw != null) {
                try { fw.close(); } catch (IOException ignored) { }
            }
            if (reader != null) {
                try { reader.close(); } catch (IOException ignored) { }
            }
        }
    }

    public static void main(String[] args) {
        long startTime = System.currentTimeMillis();
        readFileByLines("/tmp/mysearch.log");
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("elapsed time is : " + elapsedTime + " ms");
    }
}


# hadoop com/hadoop/export/ExportMe
Error: Could not find or load main class com.hadoop.export.ExportMe

# ls com/hadoop/export/
ExportMe.class

Re: The code runs in one cluster but fails in another cluster

Posted by Ted Yu <yu...@gmail.com>.
You should package your class in a jar file. 
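For example, something like this (a minimal sketch; the jar name is arbitrary, and the hbase classpath helper assumes the hbase launcher script is on your PATH):

# jar cf exportme.jar com/hadoop/export/ExportMe.class
# HADOOP_CLASSPATH=`hbase classpath` hadoop jar exportme.jar com.hadoop.export.ExportMe

The hadoop launcher hands its first argument to the JVM as a main class name, and the current directory is not normally on the launcher's classpath, so a bare .class file in your working directory is not found even though it exists on disk. Packaging the class in a jar and running it with hadoop jar puts it on the classpath explicitly.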

Cheers

On Nov 11, 2013, at 1:05 AM, ch huang <ju...@gmail.com> wrote:

> Here is my Java code. It compiles and runs fine in the test environment, but in the production environment I get the error shown below.
>
> [code snipped]
>
> # hadoop com/hadoop/export/ExportMe
> Error: Could not find or load main class com.hadoop.export.ExportMe
>
> # ls com/hadoop/export/
> ExportMe.class
