Posted to user@oozie.apache.org by Mohammad Tariq <do...@gmail.com> on 2012/12/08 20:40:12 UTC

Unable to schedule my first job

Hello list,

         I have just started with Oozie and am trying to follow the tutorial
at https://cwiki.apache.org/OOZIE/map-reduce-cookbook.html, but I am facing
some issues: my "Mapper" class is not reachable (as per the JT logs).
Nothing complex, just trying to run the WordCount program. I have specified
the details below in case anybody needs them:

Location of the workflow components directory -
hdfs://localhost:9000/mr

Contents inside /mr -
job.properties
workflow.xml
/lib
/lib/wc.jar

Contents of job.properties -
nameNode=hdfs://localhost:9000
jobTracker=localhost:9001
queueName=default
examplesRoot=mr
oozie.wf.application.path=${nameNode}/${examplesRoot}
inputDir=/mapin
outputDir=/mapout

Contents of workflow.xml -
<workflow-app name='wordcount-wf' xmlns="uri:oozie:workflow:0.2">
    <start to='wordcount'/>
    <action name='wordcount'>
        <map-reduce>
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <prepare>
            </prepare>
            <configuration>
                <property>
                    <name>mapred.job.queue.name</name>
                    <value>${queueName}</value>
                </property>
                <property>
                    <name>mapred.mapper.class</name>
                    <value>WordCount.Map</value>
                </property>
                <property>
                    <name>mapred.reducer.class</name>
                    <value>WordCount.Reduce</value>
                </property>
                <property>
                    <name>mapred.input.dir</name>
                    <value>${inputDir}</value>
                </property>
                <property>
                    <name>mapred.output.dir</name>
                    <value>${outputDir}</value>
                </property>
            </configuration>
        </map-reduce>
        <ok to='end'/>
        <error to='end'/>
    </action>
    <!--kill name='kill'>
        <value>${wf:errorCode("wordcount")}</value>
    </kill-->
    <end name='end'/>
</workflow-app>

Here is the program -
import java.io.IOException;
import java.lang.InterruptedException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class WordCount {
/**
 * The map class of WordCount.
 */
public static class Map extends Mapper<Object, Text, Text, IntWritable> {

    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    public void map(Object key, Text value, Context context)
        throws IOException, InterruptedException {
        StringTokenizer itr = new StringTokenizer(value.toString());
        while (itr.hasMoreTokens()) {
            word.set(itr.nextToken());
            context.write(word, one);
        }
    }
}
/**
 * The reducer class of WordCount
 */
public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
    public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }
        context.write(key, new IntWritable(sum));
    }
}
/**
 * The main entry point.
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = new Job(conf, "WordCount");
    job.setJarByClass(WordCount.class);
    job.setMapperClass(Map.class);
    job.setReducerClass(Reduce.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path("/mapin/"));
    FileOutputFormat.setOutputPath(job, new Path("/mapout/"));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}

This is the error log -

java.lang.RuntimeException: Error in configuring object
	at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:93)
	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:64)
	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:117)
	at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:432)
	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:372)
	at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1121)
	at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: java.lang.reflect.InvocationTargetException
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:601)
	at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:88)
	... 9 more
Caused by: java.lang.RuntimeException: java.lang.RuntimeException: java.lang.ClassNotFoundException: WordCount.Map
	at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:899)
	at org.apache.hadoop.mapred.JobConf.getMapperClass(JobConf.java:947)
	at org.apache.hadoop.mapred.MapRunner.configure(MapRunner.java:34)
	... 14 more
Caused by: java.lang.RuntimeException: java.lang.ClassNotFoundException: WordCount.Map
	at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:867)
	at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:891)
	... 16 more
Caused by: java.lang.ClassNotFoundException: WordCount.Map
	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
	at java.security.AccessController.doPrivileged(Native Method)
	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
	at java.lang.Class.forName0(Native Method)
	at java.lang.Class.forName(Class.java:264)
	at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:820)
	at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:865)
	... 17 more


Here is the command which I am using to submit the workflow -
bin/oozie job -oozie http://localhost:11000/oozie/ -config ~/mr/job.properties -run
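
(In case it is useful for debugging: once the -run command prints a workflow
ID, the action status, error code, and logs should be retrievable through the
standard Oozie CLI calls, e.g.

bin/oozie job -oozie http://localhost:11000/oozie/ -info <job-id>
bin/oozie job -oozie http://localhost:11000/oozie/ -log <job-id>

where <job-id> stands for the ID printed at submission time.)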

Need some help. Many thanks.
(Please pardon my ignorance)

Regards,
    Mohammad Tariq

Re: Unable to schedule my first job

Posted by Mohammad Tariq <do...@gmail.com>.
FYI,

      I am able to run the examples successfully.

Regards,
    Mohammad Tariq



On Sun, Dec 9, 2012 at 2:06 AM, Mohammad Tariq <do...@gmail.com> wrote:

> Thank you so much for the quick response Harsh. Please find answers to
> your questions below (in order):
>
> 1- Yes, it works.
> 2- Yes, jar contains all the classes.
> 3- I am trying to use oozie-3.2.0-incubating + hadoop-1.0.4. Is this
> combination OK?
>
> Regards,
>     Mohammad Tariq
>
>
>
> On Sun, Dec 9, 2012 at 1:59 AM, Harsh J <ha...@cloudera.com> wrote:
>
>> Hi,
>>
>> A couple of things:
>>
>> 1. Can you verify if running your job directly works?
>> 2. Can you verify if your packaged jar does contain the classes?
>> (a command like jar -tvf <jar> should tell you).
>> 3. The Oozie map-reduce action relies on the Stable API, not the New API.
>>
>> --
>> Harsh J
>>
>
>
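
A note on point 2 above, for anyone hitting the same wall later: listing the
jar should show the nested classes under their binary names, something like

jar tf lib/wc.jar
META-INF/MANIFEST.MF
WordCount.class
WordCount$Map.class
WordCount$Reduce.class

(the exact entries depend on how the jar was built). The "$" is significant:
WordCount$Map, not WordCount.Map, is the form Class.forName() can resolve,
which lines up with the "ClassNotFoundException: WordCount.Map" in the log above.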

Re: Unable to schedule my first job

Posted by Mohammad Tariq <do...@gmail.com>.
Oh, yeah. My bad.

Anyways, thanks again.

Regards,
    Mohammad Tariq



On Sun, Dec 9, 2012 at 3:08 AM, Chris White <ch...@gmail.com> wrote:

> It's a Hadoop thing, rather than oozie
>

Re: Unable to schedule my first job

Posted by Chris White <ch...@gmail.com>.
It's a Hadoop thing, rather than oozie


On Sat, Dec 8, 2012 at 4:34 PM, Mohammad Tariq <do...@gmail.com> wrote:

> Thank you so very much Chris. Worked like a charm.
>
> One quick question. Does Oozie-3.2.0 expect one to use the older API?
>
> Thank you Harsh for all the help and support.
>
> Regards,
>     Mohammad Tariq
>
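
To spell out the "Hadoop thing" for the archives: both APIs live in Hadoop
itself. The posted WordCount extends org.apache.hadoop.mapreduce.Mapper (the
new API), while the mapred.mapper.class property belongs to the old ("stable")
org.apache.hadoop.mapred API. For comparison, a minimal sketch of an old-API
mapper (a hypothetical OldApiMap.java, untested):

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class OldApiMap extends MapReduceBase
        implements Mapper<LongWritable, Text, Text, IntWritable> {

    private final static IntWritable one = new IntWritable(1);
    private final Text word = new Text();

    // The old API hands the mapper an OutputCollector and a Reporter
    // instead of the new API's single Context object.
    public void map(LongWritable key, Text value,
                    OutputCollector<Text, IntWritable> output, Reporter reporter)
            throws IOException {
        StringTokenizer itr = new StringTokenizer(value.toString());
        while (itr.hasMoreTokens()) {
            word.set(itr.nextToken());
            output.collect(word, one);
        }
    }
}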

Re: Unable to schedule my first job

Posted by Mohammad Tariq <do...@gmail.com>.
Thank you so very much Chris. Worked like a charm.

One quick question. Does Oozie-3.2.0 expect one to use the older API?

Thank you Harsh for all the help and support.

Regards,
    Mohammad Tariq



On Sun, Dec 9, 2012 at 2:53 AM, Chris White <ch...@gmail.com> wrote:

> You should be able to run new API Map / Reducers - try setting the new-api
> properties in your configuration set:
>
> <property>
>     <name>mapred.mapper.new-api</name>
>     <value>true</value>
> </property>
> <property>
>     <name>mapred.reducer.new-api</name>
>     <value>true</value>
> </property>
>

Re: Unable to schedule my first job

Posted by Chris White <ch...@gmail.com>.
You should be able to run new API Map / Reducers - try setting the new-api
properties in your configuration set:

<property>
    <name>mapred.mapper.new-api</name>
    <value>true</value>
</property>
<property>
    <name>mapred.reducer.new-api</name>
    <value>true</value>
</property>

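With the new-api flags set, the mapper and reducer classes are (as far as I
can tell) looked up under the new-API property names, and a nested class has
to be referenced by its binary name, i.e. with a "$". So the full block would
presumably look like this (a sketch, untested):

<property>
    <name>mapred.mapper.new-api</name>
    <value>true</value>
</property>
<property>
    <name>mapred.reducer.new-api</name>
    <value>true</value>
</property>
<property>
    <name>mapreduce.map.class</name>
    <value>WordCount$Map</value>
</property>
<property>
    <name>mapreduce.reduce.class</name>
    <value>WordCount$Reduce</value>
</property>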

On Sat, Dec 8, 2012 at 4:11 PM, Mohammad Tariq <do...@gmail.com> wrote:

> No luck. Same error
>
> I'll try to dig further. Thanks a lot for the help.
>
> Regards,
>     Mohammad Tariq
>
> [...]

Re: Unable to schedule my first job

Posted by Mohammad Tariq <do...@gmail.com>.
No luck. Same error.

I'll try to dig further. Thanks a lot for the help.
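
One way to dig further is the Oozie CLI itself; assuming the workflow job
id printed at submit time (placeholder below), something like:

bin/oozie job -oozie http://localhost:11000/oozie -info <workflow-job-id>
bin/oozie job -oozie http://localhost:11000/oozie -log <workflow-job-id>

-info lists each action with its external (Hadoop) job id, whose task logs
on the JobTracker carry the full stack trace.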

Regards,
    Mohammad Tariq



On Sun, Dec 9, 2012 at 2:12 AM, Mohammad Tariq <do...@gmail.com> wrote:

> I see. Will give it a shot and see if that works.
>
> Thank you.
>
> Regards,
>     Mohammad Tariq
>
> [...]

Re: Unable to schedule my first job

Posted by Mohammad Tariq <do...@gmail.com>.
I see. Will give it a shot and see if that works.

Thank you.

Regards,
    Mohammad Tariq



On Sun, Dec 9, 2012 at 2:10 AM, Harsh J <ha...@cloudera.com> wrote:

> The combo is OK, but the use of the mapreduce API may not be.
>
> I'd try removing the inner classes, making them regular top-level Map
> and Reduce classes, and reconfiguring/resubmitting.
>
> [...]

Re: Unable to schedule my first job

Posted by Harsh J <ha...@cloudera.com>.
The combo is OK, but the use of the mapreduce API may not be.

I'd try removing the inner classes, making them regular top-level Map
and Reduce classes, and reconfiguring/resubmitting.
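
For reference, a minimal sketch of that shape (class and file names here
are illustrative, not from the original post): a top-level class written
against the stable org.apache.hadoop.mapred API, which is what the
<map-reduce> action configures by default.

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

// Top-level mapper against the stable "mapred" API; with no nesting, the
// value of mapred.mapper.class is simply WordCountMapper.
public class WordCountMapper extends MapReduceBase
        implements Mapper<LongWritable, Text, Text, IntWritable> {

    private static final IntWritable one = new IntWritable(1);
    private final Text word = new Text();

    public void map(LongWritable key, Text value,
                    OutputCollector<Text, IntWritable> output,
                    Reporter reporter) throws IOException {
        // Tokenize the line and emit (word, 1), as in the original job.
        StringTokenizer itr = new StringTokenizer(value.toString());
        while (itr.hasMoreTokens()) {
            word.set(itr.nextToken());
            output.collect(word, one);
        }
    }
}

A matching WordCountReducer would extend MapReduceBase and implement
org.apache.hadoop.mapred.Reducer in the same way.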

On Sun, Dec 9, 2012 at 2:06 AM, Mohammad Tariq <do...@gmail.com> wrote:
> Thank you so much for the quick response, Harsh. Please find answers to
> your questions below (in order):
>
> 1- Yes, it works.
> 2- Yes, jar contains all the classes.
> 3- I am trying to use oozie-3.2.0-incubating + hadoop-1.0.4. Is this
> combination OK?
>
> Regards,
>     Mohammad Tariq
>
> [...]



-- 
Harsh J

Re: Unable to schedule my first job

Posted by Mohammad Tariq <do...@gmail.com>.
Thank you so much for the quick response, Harsh. Please find answers to
your questions below (in order):

1- Yes, it works.
2- Yes, jar contains all the classes.
3- I am trying to use oozie-3.2.0-incubating + hadoop-1.0.4. Is this
combination OK?

Regards,
    Mohammad Tariq



On Sun, Dec 9, 2012 at 1:59 AM, Harsh J <ha...@cloudera.com> wrote:

> Hi,
>
> A couple of things:
>
> 1. Can you verify if running your job directly works?
> 2. Can you verify that your packaged jar contains the classes?
> (A command like jar -tvf <jar> should tell you.)
> 3. The Oozie map-reduce action relies on the Stable API, not the New API.
>
> [...]

Re: Unable to schedule my first job

Posted by Harsh J <ha...@cloudera.com>.
Hi,

A couple of things:

1. Can you verify if running your job directly works?
2. Can you verify that your packaged jar contains the classes?
(A command like jar -tvf <jar> should tell you.)
3. The Oozie map-reduce action relies on the Stable API, not the New API.
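
On point 2, one detail worth checking in that listing: nested classes
compile to class files named with a $ separator, so for the posted
WordCount the jar should contain entries along these lines (a sketch of
the expected names only):

$ jar tf wc.jar
WordCount.class
WordCount$Map.class
WordCount$Reduce.class

The entry is WordCount$Map.class while the workflow configures
WordCount.Map; Class.forName needs the binary name (the $ form) for a
nested class, which would explain the ClassNotFoundException:
WordCount.Map in the error log.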

On Sun, Dec 9, 2012 at 1:10 AM, Mohammad Tariq <do...@gmail.com> wrote:
> [...]



-- 
Harsh J