Posted to user@nutch.apache.org by Uygar BAYAR <uy...@beriltech.com> on 2007/10/04 09:59:08 UTC

Re: Problems running multiple nutch nodes

Hi,
Yes, I get this error too when I parse a segment (I have 4 nodes plus 1
namenode),
105 maps and 15 reduces.

./crawler1/bin/nutch parse sirketce/crawled/segments/20071002163239
task_0001_m_000070_0: log4j:ERROR setFile(null,true) call failed.
task_0001_m_000070_0: java.io.FileNotFoundException:
/home/nutch/crawler1/logs (Is a directory)
task_0001_m_000070_0:   at java.io.FileOutputStream.openAppend(Native
Method)
task_0001_m_000070_0:   at
java.io.FileOutputStream.<init>(FileOutputStream.java:177)
task_0001_m_000070_0:   at
java.io.FileOutputStream.<init>(FileOutputStream.java:102)
task_0001_m_000070_0:   at
org.apache.log4j.FileAppender.setFile(FileAppender.java:289)
task_0001_m_000070_0:   at
org.apache.log4j.FileAppender.activateOptions(FileAppender.java:163)
task_0001_m_000070_0:   at
org.apache.log4j.DailyRollingFileAppender.activateOptions(DailyRollingFileAppender.java:215)
task_0001_m_000070_0:   at
org.apache.log4j.config.PropertySetter.activate(PropertySetter.java:256)
task_0001_m_000070_0:   at
org.apache.log4j.config.PropertySetter.setProperties(PropertySetter.java:132)
task_0001_m_000070_0:   at
org.apache.log4j.config.PropertySetter.setProperties(PropertySetter.java:96)
task_0001_m_000070_0:   at
org.apache.log4j.PropertyConfigurator.parseAppender(PropertyConfigurator.java:654)
task_0001_m_000070_0:   at
org.apache.log4j.PropertyConfigurator.parseCategory(PropertyConfigurator.java:612)
task_0001_m_000070_0:   at
org.apache.log4j.PropertyConfigurator.configureRootCategory(PropertyConfigurator.java:509)
task_0001_m_000070_0:   at
org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:415)
task_0001_m_000070_0:   at
org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:441)
task_0001_m_000070_0:   at
org.apache.log4j.helpers.OptionConverter.selectAndConfigure(OptionConverter.java:468)
task_0001_m_000070_0:   at
org.apache.log4j.LogManager.<clinit>(LogManager.java:122)
task_0001_m_000070_0:   at
org.apache.log4j.Logger.getLogger(Logger.java:104)
task_0001_m_000070_0:   at
org.apache.commons.logging.impl.Log4JLogger.getLogger(Log4JLogger.java:229)
task_0001_m_000070_0:   at
org.apache.commons.logging.impl.Log4JLogger.<init>(Log4JLogger.java:65)
task_0001_m_000070_0:   at
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
task_0001_m_000070_0:   at
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
task_0001_m_000070_0:   at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
task_0001_m_000070_0:   at
java.lang.reflect.Constructor.newInstance(Constructor.java:494)
task_0001_m_000070_0:   at
org.apache.commons.logging.impl.LogFactoryImpl.newInstance(LogFactoryImpl.java:529)
task_0001_m_000070_0:   at
org.apache.commons.logging.impl.LogFactoryImpl.getInstance(LogFactoryImpl.java:235)
task_0001_m_000070_0:   at
org.apache.commons.logging.LogFactory.getLog(LogFactory.java:370)
task_0001_m_000070_0:   at
org.apache.hadoop.mapred.TaskTracker.<clinit>(TaskTracker.java:84)
task_0001_m_000070_0:   at
org.apache.hadoop.mapred.TaskTracker$Child.main(TaskTracker.java:1685)
task_0001_m_000070_0: log4j:ERROR Either File or DatePattern options are not
set for appender [DRFA].
task_0001_m_000070_1: log4j:ERROR setFile(null,true) call failed.
task_0001_m_000070_1: java.io.FileNotFoundException:
/home/nutch/crawler1/logs (Is a directory)
task_0001_m_000070_1:   at java.io.FileOutputStream.openAppend(Native
Method)
task_0001_m_000070_1:   at
java.io.FileOutputStream.<init>(FileOutputStream.java:177)
task_0001_m_000070_1:   at
java.io.FileOutputStream.<init>(FileOutputStream.java:102)
task_0001_m_000070_1:   at
org.apache.log4j.FileAppender.setFile(FileAppender.java:289)
task_0001_m_000070_1:   at
org.apache.log4j.FileAppender.activateOptions(FileAppender.java:163)
task_0001_m_000070_1:   at
org.apache.log4j.DailyRollingFileAppender.activateOptions(DailyRollingFileAppender.java:215)
task_0001_m_000070_1:   at
org.apache.log4j.config.PropertySetter.activate(PropertySetter.java:256)
task_0001_m_000070_1:   at
org.apache.log4j.config.PropertySetter.setProperties(PropertySetter.java:132)
task_0001_m_000070_1:   at
org.apache.log4j.config.PropertySetter.setProperties(PropertySetter.java:96)
task_0001_m_000070_1:   at
org.apache.log4j.PropertyConfigurator.parseAppender(PropertyConfigurator.java:654)
task_0001_m_000070_1:   at
org.apache.log4j.PropertyConfigurator.parseCategory(PropertyConfigurator.java:612)
task_0001_m_000070_1:   at
org.apache.log4j.PropertyConfigurator.configureRootCategory(PropertyConfigurator.java:509)
task_0001_m_000070_1:   at
org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:415)
task_0001_m_000070_1:   at
org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:441)
task_0001_m_000070_1:   at
org.apache.log4j.helpers.OptionConverter.selectAndConfigure(OptionConverter.java:468)
task_0001_m_000070_1:   at
org.apache.log4j.LogManager.<clinit>(LogManager.java:122)
task_0001_m_000070_1:   at
org.apache.log4j.Logger.getLogger(Logger.java:104)
task_0001_m_000070_1:   at
org.apache.commons.logging.impl.Log4JLogger.getLogger(Log4JLogger.java:229)
task_0001_m_000070_1:   at
org.apache.commons.logging.impl.Log4JLogger.<init>(Log4JLogger.java:65)
task_0001_m_000070_1:   at
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
task_0001_m_000070_1:   at
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
task_0001_m_000070_1:   at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
task_0001_m_000070_1:   at
java.lang.reflect.Constructor.newInstance(Constructor.java:494)
task_0001_m_000070_1:   at
org.apache.commons.logging.impl.LogFactoryImpl.newInstance(LogFactoryImpl.java:529)
task_0001_m_000070_1:   at
org.apache.commons.logging.impl.LogFactoryImpl.getInstance(LogFactoryImpl.java:235)
task_0001_m_000070_1:   at
org.apache.commons.logging.LogFactory.getLog(LogFactory.java:370)
task_0001_m_000070_1:   at
org.apache.hadoop.mapred.TaskTracker.<clinit>(TaskTracker.java:84)
task_0001_m_000070_1:   at
org.apache.hadoop.mapred.TaskTracker$Child.main(TaskTracker.java:1685)
task_0001_m_000070_1: log4j:ERROR Either File or DatePattern options are not
set for appender [DRFA].

vikasran wrote:
> 
> I am running into a few issues running Nutch with distributed Hadoop on 2
> nodes:
> 
> Configuration:
> 2 nodes: one is master+slave, the second node is just a slave.
> 
> I set mapred.map.tasks and mapred.reduce.tasks to 2.
> 
> The crawl works fine on a single node (only one node acting as master+slave).
> When I add the second node to conf/slaves, the crawl fails with the message:
> Stopping at depth=0 - no more URLs to fetch
> 
> Please help. I am also seeing a log4j error:
> log4j:ERROR setFile(null,true) call failed.
> java.io.FileNotFoundException: /nutch/search/logs (Is a directory)
>         at java.io.FileOutputStream.openAppend(Native Method)
>         at java.io.FileOutputStream.<init>(FileOutputStream.java:177)
>         at java.io.FileOutputStream.<init>(FileOutputStream.java:102)
>         at org.apache.log4j.FileAppender.setFile(FileAppender.java:289)
> 
> 
> PLEASE HELP
> 



Re: Problems running multiple nutch nodes

Posted by Sami Siren <ss...@gmail.com>.
Uygar BAYAR wrote:
> Hi,
>   Thanks for the solution; it solved my log problem, but not my
> http://www.nabble.com/java.lang.OutOfMemoryError%3A-Requested-array-size-exceeds-VM-limit-tf4562352.html
> problem, and it gives this error message:
> 
> Exception in thread "main" java.io.IOException: Job failed!
>         at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:604)
>         at org.apache.nutch.parse.ParseSegment.parse(ParseSegment.java:131)
>         at org.apache.nutch.parse.ParseSegment.main(ParseSegment.java:149)
> 

If it works with the local jobrunner, you possibly forgot to increase the
memory for the spawned child VM processes in your Hadoop configuration, for example:

<property>
<name>mapred.child.java.opts</name>
<value>-Xmx1000m</value>
</property>
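
In a typical Nutch/Hadoop setup this property would go into conf/hadoop-site.xml
on the node you submit jobs from; the -Xmx value is only an example, so size it
to the RAM available on your slave machines. A minimal sketch of the file:

<!-- conf/hadoop-site.xml: sketch only, the heap size is an assumed example value -->
<configuration>
  <property>
    <name>mapred.child.java.opts</name>
    <value>-Xmx1000m</value>
  </property>
</configuration>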

--
 Sami Siren


Re: Problems running multiple nutch nodes

Posted by Uygar BAYAR <uy...@beriltech.com>.
Hi,
  Thanks for the solution; it solved my log problem, but not my
http://www.nabble.com/java.lang.OutOfMemoryError%3A-Requested-array-size-exceeds-VM-limit-tf4562352.html
problem, and it gives this error message:

Exception in thread "main" java.io.IOException: Job failed!
        at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:604)
        at org.apache.nutch.parse.ParseSegment.parse(ParseSegment.java:131)
        at org.apache.nutch.parse.ParseSegment.main(ParseSegment.java:149)


Doğacan Güney-3 wrote:
> 
> Hi,
> 
> Can you try the solution at
> https://issues.apache.org/jira/browse/NUTCH-508? Your problem sounds
> similar.
> 
> On 10/4/07, Uygar BAYAR <uy...@beriltech.com> wrote:
>>
>> [...]
>>
> 
> 
> -- 
> Doğacan Güney
> 
> 



Re: Problems running multiple nutch nodes

Posted by Doğacan Güney <do...@gmail.com>.
Hi,

Can you try the solution at
https://issues.apache.org/jira/browse/NUTCH-508? Your problem sounds
similar.
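
For context, the setFile(null,true) error in the original report usually means the
DRFA appender's File option ended up pointing at the log directory itself, i.e. the
hadoop.log.file part of the path was missing in the spawned task JVM. In Hadoop's
stock conf/log4j.properties the appender is declared roughly like this (a sketch of
the stock configuration, not the exact change from the issue):

# conf/log4j.properties (sketch; hadoop.log.dir and hadoop.log.file are normally
# passed to the JVM as -D system properties by the Hadoop scripts)
hadoop.log.dir=.
hadoop.log.file=hadoop.log
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n

If hadoop.log.file ends up empty, File resolves to just /home/nutch/crawler1/logs,
which is a directory, hence the FileNotFoundException.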

On 10/4/07, Uygar BAYAR <uy...@beriltech.com> wrote:
>
> [...]
>


-- 
Doğacan Güney