Posted to common-user@hadoop.apache.org by "Lamia M. Youseff" <ly...@cs.ucsb.edu> on 2007/03/20 22:28:28 UTC

java.io.IOException: Broken pipe error

Hi,
I am new to Hadoop, and I am trying to get it running on a single node.
I am getting an error when starting the secondarynamenode, and also when 
running a simple test such as "bin/hadoop jar hadoop-0.11.2-examples.jar grep /bar/ac 
/tmp/ac foo". I have included my hadoop-site.xml and error log below.
Please advise me on what could have gone wrong.
Thank you,


[root@manatee hadoop]# bin/hadoop namenode -format
Re-format filesystem in /tmp/hadoop-root/dfs/name ? (Y or N) Y
Formatted /tmp/hadoop-root/dfs/name

[root@machinename hadoop]# bin/start-all.sh
starting namenode, logging to /home/lyouseff/benchmarks/Hadoop/hadoop/bin/../logs/hadoop-lyouseff-namenode-machinename.cs.ucsb.edu.out

localhost: starting datanode, logging to /home/lyouseff/benchmarks/Hadoop/hadoop/bin/../logs/hadoop-root-datanode-machinename.cs.ucsb.edu.out
localhost: starting secondarynamenode, logging to /home/lyouseff/benchmarks/Hadoop/hadoop/bin/../logs/hadoop-root-secondarynamenode-machinename.cs.ucsb.edu.out
localhost: Exception in thread "main" java.io.IOException: Broken pipe
localhost:    at org.apache.hadoop.ipc.Client$Connection$2.write(Client.java:185)
localhost:    at java.io.BufferedOutputStream.flush(libgcj.so.7rh)
localhost:    at java.io.DataOutputStream.flush(libgcj.so.7rh)
localhost:    at org.apache.hadoop.ipc.Client$Connection.sendParam(Client.java:320)
localhost:    at org.apache.hadoop.ipc.Client.call(Client.java:457)
localhost:    at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:164)
localhost:    at org.apache.hadoop.dfs.$Proxy0.getProtocolVersion(Unknown Source)
localhost:    at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:248)
localhost:    at org.apache.hadoop.dfs.SecondaryNameNode.<init>(SecondaryNameNode.java:96)
starting jobtracker, logging to /home/lyouseff/benchmarks/Hadoop/hadoop/bin/../logs/hadoop-lyouseff-jobtracker-machinename.cs.ucsb.edu.out
localhost: starting tasktracker, logging to /home/lyouseff/benchmarks/Hadoop/hadoop/bin/../logs/hadoop-root-tasktracker-machinename.cs.ucsb.edu.out
localhost: Exception in thread "main" java.lang.NoClassDefFoundError: org.apache.hadoop.mapred.TaskTracker
localhost:    at java.lang.Class.initializeClass(libgcj.so.7rh)
localhost: Caused by: java.lang.ClassNotFoundException: java.util.concurrent.BlockingQueue not found in
gnu.gcj.runtime.SystemClassLoader{urls=[file:/home/lyouseff/benchmarks/Hadoop/hadoop/conf/,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../hadoop-0.11.2-core.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/commons-cli-2.0-SNAPSHOT.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/commons-codec-1.3.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/commons-httpclient-3.0.1.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/commons-logging-1.0.4.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/commons-logging-api-1.0.4.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/jets3t.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/jetty-5.1.4.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/junit-3.8.1.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/log4j-1.2.13.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/servlet-api.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/jetty-ext/ant.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/jetty-ext/commons-el.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/jetty-ext/jasper-compiler.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/jetty-ext/jasper-runtime.jar,
file:/home/lyouseff/benchmarks/Hadoop/hadoop/bin/../lib/jetty-ext/jsp-api.jar],
parent=gnu.gcj.runtime.ExtensionClassLoader{urls=[], parent=null}}
localhost:    at java.net.URLClassLoader.findClass(libgcj.so.7rh)
localhost:    at gnu.gcj.runtime.SystemClassLoader.findClass(libgcj.so.7rh)
localhost:    at java.lang.ClassLoader.loadClass(libgcj.so.7rh)
localhost:    at java.lang.ClassLoader.loadClass(libgcj.so.7rh)
localhost:    at java.lang.Class.initializeClass(libgcj.so.7rh)


[root@machinename hadoop]# bin/hadoop jar hadoop-0.11.2-examples.jar grep /bar/ac /tmp/ac foo
java.lang.RuntimeException: java.io.IOException: Broken pipe
   at org.apache.hadoop.mapred.JobConf.getWorkingDirectory(JobConf.java:247)
   at org.apache.hadoop.mapred.JobConf.setInputPath(JobConf.java:150)
   at org.apache.hadoop.examples.Grep.main(Grep.java:54)
   at java.lang.reflect.Method.invoke(libgcj.so.7rh)
   at org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:71)
   at org.apache.hadoop.util.ProgramDriver.driver(ProgramDriver.java:143)
   at org.apache.hadoop.examples.ExampleDriver.main(ExampleDriver.java:40)
   at java.lang.reflect.Method.invoke(libgcj.so.7rh)
   at org.apache.hadoop.util.RunJar.main(RunJar.java:155)
Caused by: java.io.IOException: Broken pipe
   at org.apache.hadoop.ipc.Client$Connection$2.write(Client.java:185)
   at java.io.BufferedOutputStream.flush(libgcj.so.7rh)
   at java.io.DataOutputStream.flush(libgcj.so.7rh)
   at org.apache.hadoop.ipc.Client$Connection.sendParam(Client.java:320)
   at org.apache.hadoop.ipc.Client.call(Client.java:457)
   at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:164)
   at org.apache.hadoop.dfs.$Proxy0.getProtocolVersion(Unknown Source)
   at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:248)
   at org.apache.hadoop.dfs.DFSClient.<init>(DFSClient.java:106)
   at org.apache.hadoop.dfs.DistributedFileSystem.initialize(DistributedFileSystem.java:65)
   at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:160)
   at org.apache.hadoop.fs.FileSystem.getNamed(FileSystem.java:119)
   at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:91)
   at org.apache.hadoop.mapred.JobConf.getWorkingDirectory(JobConf.java:243)
   ... 8 more
07/03/20 12:11:53 INFO ipc.Client: java.io.IOException: Connection refused
   at java.io.FilterInputStream.read(libgcj.so.7rh)
   at org.apache.hadoop.ipc.Client$Connection$1.read(Client.java:174)
   at java.io.BufferedInputStream.refill(libgcj.so.7rh)
   at java.io.BufferedInputStream.read(libgcj.so.7rh)
   at java.io.DataInputStream.readFully(libgcj.so.7rh)
   at java.io.DataInputStream.readInt(libgcj.so.7rh)
   at org.apache.hadoop.ipc.Client$Connection.run(Client.java:251)


[root@machinename hadoop]# cat /etc/hosts
# Do not remove the following line, or various programs
# that require network functionality will fail.
127.0.0.1       localhost
127.0.0.1       localhost.localdomain localhost
127.0.0.1       machinename.cs.ucsb.edu machinename
::1                 localhost.localdomain localhost

Re: java.io.IOException: Broken pipe error

Posted by Michael Bieniosek <mi...@powerset.com>.
It looks like your JRE is missing java.util.concurrent.BlockingQueue.

-Michael
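
The libgcj.so.7rh frames in the traces suggest the daemons are being run under GCJ rather than a
standard Sun JDK, and GCJ's class library in that era did not provide java.util.concurrent. A quick,
non-authoritative way to confirm what the JVM that launches Hadoop actually provides is to compile
and run a tiny test class with that same java binary (whatever is first on the PATH, or whatever
JAVA_HOME in conf/hadoop-env.sh points at). This is only a sketch; the class name QueueCheck is an
arbitrary, illustrative choice:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Compile and run this with the same "java" binary that bin/hadoop ends up using.
// If java.util.concurrent is missing (as with older GCJ/libgcj runtimes), this
// fails here at compile or class-load time instead of deep inside a Hadoop daemon.
public class QueueCheck {
    public static void main(String[] args) throws Exception {
        BlockingQueue<String> q = new LinkedBlockingQueue<String>();
        q.put("ok");
        System.out.println("BlockingQueue works: " + q.take());
        // Print which runtime is actually in use, to spot GCJ vs. a standard JDK.
        System.out.println("java.vm.name = " + System.getProperty("java.vm.name"));
        System.out.println("java.version = " + System.getProperty("java.version"));
    }
}

If this fails, the usual remedy is to point JAVA_HOME in conf/hadoop-env.sh at a standard
Java 1.5+ JDK installation and restart the daemons with bin/stop-all.sh and bin/start-all.sh.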
