You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-user@hadoop.apache.org by "Kang, Seunghwa" <s....@gatech.edu> on 2009/06/08 09:38:19 UTC

"java.io.IOException: Could not read from stream" & "java.io.IOException: Bad connect ack with firstBadLink 130.207.114.164:50010"

Hello,

I am using Hadoop 0.19.1, and my job (which takes about 10 hours to finish) failed in the middle of execution (once at 21% complete and a second time at 81% complete) with error messages like

java.io.IOException: Could not read from stream
...
java.io.IOException: Bad connect ack with firstBadLink 130.207.114.164:50010"
...

Also, if I run netstat on 130.207.114.164, I can see that many TCP connections are in the TIME_WAIT state.

Do you have any ideas? The program is very simple and just adds a time stamp to every line of the input data.

I attached the output error message.

Thank you very much,

09/06/07 23:17:03 INFO mapred.FileInputFormat: Total input paths to process : 1
09/06/07 23:18:31 INFO mapred.JobClient: Running job: job_200905211624_0024
09/06/07 23:18:32 INFO mapred.JobClient:  map 0% reduce 0%
09/06/07 23:19:16 INFO mapred.JobClient: Task Id : attempt_200905211624_0024_m_000016_0, Status : FAILED
java.io.IOException: Could not read from stream
        at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
        at java.io.DataInputStream.readByte(DataInputStream.java:248)
        at org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
        at org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
        at org.apache.hadoop.io.Text.readString(Text.java:400)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)

09/06/07 23:19:18 INFO mapred.JobClient: Task Id : attempt_200905211624_0024_m_000021_0, Status : FAILED
java.io.IOException: Could not read from stream
        at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
        at java.io.DataInputStream.readByte(DataInputStream.java:248)
        at org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
        at org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
        at org.apache.hadoop.io.Text.readString(Text.java:400)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)

09/06/07 23:19:43 INFO mapred.JobClient: Task Id : attempt_200905211624_0024_m_000016_1, Status : FAILED
java.io.IOException: Could not read from stream
        at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
        at java.io.DataInputStream.readByte(DataInputStream.java:248)
        at org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
        at org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
        at org.apache.hadoop.io.Text.readString(Text.java:400)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)

09/06/07 23:19:45 INFO mapred.JobClient: Task Id : attempt_200905211624_0024_m_000021_1, Status : FAILED
java.io.IOException: Could not read from stream
        at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
        at java.io.DataInputStream.readByte(DataInputStream.java:248)
        at org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
        at org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
        at org.apache.hadoop.io.Text.readString(Text.java:400)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)

09/06/07 23:20:11 INFO mapred.JobClient: Task Id : attempt_200905211624_0024_m_000016_2, Status : FAILED
java.io.IOException: Bad connect ack with firstBadLink 130.207.114.164:50010
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2780)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)

09/06/07 23:20:12 INFO mapred.JobClient: Task Id : attempt_200905211624_0024_m_000055_0, Status : FAILED
java.io.IOException: Bad connect ack with firstBadLink 130.207.114.162:50010
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2780)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)

09/06/07 23:20:14 INFO mapred.JobClient: Task Id : attempt_200905211624_0024_m_000021_2, Status : FAILED
java.io.IOException: Could not read from stream
        at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
        at java.io.DataInputStream.readByte(DataInputStream.java:248)
        at org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
        at org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
        at org.apache.hadoop.io.Text.readString(Text.java:400)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)

09/06/07 23:20:37 INFO mapred.JobClient: Task Id : attempt_200905211624_0024_m_000115_0, Status : FAILED
java.io.IOException: Bad connect ack with firstBadLink 130.207.114.163:50010
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2780)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)

09/06/07 23:20:40 INFO mapred.JobClient: Task Id : attempt_200905211624_0024_m_000055_1, Status : FAILED
java.io.IOException: Could not read from stream
        at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
        at java.io.DataInputStream.readByte(DataInputStream.java:248)
        at org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
        at org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
        at org.apache.hadoop.io.Text.readString(Text.java:400)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)

java.io.IOException: Job failed!
        at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:1232)
        at org.mysrc.AddTSMR.run(AddTSMR.java:60)
        at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:65)
        at org.mysrc.AddTSMR.main(AddTSMR.java:66)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:165)
        at org.apache.hadoop.mapred.JobShell.run(JobShell.java:54)
        at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:65)
        at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:79)
        at org.apache.hadoop.mapred.JobShell.main(JobShell.java:68)

Re: "java.io.IOException: Could not read from stream" & "java.io.IOException: Bad connect ack with firstBadLink 130.207.114.164:50010"

Posted by Taeho Kang <tk...@gmail.com>.
If "The program is very simple and just adds time stamp to the every line of
input data." is what your job actually does, then you may have to think
about changing your jobs.
Given that your job takes 10 hours to finish, I guess you have tons of data
to process (maybe hundreds of gigabytes?). The problem is that your data doesn't
get trimmed down in the mapper stage, so all of it gets sent to the reducers,
which probably aren't as numerous as your mappers.

Think about the situation where multiple machines (where the mappers run)
simultaneously send tens of gigabytes to only a few machines (where the
reducers run)... I suspect one of your machines has been overloaded.

You might want to increase the number of reducers,
or you may have to change your job so that the mapper cuts out any
unnecessary data during the map stage.

Hope this helps,

/Taeho




On Mon, Jun 8, 2009 at 4:38 PM, Kang, Seunghwa <s....@gatech.edu> wrote:

> Hello,
>
> I am using Hadoop 0.19.1 and my job (this will run for 10 hours to finish)
> failed in the middle of execution (once 21% and second 81% finished) with
> the error messages like
>
> java.io.IOException: Could not read from stream
> ...
> java.io.IOException: Bad connect ack with firstBadLink
> 130.207.114.164:50010"
> ...
>
> Also, if I type netstat in 130.207.114.164, I can see many tcp connections
> are in TIME_WAIT state.
>
> Do you have any ideas? The program is very simple and just adds time stamp
> to the every line of input data.
>
> I attached the output error message.
>
> Thank you very much,
>
> 09/06/07 23:17:03 INFO mapred.FileInputFormat: Total input paths to process
> : 1
> 09/06/07 23:18:31 INFO mapred.JobClient: Running job: job_200905211624_0024
> 09/06/07 23:18:32 INFO mapred.JobClient:  map 0% reduce 0%
> 09/06/07 23:19:16 INFO mapred.JobClient: Task Id :
> attempt_200905211624_0024_m_000016_0, Status : FAILED
> java.io.IOException: Could not read from stream
>        at
> org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
>        at java.io.DataInputStream.readByte(DataInputStream.java:248)
>        at
> org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
>        at
> org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
>        at org.apache.hadoop.io.Text.readString(Text.java:400)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)
>
> 09/06/07 23:19:18 INFO mapred.JobClient: Task Id :
> attempt_200905211624_0024_m_000021_0, Status : FAILED
> java.io.IOException: Could not read from stream
>        at
> org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
>        at java.io.DataInputStream.readByte(DataInputStream.java:248)
>        at
> org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
>        at
> org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
>        at org.apache.hadoop.io.Text.readString(Text.java:400)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)
>
> 09/06/07 23:19:43 INFO mapred.JobClient: Task Id :
> attempt_200905211624_0024_m_000016_1, Status : FAILED
> java.io.IOException: Could not read from stream
>        at
> org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
>        at java.io.DataInputStream.readByte(DataInputStream.java:248)
>        at
> org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
>        at
> org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
>        at org.apache.hadoop.io.Text.readString(Text.java:400)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)
>
> 09/06/07 23:19:45 INFO mapred.JobClient: Task Id :
> attempt_200905211624_0024_m_000021_1, Status : FAILED
> java.io.IOException: Could not read from stream
>        at
> org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
>        at java.io.DataInputStream.readByte(DataInputStream.java:248)
>        at
> org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
>        at
> org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
>        at org.apache.hadoop.io.Text.readString(Text.java:400)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)
>
> 09/06/07 23:20:11 INFO mapred.JobClient: Task Id :
> attempt_200905211624_0024_m_000016_2, Status : FAILED
> java.io.IOException: Bad connect ack with firstBadLink
> 130.207.114.164:50010
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2780)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)
>
> 09/06/07 23:20:12 INFO mapred.JobClient: Task Id :
> attempt_200905211624_0024_m_000055_0, Status : FAILED
> java.io.IOException: Bad connect ack with firstBadLink
> 130.207.114.162:50010
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2780)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)
>
> 09/06/07 23:20:14 INFO mapred.JobClient: Task Id :
> attempt_200905211624_0024_m_000021_2, Status : FAILED
> java.io.IOException: Could not read from stream
>        at
> org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
>        at java.io.DataInputStream.readByte(DataInputStream.java:248)
>        at
> org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
>        at
> org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
>        at org.apache.hadoop.io.Text.readString(Text.java:400)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)
>
> 09/06/07 23:20:37 INFO mapred.JobClient: Task Id :
> attempt_200905211624_0024_m_000115_0, Status : FAILED
> java.io.IOException: Bad connect ack with firstBadLink
> 130.207.114.163:50010
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2780)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)
>
> 09/06/07 23:20:40 INFO mapred.JobClient: Task Id :
> attempt_200905211624_0024_m_000055_1, Status : FAILED
> java.io.IOException: Could not read from stream
>        at
> org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:119)
>        at java.io.DataInputStream.readByte(DataInputStream.java:248)
>        at
> org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
>        at
> org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
>        at org.apache.hadoop.io.Text.readString(Text.java:400)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.createBlockOutputStream(DFSClient.java:2778)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2703)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:1996)
>        at
> org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2183)
>
> java.io.IOException: Job failed!
>        at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:1232)
>        at org.mysrc.AddTSMR.run(AddTSMR.java:60)
>        at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:65)
>        at org.mysrc.AddTSMR.main(AddTSMR.java:66)
>        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>        at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
>        at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>        at java.lang.reflect.Method.invoke(Method.java:597)
>        at org.apache.hadoop.util.RunJar.main(RunJar.java:165)
>        at org.apache.hadoop.mapred.JobShell.run(JobShell.java:54)
>        at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:65)
>        at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:79)
>        at org.apache.hadoop.mapred.JobShell.main(JobShell.java:68)
>