You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-user@hadoop.apache.org by EdwardKing <zh...@neusoft.com> on 2014/06/13 11:04:20 UTC

hadoop-mapreduce-examples fail

I run the hadoop-2.2.0 example under Red Hat Linux 6.3, as follows:

[yarn@localhost bin]$ ./hadoop-daemon.sh start namenode

[yarn@localhost bin]$ ./hadoop-daemon.sh start secondarynamenode
[yarn@localhost bin]$ ./hadoop-daemon.sh start datanode
[yarn@localhost bin]$ ./mr-jobhistory-daemon.sh start historyserver
[yarn@localhost bin]$ ./yarn-daemon.sh start resourcemanager
[yarn@localhost bin]$ ./yarn-daemon.sh start nodemanager
[yarn@localhost bin]$ jps
3387 ResourceManager
3181 DataNode
3287 JobHistoryServer
6499 Jps
3624 NodeManager
3126 SecondaryNameNode
3044 NameNode

[yarn@localhost bin]$ ./yarn jar /opt/yarn/hadoop-2.2.0/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.2.0.jar pi 16 1000

Then I open Firefox and visit http://localhost:8088/cluster, which shows the following result:



Firefox shows that FinalStatus is SUCCEEDED, but the hadoop command window shows the following error. Why? Where is the PI value? What is wrong, and how do I correct it? All log files are in the attachment. Please help me. Thanks.

14/06/13 01:07:51 INFO mapreduce.Job: Task Id : attempt_1402645686978_0001_m_000006_0, Status : FAILED
Error: java.io.IOException: Failed on local exception: java.io.IOException: java.net.SocketTimeoutException: 60000 millis timeout while waiting for channel to be ready for read. ch : java.nio.channels.SocketChannel[connected local=/127.0.0.1:44933 remote=localhost/127.0.0.1:9000]; Host Details : local host is: "localhost.localdomain/127.0.0.1"; destination host is: "localhost":9000; 
 at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:764)
 at org.apache.hadoop.ipc.Client.call(Client.java:1351)
 at org.apache.hadoop.ipc.Client.call(Client.java:1300)
 at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:206)
 at com.sun.proxy.$Proxy10.getBlockLocations(Unknown Source)
 at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
 at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 at java.lang.reflect.Method.invoke(Method.java:606)
 at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:186)
 at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
 at com.sun.proxy.$Proxy10.getBlockLocations(Unknown Source)
 at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:188)
 at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1064)
 at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1054)
 at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1044)
 at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:235)
 at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:202)
 at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:195)
 at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1212)
 at org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:290)
 at org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:286)
 at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
 at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:286)
 at org.apache.hadoop.io.SequenceFile$Reader.openFile(SequenceFile.java:1832)
 at org.apache.hadoop.io.SequenceFile$Reader.<init>(SequenceFile.java:1752)
 at org.apache.hadoop.io.SequenceFile$Reader.<init>(SequenceFile.java:1773)
 at org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader.initialize(SequenceFileRecordReader.java:54)
 at org.apache.hadoop.mapred.MapTask$NewTrackingRecordReader.initialize(MapTask.java:524)
 at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:762)
 at org.apache.hadoop.mapred.MapTask.run(MapTask.java:339)
 at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:162)
 at java.security.AccessController.doPrivileged(Native Method)
 at javax.security.auth.Subject.doAs(Subject.java:415)
 at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1491)
 at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:157)
Caused by: java.io.IOException: java.net.SocketTimeoutException: 60000 millis timeout while waiting for channel to be ready for read. ch : java.nio.channels.SocketChannel[connected local=/127.0.0.1:44933 remote=localhost/127.0.0.1:9000]
 at org.apache.hadoop.ipc.Client$Connection$1.run(Client.java:620)
 at java.security.AccessController.doPrivileged(Native Method)
 at javax.security.auth.Subject.doAs(Subject.java:415)
 at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1491)
 at org.apache.hadoop.ipc.Client$Connection.handleSaslConnectionFailure(Client.java:583)
 at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:667)
 at org.apache.hadoop.ipc.Client$Connection.access$2600(Client.java:314)
 at org.apache.hadoop.ipc.Client.getConnection(Client.java:1399)
 at org.apache.hadoop.ipc.Client.call(Client.java:1318)
 ... 34 more
Caused by: java.net.SocketTimeoutException: 60000 millis timeout while waiting for channel to be ready for read. ch : java.nio.channels.SocketChannel[connected local=/127.0.0.1:44933 remote=localhost/127.0.0.1:9000]
 at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:164)
 at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161)
 at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131)
 at java.io.FilterInputStream.read(FilterInputStream.java:133)
 at java.io.BufferedInputStream.fill(BufferedInputStream.java:235)
 at java.io.BufferedInputStream.read(BufferedInputStream.java:254)
 at java.io.DataInputStream.readInt(DataInputStream.java:387)
 at org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:358)
 at org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:494)
 at org.apache.hadoop.ipc.Client$Connection.access$1700(Client.java:314)
 at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:659)
 at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:655)
 at java.security.AccessController.doPrivileged(Native Method)
 at javax.security.auth.Subject.doAs(Subject.java:415)
 at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1491)
 at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:654)
 ... 37 more

Container killed by the ApplicationMaster.

14/06/13 01:07:52 INFO mapreduce.Job:  map 44% reduce 0%
14/06/13 01:12:46 INFO mapreduce.Job:  map 44% reduce 15%
14/06/13 01:15:05 INFO mapreduce.Job:  map 75% reduce 15%
14/06/13 01:15:28 INFO mapreduce.Job:  map 75% reduce 25%
14/06/13 01:28:48 INFO mapreduce.Job:  map 100% reduce 25%
14/06/13 01:29:04 INFO mapreduce.Job:  map 100% reduce 33%
14/06/13 01:29:14 INFO mapreduce.Job:  map 100% reduce 100%
14/06/13 01:29:43 INFO mapred.ClientServiceDelegate: Application state is completed. FinalApplicationStatus=SUCCEEDED. Redirecting to job history server
java.io.IOException: Job status not available 
 at org.apache.hadoop.mapreduce.Job.updateStatus(Job.java:322)
 at org.apache.hadoop.mapreduce.Job.isComplete(Job.java:599)
 at org.apache.hadoop.mapreduce.Job.monitorAndPrintJob(Job.java:1327)
 at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1289)
 at org.apache.hadoop.examples.QuasiMonteCarlo.estimatePi(QuasiMonteCarlo.java:306)
 at org.apache.hadoop.examples.QuasiMonteCarlo.run(QuasiMonteCarlo.java:354)
 at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
 at org.apache.hadoop.examples.QuasiMonteCarlo.main(QuasiMonteCarlo.java:363)
 at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
 at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 at java.lang.reflect.Method.invoke(Method.java:606)
 at org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:72)
 at org.apache.hadoop.util.ProgramDriver.run(ProgramDriver.java:144)
 at org.apache.hadoop.examples.ExampleDriver.main(ExampleDriver.java:74)
 at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
 at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 at java.lang.reflect.Method.invoke(Method.java:606)
 at org.apache.hadoop.util.RunJar.main(RunJar.java:212)