Posted to common-user@hadoop.apache.org by Sujit Dhamale <su...@gmail.com> on 2012/04/02 19:28:37 UTC

Getting RemoteException while copying data from local machine to HDFS

Getting RemoteException while copying data from local machine to HDFS

Hadoop version: hadoop-0.20.203.0rc1.tar
Operating system: Ubuntu 11.10

hduser@sujit:~/Desktop/data$ jps
6022 NameNode
7100 Jps
6569 JobTracker
6798 TaskTracker
6491 SecondaryNameNode
hduser@sujit:~/Desktop/data$



hduser@sujit:~/Desktop/data$ ls
pg20417.txt  pg4300.txt  pg5000.txt


hduser@sujit:~/Desktop/hadoop/bin$ hadoop dfs -copyFromLocal /home/hduser/Desktop/data /user/hduser/data
12/04/02 22:51:37 WARN hdfs.DFSClient: DataStreamer Exception: org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /user/hduser/data/pg20417.txt could only be replicated to 0 nodes, instead of 1
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1417)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:596)
    at sun.reflect.GeneratedMethodAccessor6.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:523)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1383)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1379)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:396)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1059)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1377)

    at org.apache.hadoop.ipc.Client.call(Client.java:1030)
    at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:224)
    at $Proxy1.addBlock(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:82)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:59)
    at $Proxy1.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.locateFollowingBlock(DFSClient.java:3104)
    at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2975)
    at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:2255)
    at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2446)

12/04/02 22:51:37 WARN hdfs.DFSClient: Error Recovery for block null bad datanode[0] nodes == null
12/04/02 22:51:37 WARN hdfs.DFSClient: Could not get block locations. Source file "/user/hduser/data/pg20417.txt" - Aborting...
copyFromLocal: java.io.IOException: File /user/hduser/data/pg20417.txt could only be replicated to 0 nodes, instead of 1
12/04/02 22:51:37 ERROR hdfs.DFSClient: Exception closing file /user/hduser/data/pg20417.txt : org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /user/hduser/data/pg20417.txt could only be replicated to 0 nodes, instead of 1
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1417)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:596)
    at sun.reflect.GeneratedMethodAccessor6.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:523)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1383)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1379)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:396)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1059)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1377)

org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /user/hduser/data/pg20417.txt could only be replicated to 0 nodes, instead of 1
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1417)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:596)
    at sun.reflect.GeneratedMethodAccessor6.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:523)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1383)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1379)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:396)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1059)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1377)

    at org.apache.hadoop.ipc.Client.call(Client.java:1030)
    at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:224)
    at $Proxy1.addBlock(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:82)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:59)
    at $Proxy1.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.locateFollowingBlock(DFSClient.java:3104)
    at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:2975)
    at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2000(DFSClient.java:2255)
    at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2446)
hduser@sujit:~/Desktop/hadoop/bin

Re: Getting RemoteException while copying data from local machine to HDFS

Posted by Harsh J <ha...@cloudera.com>.
Per your jps, you don't have a DataNode running.

> hduser@sujit:~/Desktop/data$ jps
> 6022 NameNode
> 7100 Jps
> 6569 JobTracker
> 6798 TaskTracker
> 6491 SecondaryNameNode
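
For reference, a quick way to confirm that and to see why the DataNode did not come up (a minimal sketch, assuming a default single-node tarball install with HADOOP_HOME pointing at your hadoop-0.20.203.0 directory and logs in the stock $HADOOP_HOME/logs location):

  # confirm the DataNode process really is missing
  jps | grep -i datanode || echo "DataNode is not running"

  # the DataNode log usually says why it failed to start
  # (file name pattern: hadoop-<user>-datanode-<host>.log)
  tail -n 50 $HADOOP_HOME/logs/hadoop-hduser-datanode-*.log

  # try bringing HDFS back up, then re-check with jps
  $HADOOP_HOME/bin/start-dfs.sh
  jps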

Please read http://wiki.apache.org/hadoop/CouldOnlyBeReplicatedTo to
solve this. You most likely need to also read:
http://search-hadoop.com/m/l4JWggvLE2
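
In case it helps: on a freshly set up single-node 0.20.x install, one of the most common culprits is a NameNode that was re-formatted after the DataNode storage directory already existed, which shows up as an "Incompatible namespaceIDs" error in the DataNode log. If that is what your log says and the data in HDFS is disposable, a rough recovery sketch (assuming the stock hadoop.tmp.dir of /tmp/hadoop-hduser; substitute whatever you set in core-site.xml) looks like this:

  # stop all daemons first
  $HADOOP_HOME/bin/stop-all.sh

  # wipe the NameNode/DataNode storage -- this throws away everything in HDFS
  rm -rf /tmp/hadoop-hduser

  # re-format HDFS and bring the daemons back up
  $HADOOP_HOME/bin/hadoop namenode -format
  $HADOOP_HOME/bin/start-all.sh
  jps    # DataNode should now show up

If the data in HDFS matters, the gentler route is to edit the namespaceID in the DataNode's dfs/data/current/VERSION file to match the NameNode's instead of wiping the directory.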

-- 
Harsh J