Posted to hdfs-issues@hadoop.apache.org by "Ashutosh Gupta (Jira)" <ji...@apache.org> on 2022/10/10 02:07:00 UTC

[jira] [Updated] (HDFS-16801) TestObserverNode failing intermittently

     [ https://issues.apache.org/jira/browse/HDFS-16801?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Ashutosh Gupta updated HDFS-16801:
----------------------------------
    Affects Version/s: 3.3.4
                       3.3.3

> TestObserverNode failing intermittently
> ---------------------------------------
>
>                 Key: HDFS-16801
>                 URL: https://issues.apache.org/jira/browse/HDFS-16801
>             Project: Hadoop HDFS
>          Issue Type: Bug
>          Components: namenode, test
>    Affects Versions: 3.3.3, 3.3.4
>            Reporter: Ashutosh Gupta
>            Priority: Major
>
> TestObserverNode is failing intermittently. For example, testMkdirsRaceWithObserverRead fails with java.net.ConnectException (Connection refused) when the test client cannot reach the NameNode RPC endpoint:
>
> {code:java}
> [ERROR] testMkdirsRaceWithObserverRead(org.apache.hadoop.hdfs.server.namenode.ha.TestObserverNode)  Time elapsed: 311.199 s  <<< ERROR!
> java.net.ConnectException: Call From 75e69096caae/172.17.0.2 to localhost:12852 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
> 	at sun.reflect.GeneratedConstructorAccessor118.newInstance(Unknown Source)
> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
> 	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:948)
> 	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:863)
> 	at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1649)
> 	at org.apache.hadoop.ipc.Client.call(Client.java:1590)
> 	at org.apache.hadoop.ipc.Client.call(Client.java:1487)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139)
> 	at com.sun.proxy.$Proxy31.mkdirs(Unknown Source)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:677)
> 	at sun.reflect.GeneratedMethodAccessor22.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:498)
> 	at org.apache.hadoop.hdfs.server.namenode.ha.ObserverReadProxyProvider$ObserverReadInvocationHandler.invoke(ObserverReadProxyProvider.java:518)
> 	at com.sun.proxy.$Proxy32.mkdirs(Unknown Source)
> 	at sun.reflect.GeneratedMethodAccessor22.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:498)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366)
> 	at com.sun.proxy.$Proxy32.mkdirs(Unknown Source)
> 	at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2558)
> 	at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:2534)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem$27.doCall(DistributedFileSystem.java:1484)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem$27.doCall(DistributedFileSystem.java:1481)
> 	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:1498)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.mkdir(DistributedFileSystem.java:1457)
> 	at org.apache.hadoop.hdfs.server.namenode.ha.TestObserverNode.testMkdirsRaceWithObserverRead(TestObserverNode.java:561)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:498)
> 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
> 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
> 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
> 	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
> 	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
> 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
> 	at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
> 	at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
> 	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
> 	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
> 	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
> 	at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
> 	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
> 	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
> 	at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
> 	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
> 	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
> 	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
> 	at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
> 	at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
> 	at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
> 	at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:290)
> 	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
> 	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
> 	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:384)
> 	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:345)
> 	at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:126)
> 	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:418)
> Caused by: java.net.ConnectException: Connection refused
> 	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
> 	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:716)
> 	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:205)
> 	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:601)
> 	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:736)
> 	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:858)
> 	at org.apache.hadoop.ipc.Client$Connection.access$3700(Client.java:436)
> 	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1710)
> 	at org.apache.hadoop.ipc.Client.call(Client.java:1534)
> 	... 59 more
>  {code}
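> For reference, a minimal sketch of the client call at the top of this stack, assuming a DistributedFileSystem handle from the test's mini cluster (the dfs variable and the path below are illustrative placeholders, not the actual test code):
> {code:java}
> // Hypothetical sketch only -- not the actual test body. It mirrors the call
> // path in the stack trace above: DistributedFileSystem.mkdir ->
> // DFSClient.primitiveMkdir -> mkdirs RPC via ObserverReadProxyProvider.
> import org.apache.hadoop.fs.Path;
> import org.apache.hadoop.fs.permission.FsPermission;
> import org.apache.hadoop.hdfs.DistributedFileSystem;
>
> public class MkdirsCallSketch {
>   static void mkdirViaClient(DistributedFileSystem dfs) throws Exception {
>     // If no NameNode is listening on the resolved RPC address (localhost:12852
>     // in the failing run), the RPC layer surfaces
>     // java.net.ConnectException: Connection refused, as seen above.
>     dfs.mkdir(new Path("/testMkdirsRace"), FsPermission.getDefault());
>   }
> }
> {code}
> The "Connection refused" at the bottom of the trace suggests that nothing was listening on localhost:12852 at the time of the call, so the retry and observer-read proxy layers could not complete the mkdirs RPC.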


