You are viewing a plain text version of this content. The canonical link for it is here.
Posted to builds@lucene.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2020/04/30 13:38:57 UTC
[JENKINS] Lucene-Solr-Tests-8.x - Build # 1464 - Still Failing
Build: https://builds.apache.org/job/Lucene-Solr-Tests-8.x/1464/
1 tests failed.
FAILED: org.apache.solr.index.hdfs.CheckHdfsIndexTest.doTest
Error Message:
Error from server at http://127.0.0.1:41339/ydtf/collection1: java.lang.NullPointerException at org.apache.solr.handler.admin.SystemInfoHandler.getSecurityInfo(SystemInfoHandler.java:326) at org.apache.solr.handler.admin.SystemInfoHandler.handleRequestBody(SystemInfoHandler.java:146) at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:211) at org.apache.solr.core.SolrCore.execute(SolrCore.java:2600) at org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:803) at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:582) at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:432) at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:362) at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604) at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166) at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:545) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1610) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1300) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:485) at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1580) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1215) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at 
org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:767) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:500) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:547) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103) at org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938) at java.lang.Thread.run(Thread.java:748)
Stack Trace:
org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error from server at http://127.0.0.1:41339/ydtf/collection1: java.lang.NullPointerException
at org.apache.solr.handler.admin.SystemInfoHandler.getSecurityInfo(SystemInfoHandler.java:326)
at org.apache.solr.handler.admin.SystemInfoHandler.handleRequestBody(SystemInfoHandler.java:146)
at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:211)
at org.apache.solr.core.SolrCore.execute(SolrCore.java:2600)
at org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:803)
at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:582)
at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:432)
at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:362)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:545)
at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1610)
at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1300)
at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:485)
at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1580)
at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1215)
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322)
at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:767)
at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
at org.eclipse.jetty.server.Server.handle(Server.java:500)
at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383)
at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:547)
at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375)
at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273)
at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)
at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103)
at org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117)
at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
at java.lang.Thread.run(Thread.java:748)
at __randomizedtesting.SeedInfo.seed([DFA1F9CF887845A1:78E5416BE5C35618]:0)
at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:665)
at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:265)
at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:248)
at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:211)
at org.apache.solr.client.solrj.SolrClient.query(SolrClient.java:1003)
at org.apache.solr.client.solrj.SolrClient.query(SolrClient.java:1018)
at org.apache.solr.index.hdfs.CheckHdfsIndexTest.doTest(CheckHdfsIndexTest.java:120)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1081)
at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1053)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at java.lang.Thread.run(Thread.java:748)
Build Log:
[...truncated 15440 lines...]
[junit4] Suite: org.apache.solr.index.hdfs.CheckHdfsIndexTest
[junit4] 2> 2428042 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.a.s.SolrTestCase Setting 'solr.default.confdir' system property to test-framework derived value of '/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/server/solr/configsets/_default/conf'
[junit4] 2> 2428042 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
[junit4] 2> 2428042 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001/data-dir-292-001
[junit4] 2> 2428042 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
[junit4] 2> 2428043 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN)
[junit4] 2> 2428043 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /ydtf/
[junit4] 1> Formatting using clusterid: testClusterID
[junit4] 2> 2428104 WARN (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 2428105 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 1.8.0_191-b12
[junit4] 2> 2428108 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 2428108 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 2428108 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 2428109 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@127397c9{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 2428217 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@373411ab{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/jetty-localhost_localdomain-41873-hadoop-hdfs-3_2_0-tests_jar-_-any-4901863484611699201.dir/webapp/,AVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/hdfs}
[junit4] 2> 2428218 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@15513968{HTTP/1.1, (http/1.1)}{localhost.localdomain:41873}
[junit4] 2> 2428218 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.Server Started @2428253ms
[junit4] 2> 2428291 WARN (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 2428292 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 1.8.0_191-b12
[junit4] 2> 2428294 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 2428294 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 2428294 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 2428296 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7ba9a4f4{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 2428394 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@48333554{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/jetty-localhost-38347-hadoop-hdfs-3_2_0-tests_jar-_-any-8620551481574773308.dir/webapp/,AVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/datanode}
[junit4] 2> 2428394 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@7854ad40{HTTP/1.1, (http/1.1)}{localhost:38347}
[junit4] 2> 2428394 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.Server Started @2428429ms
[junit4] 2> 2428423 WARN (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 2428424 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 1.8.0_191-b12
[junit4] 2> 2428432 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 2428432 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 2428432 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 2428434 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@505d91aa{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 2428540 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@4a63182b{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/jetty-localhost-35025-hadoop-hdfs-3_2_0-tests_jar-_-any-1360689847689684252.dir/webapp/,AVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/datanode}
[junit4] 2> 2428540 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@782629c{HTTP/1.1, (http/1.1)}{localhost:35025}
[junit4] 2> 2428540 INFO (SUITE-CheckHdfsIndexTest-seed#[DFA1F9CF887845A1]-worker) [ ] o.e.j.s.Server Started @2428575ms
[junit4] 2> 2428632 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xf2ff9264a8340767: Processing first storage report for DS-ca321339-eef0-4ce3-b3ac-ce46274d1130 from datanode 6e2b4644-dafd-438b-97ff-562435c392ea
[junit4] 2> 2428632 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xf2ff9264a8340767: from storage DS-ca321339-eef0-4ce3-b3ac-ce46274d1130 node DatanodeRegistration(127.0.0.1:43445, datanodeUuid=6e2b4644-dafd-438b-97ff-562435c392ea, infoPort=45389, infoSecurePort=0, ipcPort=39211, storageInfo=lv=-57;cid=testClusterID;nsid=1501910610;c=1588252808960), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 2428632 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xf2ff9264a8340767: Processing first storage report for DS-2803a8ee-6347-4a61-b3a3-a2d1f8d1633c from datanode 6e2b4644-dafd-438b-97ff-562435c392ea
[junit4] 2> 2428632 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xf2ff9264a8340767: from storage DS-2803a8ee-6347-4a61-b3a3-a2d1f8d1633c node DatanodeRegistration(127.0.0.1:43445, datanodeUuid=6e2b4644-dafd-438b-97ff-562435c392ea, infoPort=45389, infoSecurePort=0, ipcPort=39211, storageInfo=lv=-57;cid=testClusterID;nsid=1501910610;c=1588252808960), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 2428757 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x844fc290191cab8: Processing first storage report for DS-0d81bc36-7f7a-4a31-8f1b-6d043b7718ec from datanode b32f28a3-bc8b-4c80-bdeb-47910a2d34cb
[junit4] 2> 2428757 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x844fc290191cab8: from storage DS-0d81bc36-7f7a-4a31-8f1b-6d043b7718ec node DatanodeRegistration(127.0.0.1:34307, datanodeUuid=b32f28a3-bc8b-4c80-bdeb-47910a2d34cb, infoPort=43735, infoSecurePort=0, ipcPort=42061, storageInfo=lv=-57;cid=testClusterID;nsid=1501910610;c=1588252808960), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 2428757 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x844fc290191cab8: Processing first storage report for DS-9722c874-5fc7-44d3-837d-e86b8bc6cb43 from datanode b32f28a3-bc8b-4c80-bdeb-47910a2d34cb
[junit4] 2> 2428757 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x844fc290191cab8: from storage DS-9722c874-5fc7-44d3-837d-e86b8bc6cb43 node DatanodeRegistration(127.0.0.1:34307, datanodeUuid=b32f28a3-bc8b-4c80-bdeb-47910a2d34cb, infoPort=43735, infoSecurePort=0, ipcPort=42061, storageInfo=lv=-57;cid=testClusterID;nsid=1501910610;c=1588252808960), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 2428853 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 2428854 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
[junit4] 2> 2428854 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer Starting server
[junit4] 2> 2428954 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer start zk server on port:34973
[junit4] 2> 2428954 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:34973
[junit4] 2> 2428954 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:34973
[junit4] 2> 2428954 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 34973
[junit4] 2> 2428955 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 2428960 INFO (zkConnectionManagerCallback-29484-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 2428960 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 2428968 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 2428972 INFO (zkConnectionManagerCallback-29486-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 2428972 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 2428973 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
[junit4] 2> 2428975 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/schema.xml to /configs/conf1/schema.xml
[junit4] 2> 2428976 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
[junit4] 2> 2428981 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
[junit4] 2> 2428981 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
[junit4] 2> 2428982 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
[junit4] 2> 2428983 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
[junit4] 2> 2428983 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
[junit4] 2> 2428984 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
[junit4] 2> 2428984 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
[junit4] 2> 2428985 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
[junit4] 2> 2428986 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly asked otherwise
[junit4] 2> 2429072 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.s.h.g.GzipHandler minGzipSize of 0 is inefficient for short content, break even is size 23
[junit4] 2> 2429072 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 2429072 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 2 ...
[junit4] 2> 2429072 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 1.8.0_191-b12
[junit4] 2> 2429081 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 2429081 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 2429081 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 2429083 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@75ff82fb{/ydtf,null,AVAILABLE}
[junit4] 2> 2429083 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.s.AbstractConnector Started ServerConnector@62efee36{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:42375}
[junit4] 2> 2429083 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.s.Server Started @2429118ms
[junit4] 2> 2429083 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=hdfs://localhost.localdomain:33397/hdfs__localhost.localdomain_33397__home_jenkins_jenkins-slave_workspace_Lucene-Solr-Tests-8.x_solr_build_solr-core_test_J2_temp_solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001_tempDir-002_control_data, hostContext=/ydtf, hostPort=42375, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001/control-001/cores}
[junit4] 2> 2429083 ERROR (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 2429083 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 2429083 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 8.6.0
[junit4] 2> 2429083 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 2429083 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr
[junit4] 2> 2429083 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2020-04-30T13:20:09.990Z
[junit4] 2> 2429084 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 2429087 INFO (zkConnectionManagerCallback-29488-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 2429087 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 2429188 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
[junit4] 2> 2429188 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001/control-001/solr.xml
[junit4] 2> 2429191 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 2429191 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 2429192 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@312a5e10, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 2429568 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
[junit4] 2> 2429568 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@4c04423e[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 2429568 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@4c04423e[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 2429571 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@1b79a04d[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 2429571 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@1b79a04d[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 2429572 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34973/solr
[junit4] 2> 2429573 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 2429574 INFO (zkConnectionManagerCallback-29499-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 2429574 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 2429676 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 2429681 INFO (zkConnectionManagerCallback-29501-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 2429681 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 2429731 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:42375_ydtf
[junit4] 2> 2429732 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.c.Overseer Overseer (id=73674655713394692-127.0.0.1:42375_ydtf-n_0000000000) starting
[junit4] 2> 2429737 INFO (OverseerStateUpdate-73674655713394692-127.0.0.1:42375_ydtf-n_0000000000) [n:127.0.0.1:42375_ydtf ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:42375_ydtf
[junit4] 2> 2429737 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:42375_ydtf
[junit4] 2> 2429739 INFO (zkCallback-29500-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 2429741 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.p.PackageLoader /packages.json updated to version -1
[junit4] 2> 2429741 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.c.CoreContainer Not all security plugins configured! authentication=disabled authorization=disabled. Consider configuring authentication/authorization before exposing Solr to users internal or external. See https://s.apache.org/solrsecurity for more info. Solr is only as secure as you make it.
[junit4] 2> 2429755 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 2429774 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@312a5e10
[junit4] 2> 2429781 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@312a5e10
[junit4] 2> 2429781 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@312a5e10
[junit4] 2> 2429782 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [n:127.0.0.1:42375_ydtf ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001/control-001/cores
[junit4] 2> 2429793 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 2429794 INFO (zkConnectionManagerCallback-29518-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 2429794 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 2429795 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 2429796 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34973/solr ready
[junit4] 2> 2429797 INFO (qtp1191180179-46174) [n:127.0.0.1:42375_ydtf ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:42375_ydtf&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 2429799 INFO (OverseerThreadFactory-29508-thread-1-processing-n:127.0.0.1:42375_ydtf) [n:127.0.0.1:42375_ydtf ] o.a.s.c.a.c.CreateCollectionCmd Create collection control_collection
[junit4] 2> 2429904 INFO (qtp1191180179-46176) [n:127.0.0.1:42375_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 2429906 INFO (qtp1191180179-46177) [n:127.0.0.1:42375_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 2429908 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf x:control_collection_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 2429909 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf x:control_collection_shard1_replica_n1 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 2430919 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.6.0
[junit4] 2> 2430933 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema Schema name=test
[junit4] 2> 2431033 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 2431044 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' using configuration from configset conf1, trusted=true
[junit4] 2> 2431044 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry 'solr.core.control_collection.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@312a5e10
[junit4] 2> 2431045 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:33397/solr_hdfs_home
[junit4] 2> 2431045 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 2431045 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001/control-001/cores/control_collection_shard1_replica_n1], dataDir=[hdfs://localhost.localdomain:33397/solr_hdfs_home/control_collection/core_node2/data/]
[junit4] 2> 2431046 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:33397/solr_hdfs_home/control_collection/core_node2/data/snapshot_metadata
[junit4] 2> 2431053 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 2431053 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 2431053 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Creating new global HDFS BlockCache
[junit4] 2> 2431088 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 2431089 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:33397/solr_hdfs_home/control_collection/core_node2/data
[junit4] 2> 2431104 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:33397/solr_hdfs_home/control_collection/core_node2/data/index
[junit4] 2> 2431110 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 2431110 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 2431113 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 2431114 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=43, maxMergeAtOnceExplicit=49, maxMergedSegmentMB=73.1484375, floorSegmentMB=0.96484375, forceMergeDeletesPctAllowed=7.1606453137305595, segmentsPerTier=18.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0, deletesPctAllowed=42.07891698716507]
[junit4] 2> 2431150 WARN (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 2431200 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 2431201 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 2431201 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
[junit4] 2> 2431215 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 2431215 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 2431217 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=4, maxMergeAtOnceExplicit=6, maxMergedSegmentMB=71.0595703125, floorSegmentMB=1.2822265625, forceMergeDeletesPctAllowed=25.390962250498777, segmentsPerTier=33.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.7501584248727483, deletesPctAllowed=49.92592499571357]
[junit4] 2> 2431230 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@56d7d327[control_collection_shard1_replica_n1] main]
[junit4] 2> 2431231 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 2431232 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 2431232 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000 ms
[junit4] 2> 2431232 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1665403780741464064
[junit4] 2> 2431235 INFO (searcherExecutor-29520-thread-1-processing-n:127.0.0.1:42375_ydtf x:control_collection_shard1_replica_n1 c:control_collection s:shard1) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [control_collection_shard1_replica_n1] Registered new searcher Searcher@56d7d327[control_collection_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 2431238 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, version=0}
[junit4] 2> 2431238 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/control_collection/leaders/shard1
[junit4] 2> 2431240 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
[junit4] 2> 2431240 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
[junit4] 2> 2431240 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:42375/ydtf/control_collection_shard1_replica_n1/
[junit4] 2> 2431240 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
[junit4] 2> 2431241 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy http://127.0.0.1:42375/ydtf/control_collection_shard1_replica_n1/ has no replicas
[junit4] 2> 2431241 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/control_collection/leaders/shard1/leader after winning as /collections/control_collection/leader_elect/shard1/election/73674655713394692-core_node2-n_0000000000
[junit4] 2> 2431242 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:42375/ydtf/control_collection_shard1_replica_n1/ shard1
[junit4] 2> 2431343 INFO (zkCallback-29500-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 2431343 INFO (zkCallback-29500-thread-2) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 2431344 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkController I am the leader, no recovery necessary
[junit4] 2> 2431345 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1436
[junit4] 2> 2431348 INFO (qtp1191180179-46174) [n:127.0.0.1:42375_ydtf ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
[junit4] 2> 2431445 INFO (zkCallback-29500-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 2431445 INFO (zkCallback-29500-thread-2) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 2431446 INFO (zkCallback-29500-thread-3) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 2431446 INFO (qtp1191180179-46174) [n:127.0.0.1:42375_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:42375_ydtf&wt=javabin&version=2} status=0 QTime=1649
[junit4] 2> 2431446 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.AbstractFullDistribZkTestBase Waiting to see 1 active replicas in collection: control_collection
[junit4] 2> 2431550 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 2431551 INFO (zkConnectionManagerCallback-29529-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 2431551 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 2431552 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 2431553 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34973/solr ready
[junit4] 2> 2431553 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
[junit4] 2> 2431554 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=2&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 2431556 INFO (OverseerCollectionConfigSetProcessor-73674655713394692-127.0.0.1:42375_ydtf-n_0000000000) [n:127.0.0.1:42375_ydtf ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist. Requestor may have disconnected from ZooKeeper
[junit4] 2> 2431557 INFO (OverseerThreadFactory-29508-thread-2-processing-n:127.0.0.1:42375_ydtf) [n:127.0.0.1:42375_ydtf ] o.a.s.c.a.c.CreateCollectionCmd Create collection collection1
[junit4] 2> 2431760 WARN (OverseerThreadFactory-29508-thread-2-processing-n:127.0.0.1:42375_ydtf) [n:127.0.0.1:42375_ydtf ] o.a.s.c.a.c.CreateCollectionCmd It is unusual to create a collection (collection1) without cores.
[junit4] 2> 2431761 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
[junit4] 2> 2431762 INFO (qtp1191180179-46173) [n:127.0.0.1:42375_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=2&wt=javabin&version=2} status=0 QTime=208
[junit4] 2> 2431762 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.SolrCloudTestCase active slice count: 1 expected:1
[junit4] 2> 2431762 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
[junit4] 2> 2431763 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.SolrCloudTestCase active slice count: 1 expected:1
[junit4] 2> 2431763 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
[junit4] 2> 2431763 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.SolrCloudTestCase active slice count: 1 expected:1
[junit4] 2> 2431763 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
[junit4] 2> 2431763 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.AbstractFullDistribZkTestBase Creating jetty instances pullReplicaCount=0 numOtherReplicas=1
[junit4] 2> 2431843 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001/shard-1-001 of type NRT for shard1
[junit4] 2> 2431844 WARN (closeThreadPool-29530-thread-1) [ ] o.e.j.s.h.g.GzipHandler minGzipSize of 0 is inefficient for short content, break even is size 23
[junit4] 2> 2431844 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 2431845 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 2 ...
[junit4] 2> 2431845 INFO (closeThreadPool-29530-thread-1) [ ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 1.8.0_191-b12
[junit4] 2> 2431856 INFO (closeThreadPool-29530-thread-1) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 2431856 INFO (closeThreadPool-29530-thread-1) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 2431856 INFO (closeThreadPool-29530-thread-1) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 2431857 INFO (closeThreadPool-29530-thread-1) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@650a097c{/ydtf,null,AVAILABLE}
[junit4] 2> 2431858 INFO (closeThreadPool-29530-thread-1) [ ] o.e.j.s.AbstractConnector Started ServerConnector@217357ed{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:41459}
[junit4] 2> 2431858 INFO (closeThreadPool-29530-thread-1) [ ] o.e.j.s.Server Started @2431893ms
[junit4] 2> 2431858 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=hdfs://localhost.localdomain:33397/hdfs__localhost.localdomain_33397__home_jenkins_jenkins-slave_workspace_Lucene-Solr-Tests-8.x_solr_build_solr-core_test_J2_temp_solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001_tempDir-002_jetty1, replicaType=NRT, solrconfig=solrconfig.xml, hostContext=/ydtf, hostPort=41459, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001/shard-1-001/cores}
[junit4] 2> 2431858 ERROR (closeThreadPool-29530-thread-1) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 2431858 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 2431858 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 8.6.0
[junit4] 2> 2431858 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 2431858 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr
[junit4] 2> 2431858 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2020-04-30T13:20:12.765Z
[junit4] 2> 2431862 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 2431864 INFO (zkConnectionManagerCallback-29532-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 2431864 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 2431965 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
[junit4] 2> 2431965 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001/shard-1-001/solr.xml
[junit4] 2> 2431969 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 2431969 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 2431970 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@312a5e10, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 2432778 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
[junit4] 2> 2432779 WARN (closeThreadPool-29530-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@44508cb3[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 2432779 WARN (closeThreadPool-29530-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@44508cb3[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 2432782 WARN (closeThreadPool-29530-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@f500b32[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 2432782 WARN (closeThreadPool-29530-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@f500b32[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 2432783 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34973/solr
[junit4] 2> 2432783 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 2432784 INFO (zkConnectionManagerCallback-29543-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 2432784 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 2432885 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 2432886 INFO (zkConnectionManagerCallback-29545-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 2432886 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 2432890 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 2432894 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.c.ZkController Publish node=127.0.0.1:41459_ydtf as DOWN
[junit4] 2> 2432895 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 2432895 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:41459_ydtf
[junit4] 2> 2432896 INFO (zkCallback-29500-thread-4) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 2432897 INFO (zkCallback-29544-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 2432897 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.p.PackageLoader /packages.json updated to version -1
[junit4] 2> 2432897 WARN (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.c.CoreContainer Not all security plugins configured! authentication=disabled authorization=disabled. Solr is only as secure as you make it. Consider configuring authentication/authorization before exposing Solr to users internal or external. See https://s.apache.org/solrsecurity for more info
[junit4] 2> 2432908 INFO (zkCallback-29528-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 2432911 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 2432929 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@312a5e10
[junit4] 2> 2432940 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@312a5e10
[junit4] 2> 2432940 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@312a5e10
[junit4] 2> 2432941 INFO (closeThreadPool-29530-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001/shard-1-001/cores
[junit4] 2> 2432955 INFO (closeThreadPool-29530-thread-1) [ ] o.a.s.c.AbstractFullDistribZkTestBase waitForLiveNode: 127.0.0.1:41459_ydtf
[junit4] 2> 2432960 INFO (qtp625792924-46240) [n:127.0.0.1:41459_ydtf ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:41459_ydtf&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 2432976 INFO (OverseerCollectionConfigSetProcessor-73674655713394692-127.0.0.1:42375_ydtf-n_0000000000) [n:127.0.0.1:42375_ydtf ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000002 doesn't exist. Requestor may have disconnected from ZooKeeper
[junit4] 2> 2432979 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 2432982 INFO (qtp1191180179-46174) [n:127.0.0.1:42375_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={wt=javabin&version=2&key=solr.core.control_collection.shard1.replica_n1:INDEX.sizeInBytes} status=0 QTime=3
[junit4] 2> 2432984 INFO (qtp1191180179-46176) [n:127.0.0.1:42375_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 2432985 INFO (qtp625792924-46243) [n:127.0.0.1:41459_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 2432987 INFO (qtp1191180179-46175) [n:127.0.0.1:42375_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={wt=javabin&version=2&key=solr.core.control_collection.shard1.replica_n1:INDEX.sizeInBytes} status=0 QTime=2
[junit4] 2> 2432988 INFO (qtp1191180179-46177) [n:127.0.0.1:42375_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 2432989 INFO (OverseerThreadFactory-29508-thread-3-processing-n:127.0.0.1:42375_ydtf) [n:127.0.0.1:42375_ydtf c:collection1 s:shard1 ] o.a.s.c.a.c.AddReplicaCmd Node Identified 127.0.0.1:41459_ydtf for creating new replica of shard shard1 for collection collection1
[junit4] 2> 2432992 INFO (OverseerThreadFactory-29508-thread-3-processing-n:127.0.0.1:42375_ydtf) [n:127.0.0.1:42375_ydtf c:collection1 s:shard1 ] o.a.s.c.a.c.AddReplicaCmd Returning CreateReplica command.
[junit4] 2> 2432998 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf x:collection1_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n1&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 2434007 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.6.0
[junit4] 2> 2434022 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.s.IndexSchema Schema name=test
[junit4] 2> 2434143 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 2434157 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'collection1_shard1_replica_n1' using configuration from configset conf1, trusted=true
[junit4] 2> 2434157 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1.shard1.replica_n1' (registry 'solr.core.collection1.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@312a5e10
[junit4] 2> 2434158 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:33397/solr_hdfs_home
[junit4] 2> 2434158 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 2434158 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SolrCore [[collection1_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.index.hdfs.CheckHdfsIndexTest_DFA1F9CF887845A1-001/shard-1-001/cores/collection1_shard1_replica_n1], dataDir=[hdfs://localhost.localdomain:33397/solr_hdfs_home/collection1/core_node2/data/]
[junit4] 2> 2434159 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:33397/solr_hdfs_home/collection1/core_node2/data/snapshot_metadata
[junit4] 2> 2434165 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 2434165 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 2434170 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 2434180 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:33397/solr_hdfs_home/collection1/core_node2/data
[junit4] 2> 2434195 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:33397/solr_hdfs_home/collection1/core_node2/data/index
[junit4] 2> 2434201 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 2434201 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 2434213 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 2434214 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=43, maxMergeAtOnceExplicit=49, maxMergedSegmentMB=73.1484375, floorSegmentMB=0.96484375, forceMergeDeletesPctAllowed=7.1606453137305595, segmentsPerTier=18.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0, deletesPctAllowed=42.07891698716507
[junit4] 2> 2434263 WARN (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 2434316 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 2434316 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 2434316 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
[junit4] 2> 2434343 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 2434343 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 2434345 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=4, maxMergeAtOnceExplicit=6, maxMergedSegmentMB=71.0595703125, floorSegmentMB=1.2822265625, forceMergeDeletesPctAllowed=25.390962250498777, segmentsPerTier=33.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.7501584248727483, deletesPctAllowed=49.92592499571357
[junit4] 2> 2434356 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@5fa52f23[collection1_shard1_replica_n1] main]
[junit4] 2> 2434356 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 2434357 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 2434357 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000 ms
[junit4] 2> 2434358 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1665403784019312640
[junit4] 2> 2434361 INFO (searcherExecutor-29556-thread-1-processing-n:127.0.0.1:41459_ydtf x:collection1_shard1_replica_n1 c:collection1 s:shard1) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SolrCore [collection1_shard1_replica_n1] Registered new searcher Searcher@5fa52f23[collection1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 2434367 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/collection1/terms/shard1 to Terms{values={core_node2=0}, version=0}
[junit4] 2> 2434367 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/collection1/leaders/shard1
[junit4] 2> 2434369 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
[junit4] 2> 2434369 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
[junit4] 2> 2434369 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:41459/ydtf/collection1_shard1_replica_n1/
[junit4] 2> 2434369 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
[junit4] 2> 2434369 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SyncStrategy http://127.0.0.1:41459/ydtf/collection1_shard1_replica_n1/ has no replicas
[junit4] 2> 2434369 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/collection1/leaders/shard1/leader after winning as /collections/collection1/leader_elect/shard1/election/73674655713394697-core_node2-n_0000000000
[junit4] 2> 2434370 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:41459/ydtf/collection1_shard1_replica_n1/ shard1
[junit4] 2> 2434472 INFO (zkCallback-29544-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
[junit4] 2> 2434472 INFO (zkCallback-29544-thread-2) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
[junit4] 2> 2434472 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ZkController I am the leader, no recovery necessary
[junit4] 2> 2434474 INFO (qtp625792924-46242) [n:127.0.0.1:41459_ydtf ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n1&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1476
[junit4] 2> 2434476 INFO (qtp625792924-46240) [n:127.0.0.1:41459_ydtf c:collection1 ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={node=127.0.0.1:41459_ydtf&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2} status=0 QTime=1516
[junit4] 2> 2434477 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.c.AbstractFullDistribZkTestBase Waiting to see 1 active replicas in collection: collection1
[junit4] 2> 2434575 INFO (zkCallback-29528-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
[junit4] 2> 2434575 INFO (zkCallback-29544-thread-2) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
[junit4] 2> 2434575 INFO (zkCallback-29544-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
[junit4] 2> 2434575 INFO (zkCallback-29544-thread-3) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
[junit4] 2> 2434580 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.SolrTestCaseJ4 ###Starting testChecksumsOnly
[junit4] 2> 2434981 INFO (OverseerCollectionConfigSetProcessor-73674655713394692-127.0.0.1:42375_ydtf-n_0000000000) [n:127.0.0.1:42375_ydtf ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000004 doesn't exist. Requestor may have disconnected from ZooKeeper
[junit4] 2> 2436116 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.s.h.HdfsDirectory Closing hdfs directory hdfs://localhost.localdomain:33397/solr
[junit4] 2> 2436118 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnly-seed#[DFA1F9CF887845A1]) [ ] o.a.s.SolrTestCaseJ4 ###Ending testChecksumsOnly
[junit4] 2> 2436226 INFO (closeThreadPool-29563-thread-2) [ ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=687375109
[junit4] 2> 2436226 INFO (closeThreadPool-29563-thread-2) [ ] o.a.s.c.ZkController Remove node as live in ZooKeeper:/live_nodes/127.0.0.1:42375_ydtf
[junit4] 2> 2436227 INFO (closeThreadPool-29563-thread-2) [ ] o.a.s.c.ZkController Publish this node as DOWN...
[junit4] 2> 2436227 INFO (closeThreadPool-29563-thread-2) [ ] o.a.s.c.ZkController Publish node=127.0.0.1:42375_ydtf as DOWN
[junit4] 2> 2436230 INFO (closeThreadPool-29563-thread-4) [ ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=677110681
[junit4] 2> 2436230 INFO (closeThreadPool-29563-thread-4) [ ] o.a.s.c.ZkController Remove node as live in ZooKeeper:/live_nodes/127.0.0.1:41459_ydtf
[junit4] 2> 2436230 INFO (closeThreadPool-29563-thread-4) [ ] o.a.s.c.ZkController Publish this node as DOWN...
[junit4] 2> 2436230 INFO (closeThreadPool-29563-thread-4) [ ] o.a.s.c.ZkController Publish node=127.0.0.1:41459_ydtf as DOWN
[junit4] 2> 2436238 INFO (coreCloseExecutor-29570-thread-1) [n:127.0.0.1:42375_ydtf ] o.a.s.c.SolrCore [control_collection_shard1_replica_n1] CLOSING SolrCore org.apache.solr.core.SolrCore@30cc5929
[junit4] 2> 2436238 INFO (coreCloseExecutor-29570-thread-1) [n:127.0.0.1:42375_ydtf ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.core.control_collection.shard1.replica_n1 tag=SolrCore@30cc5929
[junit4] 2> 2436238 INFO (coreCloseExecutor-29570-thread-1) [n:127.0.0.1:42375_ydtf ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@deaf22f: rootName = null, domain = solr.core.control_collection.shard1.replica_n1, service url = null, agent id = null] for registry solr.core.control_collection.shard1.replica_n1/com.codahale.metrics.MetricRegistry@3f5bfb72
[junit4] 2> 2436240 INFO (coreCloseExecutor-29571-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.c.SolrCore [collection1_shard1_replica_n1] CLOSING SolrCore org.apache.solr.core.SolrCore@3a65ce08
[junit4] 2> 2436240 INFO (coreCloseExecutor-29571-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.core.collection1.shard1.replica_n1 tag=SolrCore@3a65ce08
[junit4] 2> 2436240 INFO (coreCloseExecutor-29571-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@4e52380e: rootName = null, domain = solr.core.collection1.shard1.replica_n1, service url = null, agent id = null] for registry solr.core.collection1.shard1.replica_n1/com.codahale.metrics.MetricRegistry@3e80e1ca
[junit4] 2> 2436257 INFO (coreCloseExecutor-29570-thread-1) [n:127.0.0.1:42375_ydtf ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.collection.control_collection.shard1.leader tag=SolrCore@30cc5929
[junit4] 2> 2436259 INFO (coreCloseExecutor-29571-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.collection.collection1.shard1.leader tag=SolrCore@3a65ce08
[junit4] 2> 2436260 INFO (coreCloseExecutor-29571-thread-1) [n:127.0.0.1:41459_ydtf ] o.a.s.u.DirectUpdateHandler2 Committing on IndexWriter.close() ... SKIPPED (unnecessary).
[junit4] 2> 2436264 INFO (coreCloseExecutor-29570-thread-1) [n:127.0.0.1:42375_ydtf ] o.a.s.u.DirectUpdateHandler2 Committing on IndexWriter.close() ... SKIPPED (unnecessary).
[junit4] 2> 2436269 INFO (coreCloseExecutor-29570-thread-1
[...truncated too long message...]
sting.ThreadLeakControl checkThreadLeaks
[junit4] 2> WARNING: Will linger awaiting termination of 33 leaked thread(s).
[junit4] 2> NOTE: test params are: codec=CheapBastard, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@6c6f73db), locale=ar-JO, timezone=Etc/GMT-14
[junit4] 2> NOTE: Linux 4.15.0-54-generic amd64/Oracle Corporation 1.8.0_191 (64-bit)/cpus=4,threads=4,free=115936344,total=525860864
[junit4] 2> NOTE: All tests run in this JVM: [BlockDirectoryTest, TestReplicationHandlerDiskOverFlow, CSVRequestHandlerTest, MultiThreadedOCPTest, ClassificationUpdateProcessorIntegrationTest, HttpPartitionTest, TestDistributedGrouping, TestJsonFacetsStatsParsing, TestHighFrequencyDictionaryFactory, TestSolrConfigHandlerConcurrent, PKIAuthenticationIntegrationTest, HighlighterWithoutStoredIdTest, TestReloadDeadlock, SolrJmxReporterCloudTest, TestJsonFacetErrors, DynamicMapsTest, HdfsLockFactoryTest, AsyncCallRequestStatusResponseTest, ConcurrentDeleteAndCreateCollectionTest, DistribDocExpirationUpdateProcessorTest, TestBinaryResponseWriter, TestRangeQuery, TestFieldCacheSortRandom, TestSolrDeletionPolicy2, TestConfig, BasicAuthStandaloneTest, UUIDUpdateProcessorFallbackTest, DocExpirationUpdateProcessorFactoryTest, IndexSchemaRuntimeFieldTest, TriggerIntegrationTest, TestJoin, TestSimComputePlanAction, SchemaVersionSpecificBehaviorTest, CustomHighlightComponentTest, TestSimpleTrackingShardHandler, TestRTimerTree, CdcrWithNodesRestartsTest, ConnectionReuseTest, RequestLoggingTest, TestDistributedMap, QueryEqualityTest, TestStressRecovery, DistributedDebugComponentTest, TestPHPSerializedResponseWriter, UnloadDistributedZkTest, TestSchemaResource, SolrMetricReporterTest, TestCloudManagedSchema, LoggingHandlerTest, TestCollapseQParserPlugin, BadIndexSchemaTest, LeaderElectionIntegrationTest, TestSort, IgnoreLargeDocumentProcessorFactoryTest, ParsingFieldUpdateProcessorsTest, XMLAtomicUpdateMultivalueTest, TestExactStatsCache, AddSchemaFieldsUpdateProcessorFactoryTest, DistributedFacetPivotSmallTest, TestIndexSearcher, TestOnReconnectListenerSupport, MetricsConfigTest, TestFoldingMultitermQuery, BJQParserTest, TestSolr4Spatial2, RoutingToNodesWithPropertiesTest, NodeAddedTriggerIntegrationTest, TestTlogReplica, PrimitiveFieldTypeTest, ZookeeperStatusHandlerTest, ProtectedTermFilterFactoryTest, DistributedFacetPivotLargeTest, AnalyticsMergeStrategyTest, 
SolrTestCaseJ4Test, TestDefaultStatsCache, SubstringBytesRefFilterTest, TestQueryUtils, TestDistributedTracing, ComputePlanActionTest, TestXmlQParserPlugin, TestInPlaceUpdatesRequiredField, AuditLoggerPluginTest, TestFieldCache, TestClassicSimilarityFactory, TestUninvertingReader, TestCloudRecovery2, TestChildDocTransformer, OutputWriterTest, TestFieldSortValues, LegacyCloudClusterPropTest, OrderedExecutorTest, TestConfigSetsAPI, UpdateLogTest, RAMDirectoryFactoryTest, TestCryptoKeys, SolrCloudReportersTest, DeleteShardTest, ReturnFieldsTest, TestUtilizeNode, DOMUtilTest, TestConfigSetProperties, ChaosMonkeyNothingIsSafeWithPullReplicasTest, SearchRateTriggerIntegrationTest, DefaultValueUpdateProcessorTest, HdfsBasicDistributedZkTest, TestTolerantSearch, JsonLoaderTest, TestDocumentBuilder, TestCloudDeleteByQuery, TestHdfsBackupRestoreCore, TestPointFields, TestConfigsApi, TestLegacyFieldCache, FullHLLTest, PhrasesIdentificationComponentTest, CursorMarkTest, ClusterStateMockUtilTest, MigrateRouteKeyTest, TaggingAttributeTest, TestExportWriter, TestRecovery, OverseerTest, ZkStateReaderTest, TestCollationFieldDocValues, SecurityConfHandlerTest, TestSimLargeCluster, TestFileDictionaryLookup, RemoteQueryErrorTest, ResponseLogComponentTest, TestSolrCoreSnapshots, TestPerFieldSimilarityWithDefaultOverride, TestUnifiedSolrHighlighterWithoutStoredId, TestMacros, BasicDistributedZkTest, TestSchemalessBufferedUpdates, AtomicUpdatesTest, TestElisionMultitermQuery, TestMergePolicyConfig, CopyFieldTest, RecoveryAfterSoftCommitTest, TestNoOpRegenerator, FileBasedSpellCheckerTest, TestDocBasedVersionConstraints, TestLocalFSCloudBackupRestore, SystemInfoHandlerTest, XsltUpdateRequestHandlerTest, TestSolrCoreProperties, TestTrie, TestDeprecatedFilters, TestReversedWildcardFilterFactory, TestLuceneIndexBackCompat, ClusterStateUpdateTest, CollectionsAPISolrJTest, LeaderFailureAfterFreshStartTest, OverseerRolesTest, ReplaceNodeNoTargetTest, ReplaceNodeTest, ShardRoutingTest, 
SolrXmlInZkTest, SplitShardTest, SystemCollectionCompatTest, TestCloudPhrasesIdentificationComponent, TestCloudPivotFacet, TestDeleteCollectionOnDownNodes, TestLeaderElectionZkExpiry, TestQueryingOnDownCollection, TestRandomFlRTGCloud, TestRequestForwarding, TestShortCircuitedRequests, TestStressLiveNodes, SplitByPrefixTest, TestRequestStatusCollectionAPI, ExecutePlanActionTest, HdfsAutoAddReplicasIntegrationTest, HttpTriggerListenerTest, IndexSizeTriggerTest, MetricTriggerIntegrationTest, NodeAddedTriggerTest, NodeLostTriggerTest, ScheduledMaintenanceTriggerTest, ScheduledTriggerIntegrationTest, TestPolicyCloud, CdcrOpsAndBoundariesTest, CdcrReplicationHandlerTest, HdfsChaosMonkeySafeLeaderTest, HdfsRecoveryZkTest, HdfsRestartWhileUpdatingTest, HdfsThreadLeakTest, HdfsTlogReplayBufferedWhileIndexingTest, HdfsWriteToMultipleCollectionsTest, StressHdfsTest, TestClusterStateMutator, ZkStateWriterTest, RuleEngineTest, AlternateDirectoryTest, ByteBuffersDirectoryFactoryTest, ConfigureRecoveryStrategyTest, CoreSorterTest, ExitableDirectoryReaderTest, HdfsDirectoryFactoryTest, SolrCoreCheckLockOnStartupTest, TestBackupRepositoryFactory, TestConfigOverlay, TestCoreContainer, TestDirectoryFactory, TestDynamicLoadingUrl, TestReloadAndDeleteDocs, DocumentAnalysisRequestHandlerTest, TestCSVLoader, TestConfigReload, TestRestoreCore, TestSystemCollAutoCreate, V2ApiIntegrationTest, AutoscalingHistoryHandlerTest, CoreAdminCreateDiscoverTest, CoreAdminOperationTest, CoreAdminRequestStatusTest, DaemonStreamApiTest, HealthCheckHandlerTest, MBeansHandlerTest, DistributedQueryComponentOptimizationTest, DistributedTermsComponentTest, SearchHandlerTest, SuggestComponentContextFilterQueryTest, TermVectorComponentTest, TermsComponentTest, JavabinLoaderTest, RandomizedTaggerTest, HighlighterConfigTest, TestSlowCompositeReaderWrapper, WrapperMergePolicyFactoryTest, CheckHdfsIndexTest]
[junit4] Completed [713/907 (1!)] on J2 in 37.07s, 5 tests, 1 error, 1 skipped <<< FAILURES!
[...truncated 38739 lines...]
-ecj-javadoc-lint-src:
[mkdir] Created dir: /tmp/ecj1805907511
[ecj-lint] Compiling 931 source files to /tmp/ecj1805907511
[ecj-lint] ----------
[ecj-lint] 1. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java (at line 523)
[ecj-lint] throw new CorruptIndexException("misplaced codec footer (file truncated?): length=" + in.length() + " but footerLength==" + footerLength(), input);
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'in' is not closed at this location
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 2. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsReader.java (at line 166)
[ecj-lint] FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, INDEX_EXTENSION_PREFIX, INDEX_CODEC_NAME, si.getId());
[ecj-lint] ^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'fieldsIndexReader' is never closed
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 3. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsReader.java (at line 148)
[ecj-lint] FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, VECTORS_INDEX_EXTENSION_PREFIX, VECTORS_INDEX_CODEC_NAME, si.getId());
[ecj-lint] ^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'fieldsIndexReader' is never closed
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 4. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java (at line 50)
[ecj-lint] import org.apache.lucene.util.automaton.ByteRunAutomaton;
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] The import org.apache.lucene.util.automaton.ByteRunAutomaton is never used
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 5. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java (at line 742)
[ecj-lint] Integer q = newstate.get(statesSet);
[ecj-lint] ^^^^^^^^^
[ecj-lint] Unlikely argument type SortedIntSet for get(Object) on a Map<SortedIntSet.FrozenIntSet,Integer>
[ecj-lint] ----------
[ecj-lint] 5 problems (1 error, 4 warnings)
BUILD FAILED
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/build.xml:634: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/build.xml:101: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/build.xml:201: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/common-build.xml:2127: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/common-build.xml:2166: Compile failed; see the compiler error output for details.
Total time: 70 minutes 36 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any
[JENKINS] Lucene-Solr-Tests-8.x - Build # 1467 - Still Failing
Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-Tests-8.x/1467/
All tests passed
Build Log:
[...truncated 54220 lines...]
-ecj-javadoc-lint-src:
[mkdir] Created dir: /tmp/ecj245225216
[ecj-lint] Compiling 931 source files to /tmp/ecj245225216
[ecj-lint] ----------
[ecj-lint] 1. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java (at line 523)
[ecj-lint] throw new CorruptIndexException("misplaced codec footer (file truncated?): length=" + in.length() + " but footerLength==" + footerLength(), input);
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'in' is not closed at this location
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 2. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsReader.java (at line 166)
[ecj-lint] FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, INDEX_EXTENSION_PREFIX, INDEX_CODEC_NAME, si.getId());
[ecj-lint] ^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'fieldsIndexReader' is never closed
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 3. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsReader.java (at line 148)
[ecj-lint] FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, VECTORS_INDEX_EXTENSION_PREFIX, VECTORS_INDEX_CODEC_NAME, si.getId());
[ecj-lint] ^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'fieldsIndexReader' is never closed
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 4. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java (at line 50)
[ecj-lint] import org.apache.lucene.util.automaton.ByteRunAutomaton;
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] The import org.apache.lucene.util.automaton.ByteRunAutomaton is never used
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 5. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java (at line 742)
[ecj-lint] Integer q = newstate.get(statesSet);
[ecj-lint] ^^^^^^^^^
[ecj-lint] Unlikely argument type SortedIntSet for get(Object) on a Map<SortedIntSet.FrozenIntSet,Integer>
[ecj-lint] ----------
[ecj-lint] 5 problems (1 error, 4 warnings)
BUILD FAILED
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/build.xml:634: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/build.xml:101: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/build.xml:201: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/common-build.xml:2127: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/common-build.xml:2166: Compile failed; see the compiler error output for details.
Total time: 121 minutes 55 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any
[JENKINS] Lucene-Solr-Tests-8.x - Build # 1466 - Still Failing
Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-Tests-8.x/1466/
All tests passed
Build Log:
[...truncated 54166 lines...]
-ecj-javadoc-lint-src:
[mkdir] Created dir: /tmp/ecj1139083319
[ecj-lint] Compiling 931 source files to /tmp/ecj1139083319
[ecj-lint] ----------
[ecj-lint] 1. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java (at line 523)
[ecj-lint] throw new CorruptIndexException("misplaced codec footer (file truncated?): length=" + in.length() + " but footerLength==" + footerLength(), input);
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'in' is not closed at this location
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 2. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsReader.java (at line 166)
[ecj-lint] FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, INDEX_EXTENSION_PREFIX, INDEX_CODEC_NAME, si.getId());
[ecj-lint] ^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'fieldsIndexReader' is never closed
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 3. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsReader.java (at line 148)
[ecj-lint] FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, VECTORS_INDEX_EXTENSION_PREFIX, VECTORS_INDEX_CODEC_NAME, si.getId());
[ecj-lint] ^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'fieldsIndexReader' is never closed
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 4. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java (at line 50)
[ecj-lint] import org.apache.lucene.util.automaton.ByteRunAutomaton;
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] The import org.apache.lucene.util.automaton.ByteRunAutomaton is never used
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 5. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java (at line 742)
[ecj-lint] Integer q = newstate.get(statesSet);
[ecj-lint] ^^^^^^^^^
[ecj-lint] Unlikely argument type SortedIntSet for get(Object) on a Map<SortedIntSet.FrozenIntSet,Integer>
[ecj-lint] ----------
[ecj-lint] 5 problems (1 error, 4 warnings)
BUILD FAILED
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/build.xml:634: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/build.xml:101: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/build.xml:201: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/common-build.xml:2127: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/common-build.xml:2166: Compile failed; see the compiler error output for details.
Total time: 72 minutes 7 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any
[JENKINS] Lucene-Solr-Tests-8.x - Build # 1465 - Still Failing
Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-Tests-8.x/1465/
1 tests failed.
FAILED: org.apache.solr.index.hdfs.CheckHdfsIndexTest.doTest
Error Message:
Error from server at http://127.0.0.1:45805/collection1: java.lang.NullPointerException at org.apache.solr.handler.admin.SystemInfoHandler.getSecurityInfo(SystemInfoHandler.java:326) at org.apache.solr.handler.admin.SystemInfoHandler.handleRequestBody(SystemInfoHandler.java:146) at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:211) at org.apache.solr.core.SolrCore.execute(SolrCore.java:2600) at org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:803) at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:582) at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:432) at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:362) at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604) at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166) at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:545) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1610) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1300) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:485) at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1580) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1215) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at 
org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:767) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:500) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:547) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103) at org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938) at java.lang.Thread.run(Thread.java:748)
Stack Trace:
org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error from server at http://127.0.0.1:45805/collection1: java.lang.NullPointerException
at org.apache.solr.handler.admin.SystemInfoHandler.getSecurityInfo(SystemInfoHandler.java:326)
at org.apache.solr.handler.admin.SystemInfoHandler.handleRequestBody(SystemInfoHandler.java:146)
at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:211)
at org.apache.solr.core.SolrCore.execute(SolrCore.java:2600)
at org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:803)
at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:582)
at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:432)
at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:362)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:545)
at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1610)
at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1300)
at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:485)
at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1580)
at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1215)
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322)
at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:767)
at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
at org.eclipse.jetty.server.Server.handle(Server.java:500)
at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383)
at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:547)
at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375)
at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273)
at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)
at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103)
at org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117)
at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
at java.lang.Thread.run(Thread.java:748)
at __randomizedtesting.SeedInfo.seed([C69240967B2E1AE5:61D6F8321695095C]:0)
at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:665)
at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:265)
at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:248)
at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:211)
at org.apache.solr.client.solrj.SolrClient.query(SolrClient.java:1003)
at org.apache.solr.client.solrj.SolrClient.query(SolrClient.java:1018)
at org.apache.solr.index.hdfs.CheckHdfsIndexTest.doTest(CheckHdfsIndexTest.java:120)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1081)
at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1053)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at java.lang.Thread.run(Thread.java:748)
Build Log:
[...truncated 13490 lines...]
[junit4] Suite: org.apache.solr.index.hdfs.CheckHdfsIndexTest
[junit4] 2> 344745 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.a.s.SolrTestCase Setting 'solr.default.confdir' system property to test-framework derived value of '/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/server/solr/configsets/_default/conf'
[junit4] 2> 344746 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001/data-dir-39-001
[junit4] 2> 344746 WARN (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=11 numCloses=11
[junit4] 2> 344746 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
[junit4] 2> 344747 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason=, ssl=NaN, value=NaN, clientAuth=NaN)
[junit4] 2> 344747 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
[junit4] 2> 344747 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
[junit4] 1> Formatting using clusterid: testClusterID
[junit4] 2> 344940 WARN (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 344942 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 1.8.0_191-b12
[junit4] 2> 344988 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 344988 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 344988 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 344989 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@40e6d4b5{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 345089 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@25c3ab12{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost_localdomain-37671-hadoop-hdfs-3_2_0-tests_jar-_-any-4288266329912229034.dir/webapp/,AVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/hdfs}
[junit4] 2> 345090 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@7619f542{HTTP/1.1, (http/1.1)}{localhost.localdomain:37671}
[junit4] 2> 345090 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.Server Started @345117ms
[junit4] 2> 345176 WARN (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 345177 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 1.8.0_191-b12
[junit4] 2> 345177 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 345177 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 345178 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 345178 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@39a8c1d7{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 345278 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@7fa46280{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost-44867-hadoop-hdfs-3_2_0-tests_jar-_-any-510217292959700805.dir/webapp/,AVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/datanode}
[junit4] 2> 345279 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@130f265{HTTP/1.1, (http/1.1)}{localhost:44867}
[junit4] 2> 345279 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.Server Started @345306ms
[junit4] 2> 345599 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xd09a006e07fdb618: Processing first storage report for DS-42136e70-170d-43af-b9e7-4ef5890afe6e from datanode dbb39209-6d23-4d86-8394-01740c9f3c51
[junit4] 2> 345599 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xd09a006e07fdb618: from storage DS-42136e70-170d-43af-b9e7-4ef5890afe6e node DatanodeRegistration(127.0.0.1:34095, datanodeUuid=dbb39209-6d23-4d86-8394-01740c9f3c51, infoPort=40711, infoSecurePort=0, ipcPort=41661, storageInfo=lv=-57;cid=testClusterID;nsid=2092358094;c=1588260881449), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 345599 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xd09a006e07fdb618: Processing first storage report for DS-34875500-2d75-4773-8e62-6c5d1b2fd79c from datanode dbb39209-6d23-4d86-8394-01740c9f3c51
[junit4] 2> 345599 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xd09a006e07fdb618: from storage DS-34875500-2d75-4773-8e62-6c5d1b2fd79c node DatanodeRegistration(127.0.0.1:34095, datanodeUuid=dbb39209-6d23-4d86-8394-01740c9f3c51, infoPort=40711, infoSecurePort=0, ipcPort=41661, storageInfo=lv=-57;cid=testClusterID;nsid=2092358094;c=1588260881449), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 345726 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 345735 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
[junit4] 2> 345736 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer Starting server
[junit4] 2> 345835 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer start zk server on port:44165
[junit4] 2> 345835 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:44165
[junit4] 2> 345835 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:44165
[junit4] 2> 345835 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 44165
[junit4] 2> 345837 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 345840 INFO (zkConnectionManagerCallback-3497-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 345840 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 345850 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 345852 INFO (zkConnectionManagerCallback-3499-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 345852 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 345853 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
[junit4] 2> 345854 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/schema.xml to /configs/conf1/schema.xml
[junit4] 2> 345855 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
[junit4] 2> 345856 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
[junit4] 2> 345856 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
[junit4] 2> 345857 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
[junit4] 2> 345858 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
[junit4] 2> 345858 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
[junit4] 2> 345859 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
[junit4] 2> 345860 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
[junit4] 2> 345860 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
[junit4] 2> 345861 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly asked otherwise
[junit4] 2> 345946 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.s.h.g.GzipHandler minGzipSize of 0 is inefficient for short content, break even is size 23
[junit4] 2> 345946 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 345946 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 2 ...
[junit4] 2> 345946 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 1.8.0_191-b12
[junit4] 2> 345952 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 345952 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 345952 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 345953 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@73d97941{/,null,AVAILABLE}
[junit4] 2> 345953 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.s.AbstractConnector Started ServerConnector@bc784e5{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:37127}
[junit4] 2> 345953 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.s.Server Started @345980ms
[junit4] 2> 345953 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=hdfs://localhost.localdomain:45323/hdfs__localhost.localdomain_45323__home_jenkins_jenkins-slave_workspace_Lucene-Solr-Tests-8.x_solr_build_solr-core_test_J0_temp_solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001_tempDir-002_control_data, replicaType=NRT, hostContext=/, hostPort=37127, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001/control-001/cores}
[junit4] 2> 345953 ERROR (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 345953 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 345953 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version 8.6.0
[junit4] 2> 345953 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 345953 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr
[junit4] 2> 345953 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2020-04-30T15:34:42.628Z
[junit4] 2> 345956 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 345960 INFO (zkConnectionManagerCallback-3501-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 345960 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 346061 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
[junit4] 2> 346061 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001/control-001/solr.xml
[junit4] 2> 346065 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 346065 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 346066 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@6c92680a, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 347189 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
[junit4] 2> 347190 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@5e3f5f1d[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 347190 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@5e3f5f1d[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 347193 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@57e10c65[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 347193 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@57e10c65[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 347194 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:44165/solr
[junit4] 2> 347194 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 347196 INFO (zkConnectionManagerCallback-3512-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 347196 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 347298 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 347299 INFO (zkConnectionManagerCallback-3514-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 347299 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 347360 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:37127_
[junit4] 2> 347360 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.c.Overseer Overseer (id=73675184762781700-127.0.0.1:37127_-n_0000000000) starting
[junit4] 2> 347364 INFO (OverseerStateUpdate-73675184762781700-127.0.0.1:37127_-n_0000000000) [n:127.0.0.1:37127_ ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:37127_
[junit4] 2> 347365 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:37127_
[junit4] 2> 347368 INFO (zkCallback-3513-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 347369 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.p.PackageLoader /packages.json updated to version -1
[junit4] 2> 347369 WARN (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.c.CoreContainer Not all security plugins configured! authentication=you make it. Consider configuring authentication/authorization before exposing Solr to users internal or authorization=external. See https://s.apache.org/solrsecurity for more info. Solr is only as secure as disableddisabled
[junit4] 2> 347387 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 347416 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@6c92680a
[junit4] 2> 347423 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@6c92680a
[junit4] 2> 347423 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@6c92680a
[junit4] 2> 347424 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [n:127.0.0.1:37127_ ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001/control-001/cores
[junit4] 2> 347439 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 347439 INFO (zkConnectionManagerCallback-3531-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 347439 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 347440 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 347441 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:44165/solr ready
[junit4] 2> 347442 INFO (qtp812776851-6464) [n:127.0.0.1:37127_ ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:37127_&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 347444 INFO (OverseerThreadFactory-3521-thread-1-processing-n:127.0.0.1:37127_) [n:127.0.0.1:37127_ ] o.a.s.c.a.c.CreateCollectionCmd Create collection control_collection
[junit4] 2> 347550 INFO (qtp812776851-6466) [n:127.0.0.1:37127_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 347551 INFO (qtp812776851-6467) [n:127.0.0.1:37127_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 347554 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ x:control_collection_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 347554 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ x:control_collection_shard1_replica_n1 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 348565 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.6.0
[junit4] 2> 348581 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema Schema name=test
[junit4] 2> 348685 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 348699 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' using configuration from configset conf1, trusted=true
[junit4] 2> 348699 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry 'solr.core.control_collection.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@6c92680a
[junit4] 2> 348704 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:45323/solr_hdfs_home
[junit4] 2> 348704 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 348705 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001/control-001/cores/control_collection_shard1_replica_n1], dataDir=[hdfs://localhost.localdomain:45323/solr_hdfs_home/control_collection/core_node2/data/]
[junit4] 2> 348706 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:45323/solr_hdfs_home/control_collection/core_node2/data/snapshot_metadata
[junit4] 2> 348722 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 348722 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 348722 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 348829 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 348835 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:45323/solr_hdfs_home/control_collection/core_node2/data
[junit4] 2> 348856 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:45323/solr_hdfs_home/control_collection/core_node2/data/index
[junit4] 2> 348862 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 348862 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 348862 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 348868 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 348868 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=47, maxMergeAtOnceExplicit=38, maxMergedSegmentMB=13.1748046875, floorSegmentMB=1.70703125, forceMergeDeletesPctAllowed=25.99013591838012, segmentsPerTier=34.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0, deletesPctAllowed=43.629115622877606
[junit4] 2> 348936 WARN (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 349032 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 349032 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 349032 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
[junit4] 2> 349064 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 349064 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 349072 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=13, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0]
[junit4] 2> 349259 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@4bcf157a[control_collection_shard1_replica_n1] main]
[junit4] 2> 349260 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 349261 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 349263 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000 ms
[junit4] 2> 349265 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1665412246735421440
[junit4] 2> 349268 INFO (searcherExecutor-3533-thread-1-processing-n:127.0.0.1:37127_ x:control_collection_shard1_replica_n1 c:control_collection s:shard1) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [control_collection_shard1_replica_n1] Registered new searcher Searcher@4bcf157a[control_collection_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 349277 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, version=0}
[junit4] 2> 349277 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/control_collection/leaders/shard1
[junit4] 2> 349279 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
[junit4] 2> 349279 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
[junit4] 2> 349279 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:37127/control_collection_shard1_replica_n1/
[junit4] 2> 349279 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
[junit4] 2> 349279 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy http://127.0.0.1:37127/control_collection_shard1_replica_n1/ has no replicas
[junit4] 2> 349279 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/control_collection/leaders/shard1/leader after winning as /collections/control_collection/leader_elect/shard1/election/73675184762781700-core_node2-n_0000000000
[junit4] 2> 349281 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:37127/control_collection_shard1_replica_n1/ shard1
[junit4] 2> 349383 INFO (zkCallback-3513-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 349383 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkController I am the leader, no recovery necessary
[junit4] 2> 349385 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1830
[junit4] 2> 349392 INFO (zkCallback-3513-thread-2) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 349393 INFO (qtp812776851-6464) [n:127.0.0.1:37127_ ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
[junit4] 2> 349447 INFO (OverseerCollectionConfigSetProcessor-73675184762781700-127.0.0.1:37127_-n_0000000000) [n:127.0.0.1:37127_ ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist. Requestor may have disconnected from ZooKeeper
[junit4] 2> 349494 INFO (zkCallback-3513-thread-2) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 349494 INFO (zkCallback-3513-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 349495 INFO (qtp812776851-6464) [n:127.0.0.1:37127_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:37127_&wt=javabin&version=2} status=0 QTime=2053
[junit4] 2> 349495 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.AbstractFullDistribZkTestBase Waiting to see 1 active replicas in collection: control_collection
[junit4] 2> 349500 INFO (zkCallback-3513-thread-3) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 349657 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 349659 INFO (zkConnectionManagerCallback-3542-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 349659 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 349660 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 349661 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:44165/solr ready
[junit4] 2> 349661 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
[junit4] 2> 349662 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=1&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 349682 INFO (OverseerThreadFactory-3521-thread-2-processing-n:127.0.0.1:37127_) [n:127.0.0.1:37127_ ] o.a.s.c.a.c.CreateCollectionCmd Create collection collection1
[junit4] 2> 349897 WARN (OverseerThreadFactory-3521-thread-2-processing-n:127.0.0.1:37127_) [n:127.0.0.1:37127_ ] o.a.s.c.a.c.CreateCollectionCmd It is unusual to create a collection (collection1) without cores.
[junit4] 2> 349898 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
[junit4] 2> 349899 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=1&wt=javabin&version=2} status=0 QTime=236
[junit4] 2> 349899 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.SolrCloudTestCase active slice count: 1 expected:1
[junit4] 2> 349899 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
[junit4] 2> 349899 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.SolrCloudTestCase active slice count: 1 expected:1
[junit4] 2> 349899 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
[junit4] 2> 349899 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.SolrCloudTestCase active slice count: 1 expected:1
[junit4] 2> 349899 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
[junit4] 2> 349899 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.AbstractFullDistribZkTestBase Creating jetty instances pullReplicaCount=0 numOtherReplicas=1
[junit4] 2> 349980 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001/shard-1-001 of type NRT for shard1
[junit4] 2> 349981 WARN (closeThreadPool-3543-thread-1) [ ] o.e.j.s.h.g.GzipHandler minGzipSize of 0 is inefficient for short content, break even is size 23
[junit4] 2> 349981 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 349981 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 2 ...
[junit4] 2> 349981 INFO (closeThreadPool-3543-thread-1) [ ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 1.8.0_191-b12
[junit4] 2> 349988 INFO (closeThreadPool-3543-thread-1) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 349988 INFO (closeThreadPool-3543-thread-1) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 349988 INFO (closeThreadPool-3543-thread-1) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 349989 INFO (closeThreadPool-3543-thread-1) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7c84cd6b{/,null,AVAILABLE}
[junit4] 2> 349989 INFO (closeThreadPool-3543-thread-1) [ ] o.e.j.s.AbstractConnector Started ServerConnector@4245e51e{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:43629}
[junit4] 2> 349989 INFO (closeThreadPool-3543-thread-1) [ ] o.e.j.s.Server Started @350016ms
[junit4] 2> 349989 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=hdfs://localhost.localdomain:45323/hdfs__localhost.localdomain_45323__home_jenkins_jenkins-slave_workspace_Lucene-Solr-Tests-8.x_solr_build_solr-core_test_J0_temp_solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001_tempDir-002_jetty1, replicaType=NRT, solrconfig=solrconfig.xml, hostContext=/, hostPort=43629, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001/shard-1-001/cores}
[junit4] 2> 349989 ERROR (closeThreadPool-3543-thread-1) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 349989 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 349989 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version 8.6.0
[junit4] 2> 349989 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 349989 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr
[junit4] 2> 349989 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2020-04-30T15:34:46.664Z
[junit4] 2> 349990 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 349991 INFO (zkConnectionManagerCallback-3545-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 349991 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 350160 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
[junit4] 2> 350160 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001/shard-1-001/solr.xml
[junit4] 2> 350163 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 350164 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 350219 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@6c92680a, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 351683 INFO (OverseerCollectionConfigSetProcessor-73675184762781700-127.0.0.1:37127_-n_0000000000) [n:127.0.0.1:37127_ ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000002 doesn't exist. Requestor may have disconnected from ZooKeeper
[junit4] 2> 352351 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
[junit4] 2> 352352 WARN (closeThreadPool-3543-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@4f7bb8d1[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 352352 WARN (closeThreadPool-3543-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@4f7bb8d1[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 352354 WARN (closeThreadPool-3543-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@43289cee[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 352354 WARN (closeThreadPool-3543-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@43289cee[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 352355 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:44165/solr
[junit4] 2> 352356 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 352357 INFO (zkConnectionManagerCallback-3556-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 352357 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 352459 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 352460 INFO (zkConnectionManagerCallback-3558-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 352460 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 352463 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 352466 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.c.ZkController Publish node=127.0.0.1:43629_ as DOWN
[junit4] 2> 352466 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 352466 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:43629_
[junit4] 2> 352467 INFO (zkCallback-3541-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 352467 INFO (zkCallback-3513-thread-3) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 352468 INFO (zkCallback-3557-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 352469 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.p.PackageLoader /packages.json updated to version -1
[junit4] 2> 352469 WARN (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.c.CoreContainer Not all security plugins configured! authentication=disabled authorization=disabled. Solr is only as secure as you make it. Consider configuring authentication/authorization before exposing Solr to internal or external users. See https://s.apache.org/solrsecurity for more info
[junit4] 2> 352482 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 352501 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@6c92680a
[junit4] 2> 352511 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@6c92680a
[junit4] 2> 352511 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@6c92680a
[junit4] 2> 352512 INFO (closeThreadPool-3543-thread-1) [n:127.0.0.1:43629_ ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001/shard-1-001/cores
[junit4] 2> 352526 INFO (closeThreadPool-3543-thread-1) [ ] o.a.s.c.AbstractFullDistribZkTestBase waitForLiveNode: 127.0.0.1:43629_
[junit4] 2> 352528 INFO (qtp812776851-6464) [n:127.0.0.1:37127_ ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:43629_&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 352536 INFO (qtp812776851-6465) [n:127.0.0.1:37127_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={wt=javabin&version=2&key=solr.core.control_collection.shard1.replica_n1:INDEX.sizeInBytes} status=0 QTime=3
[junit4] 2> 352537 INFO (qtp812776851-6466) [n:127.0.0.1:37127_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 352541 INFO (qtp1182427401-6529) [n:127.0.0.1:43629_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 352544 INFO (qtp812776851-6467) [n:127.0.0.1:37127_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={wt=javabin&version=2&key=solr.core.control_collection.shard1.replica_n1:INDEX.sizeInBytes} status=0 QTime=2
[junit4] 2> 352545 INFO (qtp812776851-6463) [n:127.0.0.1:37127_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 352546 INFO (qtp1182427401-6530) [n:127.0.0.1:43629_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CORE.coreName&wt=javabin&version=2&group=solr.node,solr.core} status=0 QTime=0
[junit4] 2> 352546 INFO (OverseerThreadFactory-3521-thread-3-processing-n:127.0.0.1:37127_) [n:127.0.0.1:37127_ c:collection1 s:shard1 ] o.a.s.c.a.c.AddReplicaCmd Node Identified 127.0.0.1:43629_ for creating new replica of shard shard1 for collection collection1
[junit4] 2> 352547 INFO (OverseerThreadFactory-3521-thread-3-processing-n:127.0.0.1:37127_) [n:127.0.0.1:37127_ c:collection1 s:shard1 ] o.a.s.c.a.c.AddReplicaCmd Returning CreateReplica command.
[junit4] 2> 352570 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ x:collection1_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n1&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 353583 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.6.0
[junit4] 2> 353598 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.s.IndexSchema Schema name=test
[junit4] 2> 353697 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 353709 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'collection1_shard1_replica_n1' using configuration from configset conf1, trusted=true
[junit4] 2> 353709 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1.shard1.replica_n1' (registry 'solr.core.collection1.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@6c92680a
[junit4] 2> 353709 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:45323/solr_hdfs_home
[junit4] 2> 353709 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 353709 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SolrCore [[collection1_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001/shard-1-001/cores/collection1_shard1_replica_n1], dataDir=[hdfs://localhost.localdomain:45323/solr_hdfs_home/collection1/core_node2/data/]
[junit4] 2> 353710 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:45323/solr_hdfs_home/collection1/core_node2/data/snapshot_metadata
[junit4] 2> 353717 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 353717 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 353717 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 353724 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 353725 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:45323/solr_hdfs_home/collection1/core_node2/data
[junit4] 2> 353741 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:45323/solr_hdfs_home/collection1/core_node2/data/index
[junit4] 2> 353746 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 353746 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 353746 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 353751 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 353752 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=47, maxMergeAtOnceExplicit=38, maxMergedSegmentMB=13.1748046875, floorSegmentMB=1.70703125, forceMergeDeletesPctAllowed=25.99013591838012, segmentsPerTier=34.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0, deletesPctAllowed=43.629115622877606]
[junit4] 2> 353769 WARN (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 353809 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 353809 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 353809 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
[junit4] 2> 353820 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 353820 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 353822 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=13, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0]
[junit4] 2> 353831 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@40a85045[collection1_shard1_replica_n1] main]
[junit4] 2> 353831 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 353832 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 353832 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000 ms
[junit4] 2> 353833 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1665412251525316608
[junit4] 2> 353836 INFO (searcherExecutor-3569-thread-1-processing-n:127.0.0.1:43629_ x:collection1_shard1_replica_n1 c:collection1 s:shard1) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SolrCore [collection1_shard1_replica_n1] Registered new searcher Searcher@40a85045[collection1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 353838 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/collection1/terms/shard1 to Terms{values={core_node2=0}, version=0}
[junit4] 2> 353838 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/collection1/leaders/shard1
[junit4] 2> 353840 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
[junit4] 2> 353840 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
[junit4] 2> 353840 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:43629/collection1_shard1_replica_n1/
[junit4] 2> 353841 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
[junit4] 2> 353841 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.SyncStrategy http://127.0.0.1:43629/collection1_shard1_replica_n1/ has no replicas
[junit4] 2> 353841 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/collection1/leaders/shard1/leader after winning as /collections/collection1/leader_elect/shard1/election/73675184762781705-core_node2-n_0000000000
[junit4] 2> 353842 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:43629/collection1_shard1_replica_n1/ shard1
[junit4] 2> 353944 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ c:collection1 s:shard1 x:collection1_shard1_replica_n1 ] o.a.s.c.ZkController I am the leader, no recovery necessary
[junit4] 2> 353952 INFO (qtp1182427401-6528) [n:127.0.0.1:43629_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n1&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1381
[junit4] 2> 353954 INFO (qtp812776851-6464) [n:127.0.0.1:37127_ c:collection1 ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={node=127.0.0.1:43629_&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2} status=0 QTime=1425
[junit4] 2> 353954 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.c.AbstractFullDistribZkTestBase Waiting to see 1 active replicas in collection: collection1
[junit4] 2> 354048 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.SolrTestCaseJ4 ###Starting testChecksumsOnlyVerbose
[junit4] 2> 354532 INFO (OverseerCollectionConfigSetProcessor-73675184762781700-127.0.0.1:37127_-n_0000000000) [n:127.0.0.1:37127_ ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000004 doesn't exist. Requestor may have disconnected from ZooKeeper
[junit4] 2> 361903 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.s.h.HdfsDirectory Closing hdfs directory hdfs://localhost.localdomain:45323/solr
[junit4] 2> 361905 INFO (TEST-CheckHdfsIndexTest.testChecksumsOnlyVerbose-seed#[C69240967B2E1AE5]) [ ] o.a.s.SolrTestCaseJ4 ###Ending testChecksumsOnlyVerbose
[junit4] 2> 362012 INFO (closeThreadPool-3576-thread-2) [ ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=792218873
[junit4] 2> 362012 INFO (closeThreadPool-3576-thread-2) [ ] o.a.s.c.ZkController Remove node as live in ZooKeeper:/live_nodes/127.0.0.1:37127_
[junit4] 2> 362014 INFO (closeThreadPool-3576-thread-2) [ ] o.a.s.c.ZkController Publish this node as DOWN...
[junit4] 2> 362014 INFO (closeThreadPool-3576-thread-2) [ ] o.a.s.c.ZkController Publish node=127.0.0.1:37127_ as DOWN
[junit4] 2> 362018 INFO (closeThreadPool-3576-thread-1) [ ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=1495068483
[junit4] 2> 362018 INFO (closeThreadPool-3576-thread-1) [ ] o.a.s.c.ZkController Remove node as live in ZooKeeper:/live_nodes/127.0.0.1:43629_
[junit4] 2> 362018 INFO (closeThreadPool-3576-thread-1) [ ] o.a.s.c.ZkController Publish this node as DOWN...
[junit4] 2> 362018 INFO (closeThreadPool-3576-thread-1) [ ] o.a.s.c.ZkController Publish node=127.0.0.1:43629_ as DOWN
[junit4] 2> 362020 INFO (zkCallback-3513-thread-2) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [2])
[junit4] 2> 362020 INFO (zkCallback-3513-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [2])
[junit4] 2> 362020 INFO (zkCallback-3513-thread-3) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [2])
[junit4] 2> 362022 INFO (coreCloseExecutor-3583-thread-1) [n:127.0.0.1:37127_ ] o.a.s.c.SolrCore [control_collection_shard1_replica_n1] CLOSING SolrCore org.apache.solr.core.SolrCore@1d308a8b
[junit4] 2> 362022 INFO (coreCloseExecutor-3583-thread-1) [n:127.0.0.1:37127_ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.core.control_collection.shard1.replica_n1 tag=SolrCore@1d308a8b
[junit4] 2> 362022 INFO (coreCloseExecutor-3583-thread-1) [n:127.0.0.1:37127_ ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@4abf5807: rootName = null, domain = solr.core.control_collection.shard1.replica_n1, service url = null, agent id = null] for registry solr.core.control_collection.shard1.replica_n1/com.codahale.metrics.MetricRegistry@2e9198d5
[junit4] 2> 362028 INFO (coreCloseExecutor-3584-thread-1) [n:127.0.0.1:43629_ ] o.a.s.c.SolrCore [collection1_shard1_replica_n1] CLOSING SolrCore org.apache.solr.core.SolrCore@5a41c5f6
[junit4] 2> 362028 INFO (coreCloseExecutor-3584-thread-1) [n:127.0.0.1:43629_ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.core.collection1.shard1.replica_n1 tag=SolrCore@5a41c5f6
[junit4] 2> 362028 INFO (coreCloseExecutor-3584-thread-1) [n:127.0.0.1:43629_ ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@53d7bedb: rootName = null, domain = solr.core.collection1.shard1.replica_n1, service url = null, agent id = null] for registry solr.core.collection1.shard1.replica_n1/com.codahale.metrics.MetricRegistry@6a1b3f29
[junit4] 2> 362047 INFO (coreCloseExecutor-3583-thread-1) [n:127.0.0.1:37127_ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.collection.control_collection.shard1.leader tag=SolrCore@1d308a8b
[junit4] 2> 362058 INFO (coreCloseExecutor-3583-thread-1) [n:127.0.0.1:37127_ ] o.a.s.u.DirectUpdateHandler2 Committing on IndexWriter.close() ... SKIPPED (unnecessary).
[junit4] 2> 362086 INFO (coreCloseExecutor-3583-thread-1) [n:127.0.0.1:37127_ ] o.a.s.s.h.HdfsDirectory Closing hdfs directory hdfs://localhost.localdomain:45323/solr_hdfs_home/control_collection/core_node2/data/index
[junit4] 2> 362090 INFO (coreCloseExecutor-3584-thread-1) [n:127.0.0.1:43629_ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.collection.collection1.shard1.leader tag=SolrCore@5a41c5f6
[junit4] 2> 362091 INFO (coreCloseExecutor-3584-thread-1) [n:127.0.0.1:43629_ ] o.a.s.u.DirectUpdateHandler2 Committing on IndexWriter.close() ... SKIPPED (unnecessary).
[junit4] 2> 362093 INFO (coreCloseExecutor-3583-thread-1) [n:127.0.0.1:37127_ ] o.a.s.s.h.HdfsDirectory Closing hdfs directory hdfs://localhost.localdomain:45323/solr_hdfs_home/control_collection/core_node2/data/snapshot_metadata
[junit4] 2> 362093 INFO (coreCloseExecutor-3583-thread-1) [n:127.0.0.1:37127_ ] o.a.s.s.h.HdfsDirectory Closing hdfs directory hdfs://localhost.localdomain:45323/solr_hdfs_home/control_collection/core_node2/data
[junit4] 2> 362109 INFO (closeThreadPool-3576-thread-2) [ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.node tag=null
[junit4] 2> 362109 INFO (closeThreadPool-3576-thread-2) [ ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@4bc59a58: rootName = null, domain = solr.node, service url = null, agent id = null] for registry solr.node/com.codahale.metrics.MetricRegistry@2d2c487f
[junit4] 2> 362112 INFO (closeThreadPool-3576-thread-2) [ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jvm tag=null
[junit4] 2> 362112 INFO (closeThreadPool-3576-thread-2) [ ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@1c105930: rootName = null, domain = solr.jvm, service url = null, agent id = null] for registry solr.jvm/com.codahale.metrics.MetricRegistry@17bce135
[junit4] 2> 362114 INFO (closeThreadPool-3576-thread-2) [ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jetty tag=null
[junit4] 2> 362114 INFO (closeThreadPool-3576-thread-2) [ ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@865c5e1: rootName = null, domain = solr.jetty, service url = null, agent id = null] for registry solr.jetty/com.codahale.metrics.MetricRegistry@2e611a02
[junit4] 2> 362114 INFO (closeThreadPool-3576-thread-2) [ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.cluster tag=null
[junit4] 2> 362120 INFO (closeThreadPool-3586-thread-2) [ ] o.a.s.c.Overseer Overseer (id=73675184762781700-127.0.0.1:37127_-n_0000000000) closing
[junit4] 2> 362128 INFO (OverseerStateUpdate-73675184762781700-127.0.0.1:37127_-n_0000000000) [n:127.0.0.1:37127_ ] o.a.s.c.Overseer Overseer Loop exiting : 127.0.0.1:37127_
[junit4] 2> 362128 INFO (OverseerAutoScalingTriggerThread-73675184762781700-127.0.0.1:37127_-n_0000000000) [ ] o.a.s.c.a.OverseerTriggerThread OverseerTriggerThread woken up but we are closed, exiting.
[junit4] 2> 362136 INFO (coreCloseExecutor-3584-thread-1) [n:127.0.0.1:43629_ ] o.a.s.s.h.HdfsDirectory Closing hdfs directory hdfs://localhost.localdomain:45323/solr_hdfs_home/collection1/core_node2/data/snapshot_metadata
[junit4] 2> 362136 INFO (coreCloseExecutor-3584-thread-1) [n:127.0.0.1:43629_ ] o.a.s.s.h.HdfsDirectory Closing hdfs directory hdfs://localhost.localdomain:45323/solr_hdfs_home/collection1/core_node2/data
[junit4] 2> 362141 INFO (closeThreadPool-3586-thread-1) [ ] o.a.s.c.Overseer Overseer (id=73675184762781700-127.0.0.1:37127_-n_0000000000) closing
[junit4] 2> 362143 INFO (coreCloseExecutor-3584-thread-1) [n:127.0.0.1:43629_ ] o.a.s.s.h.HdfsDirectory Closing hdfs directory hdfs://localhost.localdomain:45323/solr_hdfs_home/collection1/core_node2/data/index
[junit4] 2> 362148 INFO (closeThreadPool-3576-thread-1) [ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.node tag=null
[junit4] 2> 36214
[...truncated too long message...]
estRuleMarkFailure.java:47) ~[java/:?]
[junit4] 2> at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64) ~[java/:?]
[junit4] 2> at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54) ~[java/:?]
[junit4] 2> at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36) ~[randomizedtesting-runner-2.7.2.jar:?]
[junit4] 2> at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368) ~[randomizedtesting-runner-2.7.2.jar:?]
[junit4] 2> at java.lang.Thread.run(Thread.java:748) [?:1.8.0_191]
[junit4] 2> 430489 WARN (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.a.h.h.s.d.DirectoryScanner DirectoryScanner: shutdown has been called
[junit4] 2> 430495 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.w.WebAppContext@7fa46280{datanode,/,null,UNAVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/datanode}
[junit4] 2> 430496 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.AbstractConnector Stopped ServerConnector@130f265{HTTP/1.1, (http/1.1)}{localhost:0}
[junit4] 2> 430496 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.session node0 Stopped scavenging
[junit4] 2> 430496 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@39a8c1d7{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,UNAVAILABLE}
[junit4] 2> 430498 WARN (BP-319594231-127.0.0.1-1588260881449 heartbeating to localhost.localdomain/127.0.0.1:45323) [ ] o.a.h.h.s.d.IncrementalBlockReportManager IncrementalBlockReportManager interrupted
[junit4] 2> 430498 WARN (BP-319594231-127.0.0.1-1588260881449 heartbeating to localhost.localdomain/127.0.0.1:45323) [ ] o.a.h.h.s.d.DataNode Ending block pool service for: Block pool BP-319594231-127.0.0.1-1588260881449 (Datanode Uuid dbb39209-6d23-4d86-8394-01740c9f3c51) service to localhost.localdomain/127.0.0.1:45323
[junit4] 2> 430517 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.w.WebAppContext@25c3ab12{hdfs,/,null,UNAVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/hdfs}
[junit4] 2> 430518 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.AbstractConnector Stopped ServerConnector@7619f542{HTTP/1.1, (http/1.1)}{localhost.localdomain:0}
[junit4] 2> 430518 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.session node0 Stopped scavenging
[junit4] 2> 430518 INFO (SUITE-CheckHdfsIndexTest-seed#[C69240967B2E1AE5]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@40e6d4b5{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,UNAVAILABLE}
[junit4] 2> NOTE: leaving temporary files on disk at: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.index.hdfs.CheckHdfsIndexTest_C69240967B2E1AE5-001
[junit4] 2> Apr 30, 2020 3:36:07 PM com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
[junit4] 2> WARNING: Will linger awaiting termination of 17 leaked thread(s).
[junit4] 2> NOTE: test params are: codec=Asserting(Lucene84): {date=FST50, rnd_b=Lucene84, field=FST50, docid=Lucene84, multiDefault=FST50, _root_=Lucene84, titleTokenized=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene84)), id=Lucene84, body=FST50, title=Lucene84}, docValues:{range_facet_l_dv=DocValuesFormat(name=Asserting), n_l1=DocValuesFormat(name=Direct), intDefault=DocValuesFormat(name=Lucene80), n_dt1=DocValuesFormat(name=Lucene80), n_td1=DocValuesFormat(name=Lucene80), n_d1=DocValuesFormat(name=Direct), range_facet_l=DocValuesFormat(name=Direct), n_f1=DocValuesFormat(name=Lucene80), n_ti1=DocValuesFormat(name=Asserting), docid_intDV=DocValuesFormat(name=Lucene80), n_tl1=DocValuesFormat(name=Lucene80), _version_=DocValuesFormat(name=Lucene80), n_tf1=DocValuesFormat(name=Direct), n_tdt1=DocValuesFormat(name=Lucene80), id_i1=DocValuesFormat(name=Lucene80), range_facet_i_dv=DocValuesFormat(name=Direct), intDvoDefault=DocValuesFormat(name=Asserting), titleDV=DocValuesFormat(name=Lucene80), timestamp=DocValuesFormat(name=Direct)}, maxPointsInLeafNode=272, maxMBSortInHeap=5.856085423655261, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@7af78dd3), locale=ar-AE, timezone=SystemV/AST4
[junit4] 2> NOTE: Linux 4.15.0-54-generic amd64/Oracle Corporation 1.8.0_191 (64-bit)/cpus=4,threads=7,free=255303848,total=525860864
[junit4] 2> NOTE: All tests run in this JVM: [HLLSerializationTest, LeaderTragicEventTest, DistributedFacetPivotWhiteBoxTest, BlobRepositoryCloudTest, SuggesterTest, NoCacheHeaderTest, TimeZoneUtilsTest, TestHttpServletCarrier, BadComponentTest, PrimUtilsTest, DistributedFacetPivotLongTailTest, TestManagedSchemaThreadSafety, TestLegacyNumericRangeQueryBuilder, TestCSVResponseWriter, TestApiFramework, DistributedQueryComponentOptimizationTest, TestFilteredDocIdSet, ChaosMonkeySafeLeaderTest, DistribJoinFromCollectionTest, TestDistribIDF, TestRetrieveFieldsOptimizer, DeleteStatusTest, TestManagedSynonymGraphFilterFactory, CachingDirectoryFactoryTest, CoreAdminHandlerTest, TestUpdate, ZkStateWriterTest, HdfsRecoverLeaseTest, TestLMDirichletSimilarityFactory, TestSortableTextField, UpdateParamsTest, TestHashPartitioner, VMParamsZkACLAndCredentialsProvidersTest, TestChildDocTransformerHierarchy, TestSimExtremeIndexing, TestStressReorder, JSONWriterTest, RequestHandlersTest, TolerantUpdateProcessorTest, RecoveryZkTest, AtomicUpdateProcessorFactoryTest, TestDynamicLoading, TestCollectionsAPIViaSolrCloudCluster, TestCloudSearcherWarming, FileUtilsTest, TestUseDocValuesAsStored2, CheckHdfsIndexTest]
[junit4] Completed [113/907 (1!)] on J0 in 88.97s, 5 tests, 1 error, 1 skipped <<< FAILURES!
[...truncated 40697 lines...]
-ecj-javadoc-lint-src:
[mkdir] Created dir: /tmp/ecj1162565120
[ecj-lint] Compiling 931 source files to /tmp/ecj1162565120
[ecj-lint] ----------
[ecj-lint] 1. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java (at line 523)
[ecj-lint] throw new CorruptIndexException("misplaced codec footer (file truncated?): length=" + in.length() + " but footerLength==" + footerLength(), input);
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'in' is not closed at this location
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 2. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsReader.java (at line 166)
[ecj-lint] FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, INDEX_EXTENSION_PREFIX, INDEX_CODEC_NAME, si.getId());
[ecj-lint] ^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'fieldsIndexReader' is never closed
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 3. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsReader.java (at line 148)
[ecj-lint] FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, VECTORS_INDEX_EXTENSION_PREFIX, VECTORS_INDEX_CODEC_NAME, si.getId());
[ecj-lint] ^^^^^^^^^^^^^^^^^
[ecj-lint] Resource leak: 'fieldsIndexReader' is never closed
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 4. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java (at line 50)
[ecj-lint] import org.apache.lucene.util.automaton.ByteRunAutomaton;
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] The import org.apache.lucene.util.automaton.ByteRunAutomaton is never used
[ecj-lint] ----------
[ecj-lint] ----------
[ecj-lint] 5. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java (at line 742)
[ecj-lint] Integer q = newstate.get(statesSet);
[ecj-lint] ^^^^^^^^^
[ecj-lint] Unlikely argument type SortedIntSet for get(Object) on a Map<SortedIntSet.FrozenIntSet,Integer>
[ecj-lint] ----------
[ecj-lint] 5 problems (1 error, 4 warnings)
BUILD FAILED
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/build.xml:634: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/build.xml:101: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/build.xml:201: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/common-build.xml:2127: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/common-build.xml:2166: Compile failed; see the compiler error output for details.
Total time: 71 minutes 29 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any