Posted to builds@lucene.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2020/07/03 08:11:53 UTC

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 2242 - Unstable

Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/2242/

3 tests failed.
FAILED:  org.apache.solr.cloud.api.collections.ShardSplitTest.testSplitWithChaosMonkey

Error Message:
Address already in use

Stack Trace:
java.net.BindException: Address already in use
	at __randomizedtesting.SeedInfo.seed([6243E2A8ECD8B7B7:E9643179ADDE1C33]:0)
	at java.base/sun.nio.ch.Net.bind0(Native Method)
	at java.base/sun.nio.ch.Net.bind(Net.java:461)
	at java.base/sun.nio.ch.Net.bind(Net.java:453)
	at java.base/sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:227)
	at java.base/sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:80)
	at org.eclipse.jetty.server.ServerConnector.openAcceptChannel(ServerConnector.java:342)
	at org.eclipse.jetty.server.ServerConnector.open(ServerConnector.java:307)
	at org.eclipse.jetty.server.AbstractNetworkConnector.doStart(AbstractNetworkConnector.java:80)
	at org.eclipse.jetty.server.ServerConnector.doStart(ServerConnector.java:231)
	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:72)
	at org.eclipse.jetty.server.Server.doStart(Server.java:385)
	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:72)
	at org.apache.solr.client.solrj.embedded.JettySolrRunner.retryOnPortBindFailure(JettySolrRunner.java:566)
	at org.apache.solr.client.solrj.embedded.JettySolrRunner.start(JettySolrRunner.java:504)
	at org.apache.solr.client.solrj.embedded.JettySolrRunner.start(JettySolrRunner.java:472)
	at org.apache.solr.cloud.api.collections.ShardSplitTest.testSplitWithChaosMonkey(ShardSplitTest.java:497)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1754)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:942)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:978)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:992)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1090)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1061)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:370)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:819)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:470)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:951)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:836)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:887)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:898)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:370)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.lambda$forkTimeoutingTask$0(ThreadLeakControl.java:826)
	at java.base/java.lang.Thread.run(Thread.java:834)
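
The BindException above surfaces out of JettySolrRunner.retryOnPortBindFailure, i.e. the test harness tried to (re)bind a Jetty connector and every attempt landed on a port that was already taken. As a rough illustration only (plain JDK, not the actual Solr/Jetty code; the retry count and sleep below are invented), the retry-on-bind pattern looks like this:

    import java.io.IOException;
    import java.net.BindException;
    import java.net.ServerSocket;

    public class BindRetrySketch {

        // Try to bind a port, retrying a few times before giving up; the final
        // BindException ("Address already in use") is rethrown, which is roughly
        // what the trace above shows happening inside retryOnPortBindFailure.
        static ServerSocket bindWithRetry(int port, int attempts) throws IOException, InterruptedException {
            BindException last = null;
            for (int i = 0; i < attempts; i++) {
                try {
                    return new ServerSocket(port);   // port 0 = let the OS pick a free ephemeral port
                } catch (BindException e) {
                    last = e;                        // port taken, remember the failure
                    Thread.sleep(100);               // brief pause before the next attempt
                }
            }
            throw last;
        }

        public static void main(String[] args) throws Exception {
            try (ServerSocket socket = bindWithRetry(0, 3)) {
                System.out.println("bound to port " + socket.getLocalPort());
            }
        }
    }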


FAILED:  junit.framework.TestSuite.org.apache.solr.cloud.api.collections.ShardSplitTest

Error Message:
10 threads leaked from SUITE scope at org.apache.solr.cloud.api.collections.ShardSplitTest:
   1) Thread[id=3084, name=qtp1827410635-3084, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   2) Thread[id=3083, name=qtp1827410635-3083, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   3) Thread[id=3080, name=qtp1827410635-3080, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.6/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:472)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:409)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:184)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:135)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$226/0x0000000100455c40.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   4) Thread[id=3168, name=Connector-Scheduler-68d39b8e-1, state=WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.park(LockSupport.java:194)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2081)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1170)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   5) Thread[id=3081, name=qtp1827410635-3081, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.6/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:472)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:409)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:184)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:135)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$226/0x0000000100455c40.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   6) Thread[id=3082, name=qtp1827410635-3082-acceptor-0@3df2da8-ServerConnector@68d39b8e{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:44119}, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/sun.nio.ch.ServerSocketChannelImpl.accept0(Native Method)
        at java.base@11.0.6/sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:533)
        at java.base@11.0.6/sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:285)
        at app//org.eclipse.jetty.server.ServerConnector.accept(ServerConnector.java:385)
        at app//org.eclipse.jetty.server.AbstractConnector$Acceptor.run(AbstractConnector.java:702)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   7) Thread[id=3085, name=qtp1827410635-3085, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   8) Thread[id=3088, name=Session-HouseKeeper-780e3d40-1, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   9) Thread[id=3087, name=qtp1827410635-3087, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
  10) Thread[id=3086, name=qtp1827410635-3086, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)

Stack Trace:
com.carrotsearch.randomizedtesting.ThreadLeakError: 10 threads leaked from SUITE scope at org.apache.solr.cloud.api.collections.ShardSplitTest: 
   1) Thread[id=3084, name=qtp1827410635-3084, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   2) Thread[id=3083, name=qtp1827410635-3083, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   3) Thread[id=3080, name=qtp1827410635-3080, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.6/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:472)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:409)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:184)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:135)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$226/0x0000000100455c40.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   4) Thread[id=3168, name=Connector-Scheduler-68d39b8e-1, state=WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.park(LockSupport.java:194)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2081)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1170)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   5) Thread[id=3081, name=qtp1827410635-3081, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.6/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:472)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:409)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:184)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:135)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$226/0x0000000100455c40.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   6) Thread[id=3082, name=qtp1827410635-3082-acceptor-0@3df2da8-ServerConnector@68d39b8e{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:44119}, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/sun.nio.ch.ServerSocketChannelImpl.accept0(Native Method)
        at java.base@11.0.6/sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:533)
        at java.base@11.0.6/sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:285)
        at app//org.eclipse.jetty.server.ServerConnector.accept(ServerConnector.java:385)
        at app//org.eclipse.jetty.server.AbstractConnector$Acceptor.run(AbstractConnector.java:702)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   7) Thread[id=3085, name=qtp1827410635-3085, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   8) Thread[id=3088, name=Session-HouseKeeper-780e3d40-1, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   9) Thread[id=3087, name=qtp1827410635-3087, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
  10) Thread[id=3086, name=qtp1827410635-3086, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
	at __randomizedtesting.SeedInfo.seed([6243E2A8ECD8B7B7]:0)
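
All ten leaked threads belong to a single Jetty instance: the qtp1827410635-* pool threads, their acceptor, the Connector-Scheduler and the Session-HouseKeeper. That signature suggests a JettySolrRunner that was still running when the suite ended (plausibly the one the chaos-monkey test failed to restart above, though the log does not prove that). For illustration only, with plain embedded Jetty rather than the Solr test harness, the lifecycle that avoids leaving those threads behind looks like:

    import org.eclipse.jetty.server.Server;

    public class JettyLifecycleSketch {
        public static void main(String[] args) throws Exception {
            // Port 0 asks the OS for a free ephemeral port, as the Solr test harness does.
            Server server = new Server(0);
            server.start();
            try {
                System.out.println("listening at " + server.getURI());
                // ... exercise the server ...
            } finally {
                // Skipping stop()/join() leaves the connector's thread pool running, which
                // shows up as leaked qtp-*/scheduler threads like the ones listed above.
                server.stop();
                server.join();
            }
        }
    }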


FAILED:  junit.framework.TestSuite.org.apache.solr.cloud.api.collections.ShardSplitTest

Error Message:
There are still zombie threads that couldn't be terminated:
   1) Thread[id=3084, name=qtp1827410635-3084, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   2) Thread[id=3083, name=qtp1827410635-3083, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   3) Thread[id=3080, name=qtp1827410635-3080, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.6/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:472)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:409)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:184)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:135)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$226/0x0000000100455c40.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   4) Thread[id=3168, name=Connector-Scheduler-68d39b8e-1, state=WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.park(LockSupport.java:194)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2081)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1170)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   5) Thread[id=3081, name=qtp1827410635-3081, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.6/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:472)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:409)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:184)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:135)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$226/0x0000000100455c40.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   6) Thread[id=3082, name=qtp1827410635-3082, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   7) Thread[id=3085, name=qtp1827410635-3085, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   8) Thread[id=3088, name=Session-HouseKeeper-780e3d40-1, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   9) Thread[id=3087, name=qtp1827410635-3087, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
  10) Thread[id=3086, name=qtp1827410635-3086, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)

Stack Trace:
com.carrotsearch.randomizedtesting.ThreadLeakError: There are still zombie threads that couldn't be terminated:
   1) Thread[id=3084, name=qtp1827410635-3084, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   2) Thread[id=3083, name=qtp1827410635-3083, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   3) Thread[id=3080, name=qtp1827410635-3080, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.6/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:472)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:409)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:184)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:135)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$226/0x0000000100455c40.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   4) Thread[id=3168, name=Connector-Scheduler-68d39b8e-1, state=WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.park(LockSupport.java:194)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2081)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1170)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   5) Thread[id=3081, name=qtp1827410635-3081, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.6/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.6/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:472)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:409)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:184)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:135)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$226/0x0000000100455c40.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   6) Thread[id=3082, name=qtp1827410635-3082, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   7) Thread[id=3085, name=qtp1827410635-3085, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   8) Thread[id=3088, name=Session-HouseKeeper-780e3d40-1, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
        at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   9) Thread[id=3087, name=qtp1827410635-3087, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
  10) Thread[id=3086, name=qtp1827410635-3086, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
        at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
	at __randomizedtesting.SeedInfo.seed([6243E2A8ECD8B7B7]:0)
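
This third failure is the follow-up to the second: after flagging the leak, the test framework interrupts the leaked threads, waits briefly, and reports any that are still alive as zombies. Conceptually (plain JDK here, not the actual ThreadLeakControl logic; the names and timings below are invented), that check amounts to:

    import java.util.List;

    public class ZombieCheckSketch {

        // Interrupt suspected leaked threads, give each a short grace period, and
        // report any that are still alive, roughly what is reported above as "zombie" threads.
        static void interruptAndReport(List<Thread> leaked, long graceMillis) throws InterruptedException {
            for (Thread t : leaked) {
                t.interrupt();
            }
            for (Thread t : leaked) {
                t.join(graceMillis);
                if (t.isAlive()) {
                    System.err.println("zombie thread: " + t.getName() + " state=" + t.getState());
                }
            }
        }

        public static void main(String[] args) throws InterruptedException {
            // A worker that swallows interrupts, standing in for a pool thread that refuses to exit.
            Thread stubborn = new Thread(() -> {
                while (true) {
                    try { Thread.sleep(1_000); } catch (InterruptedException ignored) { }
                }
            }, "stubborn-worker");
            stubborn.setDaemon(true);
            stubborn.start();
            interruptAndReport(List.of(stubborn), 200);
        }
    }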




Build Log:
[...truncated 12537 lines...]
   [junit4] Suite: org.apache.solr.cloud.api.collections.ShardSplitTest
   [junit4]   2> 159523 INFO  (SUITE-ShardSplitTest-seed#[6243E2A8ECD8B7B7]-worker) [     ] o.a.s.SolrTestCase Setting 'solr.default.confdir' system property to test-framework derived value of '/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/server/solr/configsets/_default/conf'
   [junit4]   2> 159523 INFO  (SUITE-ShardSplitTest-seed#[6243E2A8ECD8B7B7]-worker) [     ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 159524 INFO  (SUITE-ShardSplitTest-seed#[6243E2A8ECD8B7B7]-worker) [     ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.api.collections.ShardSplitTest_6243E2A8ECD8B7B7-001/data-dir-20-001
   [junit4]   2> 159524 WARN  (SUITE-ShardSplitTest-seed#[6243E2A8ECD8B7B7]-worker) [     ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=1 numCloses=1
   [junit4]   2> 159524 INFO  (SUITE-ShardSplitTest-seed#[6243E2A8ECD8B7B7]-worker) [     ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 159525 INFO  (SUITE-ShardSplitTest-seed#[6243E2A8ECD8B7B7]-worker) [     ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl="https://issues.apache.org/jira/browse/SOLR-5776")
   [junit4]   2> 159525 INFO  (SUITE-ShardSplitTest-seed#[6243E2A8ECD8B7B7]-worker) [     ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   2> 159528 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 159528 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer client port: 0.0.0.0/0.0.0.0:0
   [junit4]   2> 159529 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 159529 WARN  (ZkTestServer Run Thread) [     ] o.a.z.s.ServerCnxnFactory maxCnxns is not configured, using default value 0.
   [junit4]   2> 159628 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer start zk server on port: 36275
   [junit4]   2> 159629 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:36275
   [junit4]   2> 159629 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:36275
   [junit4]   2> 159629 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 36275
   [junit4]   2> 159632 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 159644 INFO  (zkConnectionManagerCallback-2070-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 159644 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 159658 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 159667 INFO  (zkConnectionManagerCallback-2072-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 159667 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 159668 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 159669 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/schema15.xml to /configs/conf1/schema.xml
   [junit4]   2> 159671 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 159672 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 159673 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 159675 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 159676 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 159677 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 159679 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 159680 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 159681 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 159682 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly asked otherwise
   [junit4]   2> 159850 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 159850 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 2 ...
   [junit4]   2> 159850 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 11.0.6+8-LTS
   [junit4]   2> 159852 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 159852 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 159852 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 159854 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@f1dba5e{/,null,AVAILABLE}
   [junit4]   2> 159854 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.e.j.s.AbstractConnector Started ServerConnector@2edd328d{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:44117}
   [junit4]   2> 159854 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.e.j.s.Server Started @159907ms
   [junit4]   2> 159854 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/, solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.api.collections.ShardSplitTest_6243E2A8ECD8B7B7-001/tempDir-001/control/data, hostPort=44117, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.api.collections.ShardSplitTest_6243E2A8ECD8B7B7-001/control-001/cores, replicaType=NRT}
   [junit4]   2> 159854 ERROR (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 159855 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 159855 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 159855 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 159855 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr
   [junit4]   2> 159855 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2020-07-03T06:34:02.711165Z
   [junit4]   2> 159859 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 159863 INFO  (zkConnectionManagerCallback-2074-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 159863 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 159964 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 159964 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.api.collections.ShardSplitTest_6243E2A8ECD8B7B7-001/control-001/solr.xml
   [junit4]   2> 159969 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@1675594a, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 160790 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 160792 WARN  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@2894e39[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 160792 WARN  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@2894e39[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 160798 WARN  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@296756e0[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 160798 WARN  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@296756e0[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 160799 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:36275/solr
   [junit4]   2> 160800 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 160803 INFO  (zkConnectionManagerCallback-2085-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 160803 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 160805 WARN  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]-SendThread(127.0.0.1:36275)) [     ] o.a.z.ClientCnxn An exception was thrown while closing send thread for session 0x1000c408c4f0003.
   [junit4]   2>           => EndOfStreamException: Unable to read additional data from server sessionid 0x1000c408c4f0003, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:75)
   [junit4]   2> org.apache.zookeeper.ClientCnxn$EndOfStreamException: Unable to read additional data from server sessionid 0x1000c408c4f0003, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:75) ~[zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doTransport(ClientCnxnSocketNIO.java:348) ~[zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1262) [zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 160908 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 160915 INFO  (zkConnectionManagerCallback-2087-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 160915 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 161002 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:44117_
   [junit4]   2> 161003 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.c.Overseer Overseer (id=72071065409355780-127.0.0.1:44117_-n_0000000000) starting
   [junit4]   2> 161007 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor Process current queue of overseer operations
   [junit4]   2> 161007 INFO  (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:44117_
   [junit4]   2> 161008 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:44117_
   [junit4]   2> 161009 INFO  (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 161010 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor Cleaning up work-queue. #Running tasks: 0 #Completed tasks: 0
   [junit4]   2> 161010 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor RunningTasks: []
   [junit4]   2> 161010 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 161010 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor CompletedTasks: []
   [junit4]   2> 161010 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: []
   [junit4]   2> 161012 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.p.PackageLoader /packages.json updated to version -1
   [junit4]   2> 161012 WARN  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.c.CoreContainer Not all security plugins configured!  authentication=disabled authorization=disabled.  Solr is only as secure as you make it. Consider configuring authentication/authorization before exposing Solr to users internal or external.  See https://s.apache.org/solrsecurity for more info
   [junit4]   2> 161035 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 161059 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1675594a
   [junit4]   2> 161067 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1675594a
   [junit4]   2> 161067 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1675594a
   [junit4]   2> 161068 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [n:127.0.0.1:44117_     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.api.collections.ShardSplitTest_6243E2A8ECD8B7B7-001/control-001/cores
   [junit4]   2> 161079 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 161089 INFO  (zkConnectionManagerCallback-2104-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 161089 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 161091 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 161091 INFO  (TEST-ShardSplitTest.test-seed#[6243E2A8ECD8B7B7]) [     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:36275/solr ready
   [junit4]   2> 161094 INFO  (qtp1026218728-2297) [n:127.0.0.1:44117_     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:44117_&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 161096 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor Got 1 tasks from work-queue : [[org.apache.solr.cloud.OverseerTaskQueue$QueueEvent@2e684ecf]]
   [junit4]   2> 161096 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor Marked task [/overseer/collection-queue-work/qn-0000000000] as running
   [junit4]   2> 161096 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor Overseer Collection Message Handler: Get the message id: /overseer/collection-queue-work/qn-0000000000 message: {
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:44117_",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 161097 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor Runner processing /overseer/collection-queue-work/qn-0000000000
   [junit4]   2> 161097 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.a.c.OverseerCollectionMessageHandler OverseerCollectionMessageHandler.processMessage : create , {
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:44117_",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 161097 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor Cleaning up work-queue. #Running tasks: 1 #Completed tasks: 0
   [junit4]   2> 161097 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor RunningTasks: [/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 161097 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 161097 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor CompletedTasks: []
   [junit4]   2> 161097 DEBUG (OverseerCollectionConfigSetProcessor-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: [/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 161097 INFO  (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.a.c.CreateCollectionCmd Create collection control_collection
   [junit4]   2> 161098 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.a.c.OverseerCollectionMessageHandler creating collections conf node /collections/control_collection 
   [junit4]   2> 161098 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.a.c.CreateCollectionCmd Check for collection zkNode: control_collection
   [junit4]   2> 161099 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.a.c.CreateCollectionCmd Collection zkNode exists
   [junit4]   2> 161100 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:44117_",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 161100 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.ClusterStateMutator building a new cName: control_collection
   [junit4]   2> 161100 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.ZkStateWriter going to create_collection /collections/control_collection/state.json
   [junit4]   2> 161201 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.a.c.Assign shardnames [shard1] NRT 1 TLOG 0 PULL 0 , policy null, nodeList [127.0.0.1:44117_]
   [junit4]   2> 161203 INFO  (qtp1026218728-2299) [n:127.0.0.1:44117_     ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 161204 INFO  (qtp1026218728-2299) [n:127.0.0.1:44117_     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CONTAINER.cores&wt=javabin&version=2&group=solr.node} status=0 QTime=0
   [junit4]   2> 161206 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.a.c.CreateCollectionCmd Creating SolrCores for new collection control_collection, shardNames [shard1] , message : {
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:44117_",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 161211 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.a.c.CreateCollectionCmd Creating core control_collection_shard1_replica_n1 as part of shard shard1 of collection control_collection on 127.0.0.1:44117_
   [junit4]   2> 161213 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.a.c.OverseerCollectionMessageHandler Expecting [control_collection_shard1_replica_n1] cores but found {}
   [junit4]   2> 161215 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:44117",
   [junit4]   2>   "node_name":"127.0.0.1:44117_",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"}
   [junit4]   2> 161216 INFO  (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:44117",
   [junit4]   2>   "node_name":"127.0.0.1:44117_",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 161217 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.SliceMutator Old Slice: shard1:{
   [junit4]   2>   "range":"80000000-7fffffff",
   [junit4]   2>   "state":"active",
   [junit4]   2>   "replicas":{}}
   [junit4]   2> 161217 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.SliceMutator New Slice: shard1:{
   [junit4]   2>   "range":"80000000-7fffffff",
   [junit4]   2>   "state":"active",
   [junit4]   2>   "replicas":{"core_node2":{
   [junit4]   2>       "core":"control_collection_shard1_replica_n1",
   [junit4]   2>       "base_url":"http://127.0.0.1:44117",
   [junit4]   2>       "state":"down",
   [junit4]   2>       "node_name":"127.0.0.1:44117_",
   [junit4]   2>       "type":"NRT"}}}
   [junit4]   2> 161314 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.a.c.OverseerCollectionMessageHandler Expecting [control_collection_shard1_replica_n1] cores but found {}
   [junit4]   2> 161318 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.ZkStateWriter going to update_collection /collections/control_collection/state.json version: 0
   [junit4]   2> 161416 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_    x:control_collection_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node2&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 161420 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "core_node_name":"core_node2",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"http://127.0.0.1:44117",
   [junit4]   2>   "node_name":"127.0.0.1:44117_",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "force_set_state":"false",
   [junit4]   2>   "operation":"state"}
   [junit4]   2> 161420 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.ReplicaMutator Update state numShards=1 message={
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "core_node_name":"core_node2",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"http://127.0.0.1:44117",
   [junit4]   2>   "node_name":"127.0.0.1:44117_",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "force_set_state":"false",
   [junit4]   2>   "operation":"state"}
   [junit4]   2> 161420 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.ReplicaMutator Will update state for replica: core_node2:{
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "base_url":"http://127.0.0.1:44117",
   [junit4]   2>   "node_name":"127.0.0.1:44117_",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "force_set_state":"false"}
   [junit4]   2> 161420 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.ReplicaMutator Collection is now: DocCollection(control_collection/1)={
   [junit4]   2>   "pullReplicas":"0",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "router":{"name":"compositeId"},
   [junit4]   2>   "maxShardsPerNode":"1",
   [junit4]   2>   "autoAddReplicas":"false",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "tlogReplicas":"0",
   [junit4]   2>   "shards":{"shard1":{
   [junit4]   2>       "range":"80000000-7fffffff",
   [junit4]   2>       "state":"active",
   [junit4]   2>       "replicas":{"core_node2":{
   [junit4]   2>           "core":"control_collection_shard1_replica_n1",
   [junit4]   2>           "base_url":"http://127.0.0.1:44117",
   [junit4]   2>           "node_name":"127.0.0.1:44117_",
   [junit4]   2>           "state":"down",
   [junit4]   2>           "type":"NRT",
   [junit4]   2>           "force_set_state":"false"}}}}}
   [junit4]   2> 161435 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 161450 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema Schema name=test
   [junit4]   2> 161521 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema test/1.6 with uniqueid field id
   [junit4]   2> 161522 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.ZkStateWriter going to update_collection /collections/control_collection/state.json version: 1
   [junit4]   2> 161553 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' using configuration from configset conf1, trusted=true
   [junit4]   2> 161554 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry 'solr.core.control_collection.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1675594a
   [junit4]   2> 161557 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.api.collections.ShardSplitTest_6243E2A8ECD8B7B7-001/control-001/cores/control_collection_shard1_replica_n1], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.api.collections.ShardSplitTest_6243E2A8ECD8B7B7-001/control-001/cores/control_collection_shard1_replica_n1/data/]
   [junit4]   2> 161560 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=15, maxMergeAtOnceExplicit=15, maxMergedSegmentMB=70.87109375, floorSegmentMB=1.64453125, forceMergeDeletesPctAllowed=20.305151078848503, segmentsPerTier=20.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.7319307459268234, deletesPctAllowed=36.71872612054525
   [junit4]   2> 161575 WARN  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A, b=B}}}
   [junit4]   2> 161637 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 161637 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 161639 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 161639 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 161640 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=32, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0]
   [junit4]   2> 161643 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 161643 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 161643 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000 ms
   [junit4]   2> 161644 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1671176435269632000
   [junit4]   2> 161649 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 161649 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/control_collection/leaders/shard1
   [junit4]   2> 161650 INFO  (searcherExecutor-2106-thread-1-processing-n:127.0.0.1:44117_ x:control_collection_shard1_replica_n1 c:control_collection s:shard1 r:core_node2) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [control_collection_shard1_replica_n1]  Registered new searcher autowarm time: 0 ms
   [junit4]   2> 161652 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 161652 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 161652 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:44117/control_collection_shard1_replica_n1/
   [junit4]   2> 161652 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 161652 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy http://127.0.0.1:44117/control_collection_shard1_replica_n1/ has no replicas
   [junit4]   2> 161652 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/control_collection/leaders/shard1/leader after winning as /collections/control_collection/leader_elect/shard1/election/72071065409355780-core_node2-n_0000000000
   [junit4]   2> 161654 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:44117/control_collection_shard1_replica_n1/ shard1
   [junit4]   2> 161654 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "operation":"leader",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "base_url":"http://127.0.0.1:44117",
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "state":"active"}
   [junit4]   2> 161755 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.ZkStateWriter going to update_collection /collections/control_collection/state.json version: 2
   [junit4]   2> 161756 INFO  (zkCallback-2086-thread-1) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 161756 INFO  (zkCallback-2086-thread-2) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 161757 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_ c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 161759 INFO  (qtp1026218728-2300) [n:127.0.0.1:44117_     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node2&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=342
   [junit4]   2> 161759 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.a.c.CreateCollectionCmd Finished create command on all shards for collection: control_collection
   [junit4]   2> 161759 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "core_node_name":"core_node2",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"http://127.0.0.1:44117",
   [junit4]   2>   "node_name":"127.0.0.1:44117_",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "state":"active",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "force_set_state":"false",
   [junit4]   2>   "operation":"state"}
   [junit4]   2> 161759 DEBUG (OverseerThreadFactory-2094-thread-1-processing-n:127.0.0.1:44117_) [n:127.0.0.1:44117_     ] o.a.s.c.OverseerTaskProcessor Completed task:[/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 161759 DEBUG (OverseerStateUpdate-72071065409355780-127.0.0.1:44117_-n_0000000000) [n:127.0.0.1:44117_     ] o.a.s.c.o.ReplicaMutator Update state numShards=1 message={
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "core_node_name":"core_node2",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"http://127.0.0.1:44117",
   [junit4]   2>   "node_name":"127.0.0.1:44117_",
   [junit4]   2>   "numShards":"1",
   [junit4]   2

[...truncated too long message...]

se.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
   [junit4]    >         at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   [junit4]    >    3) Thread[id=3080, name=qtp1827410635-3080, state=RUNNABLE, group=TGRP-ShardSplitTest]
   [junit4]    >         at java.base@11.0.6/sun.nio.ch.EPoll.wait(Native Method)
   [junit4]    >         at java.base@11.0.6/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
   [junit4]    >         at java.base@11.0.6/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
   [junit4]    >         at java.base@11.0.6/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
   [junit4]    >         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:472)
   [junit4]    >         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:409)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:360)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:184)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:135)
   [junit4]    >         at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$226/0x0000000100455c40.run(Unknown Source)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
   [junit4]    >         at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   [junit4]    >    4) Thread[id=3168, name=Connector-Scheduler-68d39b8e-1, state=WAITING, group=TGRP-ShardSplitTest]
   [junit4]    >         at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.LockSupport.park(LockSupport.java:194)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2081)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1170)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
   [junit4]    >         at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   [junit4]    >    5) Thread[id=3081, name=qtp1827410635-3081, state=RUNNABLE, group=TGRP-ShardSplitTest]
   [junit4]    >         at java.base@11.0.6/sun.nio.ch.EPoll.wait(Native Method)
   [junit4]    >         at java.base@11.0.6/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
   [junit4]    >         at java.base@11.0.6/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
   [junit4]    >         at java.base@11.0.6/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
   [junit4]    >         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:472)
   [junit4]    >         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:409)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:360)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:184)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:135)
   [junit4]    >         at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$226/0x0000000100455c40.run(Unknown Source)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
   [junit4]    >         at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   [junit4]    >    6) Thread[id=3082, name=qtp1827410635-3082, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
   [junit4]    >         at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
   [junit4]    >         at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
   [junit4]    >         at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   [junit4]    >    7) Thread[id=3085, name=qtp1827410635-3085, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
   [junit4]    >         at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
   [junit4]    >         at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
   [junit4]    >         at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   [junit4]    >    8) Thread[id=3088, name=Session-HouseKeeper-780e3d40-1, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
   [junit4]    >         at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
   [junit4]    >         at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   [junit4]    >    9) Thread[id=3087, name=qtp1827410635-3087, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
   [junit4]    >         at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
   [junit4]    >         at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
   [junit4]    >         at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   [junit4]    >   10) Thread[id=3086, name=qtp1827410635-3086, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
   [junit4]    >         at java.base@11.0.6/jdk.internal.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
   [junit4]    >         at java.base@11.0.6/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
   [junit4]    >         at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:382)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.idleJobPoll(QueuedThreadPool.java:875)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:925)
   [junit4]    >         at java.base@11.0.6/java.lang.Thread.run(Thread.java:834)
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([6243E2A8ECD8B7B7]:0)
   [junit4] Completed [113/915 (1!)] on J2 in 599.02s, 11 tests, 3 errors <<< FAILURES!

[...truncated 54530 lines...]
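For reference, the ShardSplitTest log above records a Collections API CREATE for control_collection (configset conf1, numShards=1, nrtReplicas=1, createNodeSet=127.0.0.1:44117_). A rough SolrJ equivalent of that request -- a sketch only, with a placeholder base URL, since the test serves Jetty at the root context rather than under /solr -- would look like:

    // Sketch of the CREATE request seen in the log above, issued via SolrJ.
    // The base URL is a placeholder for a normal install; the test itself
    // runs at http://127.0.0.1:44117/ with no /solr context.
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;

    public class CreateControlCollection {
      public static void main(String[] args) throws Exception {
        try (HttpSolrClient client =
                 new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
          CollectionAdminRequest.Create create =
              CollectionAdminRequest.createCollection("control_collection", "conf1", 1, 1);
          // The logged request also pinned placement with createNodeSet=127.0.0.1:44117_.
          System.out.println(create.process(client));
        }
      }
    }

The Overseer then picks the resulting task off /overseer/collection-queue-work and creates the shard1 replica, which is what the OverseerTaskProcessor and CreateCollectionCmd DEBUG lines above show.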

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 2243 - Still Unstable

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/2243/

1 tests failed.
FAILED:  junit.framework.TestSuite.org.apache.solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest

Error Message:
ObjectTracker found 1 object(s) that were not released!!! [HdfsTransactionLog] org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.solr.update.HdfsTransactionLog  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.update.HdfsTransactionLog.<init>(HdfsTransactionLog.java:129)  at org.apache.solr.update.HdfsTransactionLog.<init>(HdfsTransactionLog.java:76)  at org.apache.solr.update.HdfsUpdateLog.ensureLog(HdfsUpdateLog.java:341)  at org.apache.solr.update.UpdateLog.deleteByQuery(UpdateLog.java:664)  at org.apache.solr.update.DirectUpdateHandler2.deleteByQuery(DirectUpdateHandler2.java:532)  at org.apache.solr.update.processor.RunUpdateProcessorFactory$RunUpdateProcessor.processDelete(RunUpdateProcessorFactory.java:83)  at org.apache.solr.update.processor.UpdateRequestProcessor.processDelete(UpdateRequestProcessor.java:59)  at org.apache.solr.update.processor.DistributedUpdateProcessor.doLocalDelete(DistributedUpdateProcessor.java:265)  at org.apache.solr.update.processor.DistributedUpdateProcessor.doLocalDeleteByQuery(DistributedUpdateProcessor.java:913)  at org.apache.solr.update.processor.DistributedUpdateProcessor.versionDeleteByQuery(DistributedUpdateProcessor.java:884)  at org.apache.solr.update.processor.DistributedUpdateProcessor.doDeleteByQuery(DistributedUpdateProcessor.java:845)  at org.apache.solr.update.processor.DistributedZkUpdateProcessor.doDeleteByQuery(DistributedZkUpdateProcessor.java:458)  at org.apache.solr.update.processor.DistributedUpdateProcessor.processDelete(DistributedUpdateProcessor.java:748)  at org.apache.solr.update.processor.DistributedZkUpdateProcessor.processDelete(DistributedZkUpdateProcessor.java:313)  at org.apache.solr.update.processor.LogUpdateProcessorFactory$LogUpdateProcessor.processDelete(LogUpdateProcessorFactory.java:129)  at org.apache.solr.handler.loader.JavabinLoader.delete(JavabinLoader.java:211)  at org.apache.solr.handler.loader.JavabinLoader.parseAndLoadDocs(JavabinLoader.java:127)  at org.apache.solr.handler.loader.JavabinLoader.load(JavabinLoader.java:70)  at org.apache.solr.handler.UpdateRequestHandler$1.load(UpdateRequestHandler.java:97)  at org.apache.solr.handler.ContentStreamHandlerBase.handleRequestBody(ContentStreamHandlerBase.java:68)  at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:212)  at org.apache.solr.core.SolrCore.execute(SolrCore.java:2605)  at org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:812)  at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:588)  at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:415)  at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:345)  at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)  at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166)  at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)  at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:545)  at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)  at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1610)  at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)  at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1300)  at 
org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)  at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:485)  at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1580)  at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)  at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1215)  at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)  at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)  at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322)  at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:717)  at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)  at org.eclipse.jetty.server.Server.handle(Server.java:500)  at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383)  at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:547)  at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375)  at org.eclipse.jetty.server.HttpChannel.run(HttpChannel.java:335)  at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)  at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)  at java.base/java.lang.Thread.run(Thread.java:834)   expected null, but was:<ObjectTracker found 1 object(s) that were not released!!! [HdfsTransactionLog] org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.solr.update.HdfsTransactionLog  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.update.HdfsTransactionLog.<init>(HdfsTransactionLog.java:129)  at org.apache.solr.update.HdfsTransactionLog.<init>(HdfsTransactionLog.java:76)  at org.apache.solr.update.HdfsUpdateLog.ensureLog(HdfsUpdateLog.java:341)  at org.apache.solr.update.UpdateLog.deleteByQuery(UpdateLog.java:664)  at org.apache.solr.update.DirectUpdateHandler2.deleteByQuery(DirectUpdateHandler2.java:532)  at org.apache.solr.update.processor.RunUpdateProcessorFactory$RunUpdateProcessor.processDelete(RunUpdateProcessorFactory.java:83)  at org.apache.solr.update.processor.UpdateRequestProcessor.processDelete(UpdateRequestProcessor.java:59)  at org.apache.solr.update.processor.DistributedUpdateProcessor.doLocalDelete(DistributedUpdateProcessor.java:265)  at org.apache.solr.update.processor.DistributedUpdateProcessor.doLocalDeleteByQuery(DistributedUpdateProcessor.java:913)  at org.apache.solr.update.processor.DistributedUpdateProcessor.versionDeleteByQuery(DistributedUpdateProcessor.java:884)  at org.apache.solr.update.processor.DistributedUpdateProcessor.doDeleteByQuery(DistributedUpdateProcessor.java:845)  at org.apache.solr.update.processor.DistributedZkUpdateProcessor.doDeleteByQuery(DistributedZkUpdateProcessor.java:458)  at org.apache.solr.update.processor.DistributedUpdateProcessor.processDelete(DistributedUpdateProcessor.java:748)  at org.apache.solr.update.processor.DistributedZkUpdateProcessor.processDelete(DistributedZkUpdateProcessor.java:313)  at org.apache.solr.update.processor.LogUpdateProcessorFactory$LogUpdateProcessor.processDelete(LogUpdateProcessorFactory.java:129)  at org.apache.solr.handler.loader.JavabinLoader.delete(JavabinLoader.java:211)  at org.apache.solr.handler.loader.JavabinLoader.parseAndLoadDocs(JavabinLoader.java:127)  at 
org.apache.solr.handler.loader.JavabinLoader.load(JavabinLoader.java:70)  at org.apache.solr.handler.UpdateRequestHandler$1.load(UpdateRequestHandler.java:97)  at org.apache.solr.handler.ContentStreamHandlerBase.handleRequestBody(ContentStreamHandlerBase.java:68)  at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:212)  at org.apache.solr.core.SolrCore.execute(SolrCore.java:2605)  at org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:812)  at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:588)  at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:415)  at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:345)  at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)  at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166)  at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)  at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:545)  at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)  at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1610)  at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)  at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1300)  at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)  at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:485)  at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1580)  at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)  at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1215)  at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)  at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)  at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322)  at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:717)  at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)  at org.eclipse.jetty.server.Server.handle(Server.java:500)  at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383)  at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:547)  at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375)  at org.eclipse.jetty.server.HttpChannel.run(HttpChannel.java:335)  at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)  at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)  at java.base/java.lang.Thread.run(Thread.java:834)  >
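The assertion in this error message comes from Solr's test-time resource tracking: the recorded stack shows ObjectReleaseTracker.track() being called when the HdfsTransactionLog is constructed, and the suite fails ("expected null, but was:<...>") because that tracked object was never released before teardown. A minimal sketch of the general pattern -- hypothetical class and method names, not Solr's actual ObjectReleaseTracker -- looks like this:

    // Minimal release-tracker sketch (hypothetical; not Solr's implementation).
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    final class ReleaseTracker {
      // Remember where each tracked object was created so leaks can be reported.
      private static final Map<Object, Exception> TRACKED = new ConcurrentHashMap<>();

      static void track(Object o) {
        TRACKED.put(o, new Exception("allocation site for " + o.getClass().getName()));
      }

      static void release(Object o) {
        TRACKED.remove(o);
      }

      // Returns null when everything was released, otherwise a leak report;
      // a test teardown can then simply assertNull(checkEmpty()).
      static String checkEmpty() {
        if (TRACKED.isEmpty()) {
          return null;
        }
        StringBuilder report =
            new StringBuilder(TRACKED.size() + " object(s) were not released:\n");
        for (Exception allocationSite : TRACKED.values()) {
          for (StackTraceElement frame : allocationSite.getStackTrace()) {
            report.append("  at ").append(frame).append('\n');
          }
        }
        return report.toString();
      }
    }

In the failure below, the leaked object is an HdfsTransactionLog opened by HdfsUpdateLog.ensureLog() during a deleteByQuery; the allocation stack captured at track() time is what gets printed in the report.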

Stack Trace:
java.lang.AssertionError: ObjectTracker found 1 object(s) that were not released!!! [HdfsTransactionLog]
org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.solr.update.HdfsTransactionLog
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.update.HdfsTransactionLog.<init>(HdfsTransactionLog.java:129)
	at org.apache.solr.update.HdfsTransactionLog.<init>(HdfsTransactionLog.java:76)
	at org.apache.solr.update.HdfsUpdateLog.ensureLog(HdfsUpdateLog.java:341)
	at org.apache.solr.update.UpdateLog.deleteByQuery(UpdateLog.java:664)
	at org.apache.solr.update.DirectUpdateHandler2.deleteByQuery(DirectUpdateHandler2.java:532)
	at org.apache.solr.update.processor.RunUpdateProcessorFactory$RunUpdateProcessor.processDelete(RunUpdateProcessorFactory.java:83)
	at org.apache.solr.update.processor.UpdateRequestProcessor.processDelete(UpdateRequestProcessor.java:59)
	at org.apache.solr.update.processor.DistributedUpdateProcessor.doLocalDelete(DistributedUpdateProcessor.java:265)
	at org.apache.solr.update.processor.DistributedUpdateProcessor.doLocalDeleteByQuery(DistributedUpdateProcessor.java:913)
	at org.apache.solr.update.processor.DistributedUpdateProcessor.versionDeleteByQuery(DistributedUpdateProcessor.java:884)
	at org.apache.solr.update.processor.DistributedUpdateProcessor.doDeleteByQuery(DistributedUpdateProcessor.java:845)
	at org.apache.solr.update.processor.DistributedZkUpdateProcessor.doDeleteByQuery(DistributedZkUpdateProcessor.java:458)
	at org.apache.solr.update.processor.DistributedUpdateProcessor.processDelete(DistributedUpdateProcessor.java:748)
	at org.apache.solr.update.processor.DistributedZkUpdateProcessor.processDelete(DistributedZkUpdateProcessor.java:313)
	at org.apache.solr.update.processor.LogUpdateProcessorFactory$LogUpdateProcessor.processDelete(LogUpdateProcessorFactory.java:129)
	at org.apache.solr.handler.loader.JavabinLoader.delete(JavabinLoader.java:211)
	at org.apache.solr.handler.loader.JavabinLoader.parseAndLoadDocs(JavabinLoader.java:127)
	at org.apache.solr.handler.loader.JavabinLoader.load(JavabinLoader.java:70)
	at org.apache.solr.handler.UpdateRequestHandler$1.load(UpdateRequestHandler.java:97)
	at org.apache.solr.handler.ContentStreamHandlerBase.handleRequestBody(ContentStreamHandlerBase.java:68)
	at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:212)
	at org.apache.solr.core.SolrCore.execute(SolrCore.java:2605)
	at org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:812)
	at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:588)
	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:415)
	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:345)
	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
	at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166)
	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
	at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:545)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
	at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1610)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
	at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1300)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
	at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:485)
	at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1580)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
	at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1215)
	at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
	at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322)
	at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:717)
	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
	at org.eclipse.jetty.server.Server.handle(Server.java:500)
	at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383)
	at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:547)
	at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375)
	at org.eclipse.jetty.server.HttpChannel.run(HttpChannel.java:335)
	at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
	at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
	at java.base/java.lang.Thread.run(Thread.java:834)

 expected null, but was:<ObjectTracker found 1 object(s) that were not released!!! [HdfsTransactionLog]
org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.solr.update.HdfsTransactionLog
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.update.HdfsTransactionLog.<init>(HdfsTransactionLog.java:129)
	at org.apache.solr.update.HdfsTransactionLog.<init>(HdfsTransactionLog.java:76)
	at org.apache.solr.update.HdfsUpdateLog.ensureLog(HdfsUpdateLog.java:341)
	at org.apache.solr.update.UpdateLog.deleteByQuery(UpdateLog.java:664)
	at org.apache.solr.update.DirectUpdateHandler2.deleteByQuery(DirectUpdateHandler2.java:532)
	at org.apache.solr.update.processor.RunUpdateProcessorFactory$RunUpdateProcessor.processDelete(RunUpdateProcessorFactory.java:83)
	at org.apache.solr.update.processor.UpdateRequestProcessor.processDelete(UpdateRequestProcessor.java:59)
	at org.apache.solr.update.processor.DistributedUpdateProcessor.doLocalDelete(DistributedUpdateProcessor.java:265)
	at org.apache.solr.update.processor.DistributedUpdateProcessor.doLocalDeleteByQuery(DistributedUpdateProcessor.java:913)
	at org.apache.solr.update.processor.DistributedUpdateProcessor.versionDeleteByQuery(DistributedUpdateProcessor.java:884)
	at org.apache.solr.update.processor.DistributedUpdateProcessor.doDeleteByQuery(DistributedUpdateProcessor.java:845)
	at org.apache.solr.update.processor.DistributedZkUpdateProcessor.doDeleteByQuery(DistributedZkUpdateProcessor.java:458)
	at org.apache.solr.update.processor.DistributedUpdateProcessor.processDelete(DistributedUpdateProcessor.java:748)
	at org.apache.solr.update.processor.DistributedZkUpdateProcessor.processDelete(DistributedZkUpdateProcessor.java:313)
	at org.apache.solr.update.processor.LogUpdateProcessorFactory$LogUpdateProcessor.processDelete(LogUpdateProcessorFactory.java:129)
	at org.apache.solr.handler.loader.JavabinLoader.delete(JavabinLoader.java:211)
	at org.apache.solr.handler.loader.JavabinLoader.parseAndLoadDocs(JavabinLoader.java:127)
	at org.apache.solr.handler.loader.JavabinLoader.load(JavabinLoader.java:70)
	at org.apache.solr.handler.UpdateRequestHandler$1.load(UpdateRequestHandler.java:97)
	at org.apache.solr.handler.ContentStreamHandlerBase.handleRequestBody(ContentStreamHandlerBase.java:68)
	at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:212)
	at org.apache.solr.core.SolrCore.execute(SolrCore.java:2605)
	at org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:812)
	at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:588)
	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:415)
	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:345)
	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
	at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166)
	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
	at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:545)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
	at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1610)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
	at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1300)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
	at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:485)
	at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1580)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
	at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1215)
	at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
	at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322)
	at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:717)
	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
	at org.eclipse.jetty.server.Server.handle(Server.java:500)
	at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383)
	at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:547)
	at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375)
	at org.eclipse.jetty.server.HttpChannel.run(HttpChannel.java:335)
	at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
	at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
	at java.base/java.lang.Thread.run(Thread.java:834)

>
	at __randomizedtesting.SeedInfo.seed([794A361D30038D6]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.junit.Assert.failNotNull(Assert.java:755)
	at org.junit.Assert.assertNull(Assert.java:737)
	at org.apache.solr.SolrTestCaseJ4.teardownTestCases(SolrTestCaseJ4.java:335)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1754)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:905)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:370)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.lambda$forkTimeoutingTask$0(ThreadLeakControl.java:826)
	at java.base/java.lang.Thread.run(Thread.java:834)
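
The teardown failure above comes from Solr's test-time resource tracking: the trace shows each HdfsTransactionLog registering itself via ObjectReleaseTracker.track(...) in its constructor, and SolrTestCaseJ4.teardownTestCases asserting that the tracker's report is null, i.e. that every tracked object was released before the suite finished. The sketch below illustrates that track/release/assert-empty pattern only; ResourceTracker and TrackedLog are hypothetical names for illustration, not the actual Solr ObjectReleaseTracker API.

import java.io.Closeable;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical sketch of the leak-tracking pattern the assertion enforces.
// Not the real org.apache.solr.common.util.ObjectReleaseTracker implementation.
final class ResourceTracker {
  // Map each live object to an exception captured when it was tracked,
  // so a leak can be reported together with its allocation stack.
  private static final Map<Object, Exception> OBJECTS = new ConcurrentHashMap<>();

  static void track(Object o) {
    OBJECTS.put(o, new Exception("tracked here"));   // record where it was created
  }

  static void release(Object o) {
    OBJECTS.remove(o);                               // must be called on close()
  }

  // Returns null when everything was released, otherwise a leak report;
  // a test teardown would then do assertNull(ResourceTracker.assertReleased()).
  static String assertReleased() {
    if (OBJECTS.isEmpty()) return null;
    StringBuilder sb = new StringBuilder("ObjectTracker found ")
        .append(OBJECTS.size()).append(" object(s) that were not released!!!\n");
    for (Map.Entry<Object, Exception> e : OBJECTS.entrySet()) {
      sb.append(e.getKey().getClass().getName()).append('\n');
    }
    return sb.toString();
  }
}

// Usage: a resource registers on construction and unregisters on close;
// forgetting close() leaves an entry behind and fails the teardown assert.
final class TrackedLog implements Closeable {
  TrackedLog() { ResourceTracker.track(this); }
  @Override public void close() { ResourceTracker.release(this); }
}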




Build Log:
[...truncated 14216 lines...]
   [junit4] Suite: org.apache.solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest
   [junit4]   2> 4639878 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.a.s.SolrTestCase Setting 'solr.default.confdir' system property to test-framework derived value of '/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/server/solr/configsets/_default/conf'
   [junit4]   2> 4639879 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/data-dir-210-001
   [junit4]   2> 4639879 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 4639880 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl="https://issues.apache.org/jira/browse/SOLR-5776")
   [junit4]   2> 4639880 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 4639880 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /w_ac/
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 4639946 WARN  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 4639948 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 11.0.6+8-LTS
   [junit4]   2> 4639949 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4639949 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4639949 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 4639950 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@cc49502{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 4640063 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@76b2351b{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost_localdomain-40429-hadoop-hdfs-3_2_0-tests_jar-_-any-12508674538402464931.dir/webapp/,AVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/hdfs}
   [junit4]   2> 4640064 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@7691e8f1{HTTP/1.1, (http/1.1)}{localhost.localdomain:40429}
   [junit4]   2> 4640064 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.Server Started @4640091ms
   [junit4]   2> 4640122 WARN  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 4640123 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 11.0.6+8-LTS
   [junit4]   2> 4640123 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4640123 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4640123 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 4640124 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@142c34b4{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 4640236 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@ea9e6ff{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-41079-hadoop-hdfs-3_2_0-tests_jar-_-any-16514738043973605169.dir/webapp/,AVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/datanode}
   [junit4]   2> 4640237 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@324c1fd8{HTTP/1.1, (http/1.1)}{localhost:41079}
   [junit4]   2> 4640237 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.Server Started @4640264ms
   [junit4]   2> 4640257 WARN  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 4640258 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 11.0.6+8-LTS
   [junit4]   2> 4640259 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4640259 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4640259 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 4640259 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@249fdd7f{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 4640377 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@623f5177{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-45963-hadoop-hdfs-3_2_0-tests_jar-_-any-3957386925529428853.dir/webapp/,AVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/datanode}
   [junit4]   2> 4640377 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@47e4d8dd{HTTP/1.1, (http/1.1)}{localhost:45963}
   [junit4]   2> 4640377 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[794A361D30038D6]-worker) [     ] o.e.j.s.Server Started @4640404ms
   [junit4]   2> 4640413 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x690fcfad9ea25987: Processing first storage report for DS-b6ccdab7-b12c-487f-8524-da2ec2b21c70 from datanode a8b17abc-5c37-4685-b3d2-832b687fa981
   [junit4]   2> 4640413 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x690fcfad9ea25987: from storage DS-b6ccdab7-b12c-487f-8524-da2ec2b21c70 node DatanodeRegistration(127.0.0.1:35925, datanodeUuid=a8b17abc-5c37-4685-b3d2-832b687fa981, infoPort=38837, infoSecurePort=0, ipcPort=38757, storageInfo=lv=-57;cid=testClusterID;nsid=1818978830;c=1593851198925), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 4640413 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x690fcfad9ea25987: Processing first storage report for DS-f256341c-a271-41b7-8b44-8532bcc37a2c from datanode a8b17abc-5c37-4685-b3d2-832b687fa981
   [junit4]   2> 4640413 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x690fcfad9ea25987: from storage DS-f256341c-a271-41b7-8b44-8532bcc37a2c node DatanodeRegistration(127.0.0.1:35925, datanodeUuid=a8b17abc-5c37-4685-b3d2-832b687fa981, infoPort=38837, infoSecurePort=0, ipcPort=38757, storageInfo=lv=-57;cid=testClusterID;nsid=1818978830;c=1593851198925), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 4640548 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x8064bc2c8f86a061: Processing first storage report for DS-2bb40795-a6ef-4827-b5b7-5090073db4f1 from datanode 27ca399e-cdc1-4b85-b536-b129191888d9
   [junit4]   2> 4640548 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x8064bc2c8f86a061: from storage DS-2bb40795-a6ef-4827-b5b7-5090073db4f1 node DatanodeRegistration(127.0.0.1:35027, datanodeUuid=27ca399e-cdc1-4b85-b536-b129191888d9, infoPort=39581, infoSecurePort=0, ipcPort=36609, storageInfo=lv=-57;cid=testClusterID;nsid=1818978830;c=1593851198925), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 4640549 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x8064bc2c8f86a061: Processing first storage report for DS-006d297c-f7e7-48c0-adc6-c5f219b8b16c from datanode 27ca399e-cdc1-4b85-b536-b129191888d9
   [junit4]   2> 4640549 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x8064bc2c8f86a061: from storage DS-006d297c-f7e7-48c0-adc6-c5f219b8b16c node DatanodeRegistration(127.0.0.1:35027, datanodeUuid=27ca399e-cdc1-4b85-b536-b129191888d9, infoPort=39581, infoSecurePort=0, ipcPort=36609, storageInfo=lv=-57;cid=testClusterID;nsid=1818978830;c=1593851198925), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 4640623 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 4640623 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer client port: 0.0.0.0/0.0.0.0:0
   [junit4]   2> 4640623 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 4640624 WARN  (ZkTestServer Run Thread) [     ] o.a.z.s.ServerCnxnFactory maxCnxns is not configured, using default value 0.
   [junit4]   2> 4640723 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer start zk server on port: 33987
   [junit4]   2> 4640723 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:33987
   [junit4]   2> 4640723 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:33987
   [junit4]   2> 4640723 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 33987
   [junit4]   2> 4640724 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 4640726 INFO  (zkConnectionManagerCallback-39825-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4640726 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 4640728 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 4640729 INFO  (zkConnectionManagerCallback-39827-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4640729 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 4640730 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 4640731 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/schema15.xml to /configs/conf1/schema.xml
   [junit4]   2> 4640733 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 4640735 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 4640736 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 4640738 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 4640739 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 4640740 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 4640742 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 4640743 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 4640745 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 4640746 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly asked otherwise
   [junit4]   2> 4640932 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 4640932 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 2 ...
   [junit4]   2> 4640933 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 11.0.6+8-LTS
   [junit4]   2> 4640946 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4640946 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4640946 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 4640947 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@60155bb0{/w_ac,null,AVAILABLE}
   [junit4]   2> 4640947 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.e.j.s.AbstractConnector Started ServerConnector@f48dd42{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:41545}
   [junit4]   2> 4640947 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.e.j.s.Server Started @4640974ms
   [junit4]   2> 4640947 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/w_ac, solr.data.dir=hdfs://localhost.localdomain:35211/hdfs__localhost.localdomain_35211__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J2_temp_solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001_tempDir-002_control_data, hostPort=41545, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/control-001/cores, replicaType=NRT}
   [junit4]   2> 4640947 ERROR (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 4640947 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 4640947 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 4640948 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 4640948 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr
   [junit4]   2> 4640948 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2020-07-04T08:26:39.979087Z
   [junit4]   2> 4640949 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 4640953 INFO  (zkConnectionManagerCallback-39829-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4640953 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 4641054 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 4641054 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/control-001/solr.xml
   [junit4]   2> 4641058 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@454b0af3, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 4642217 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 4642217 WARN  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@3ff6b0f8[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4642218 WARN  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@3ff6b0f8[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4642220 WARN  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@5e2a2a42[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4642220 WARN  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@5e2a2a42[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4642221 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33987/solr
   [junit4]   2> 4642222 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 4642223 INFO  (zkConnectionManagerCallback-39840-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4642223 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 4642224 WARN  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]-SendThread(127.0.0.1:33987)) [     ] o.a.z.ClientCnxn An exception was thrown while closing send thread for session 0x10011ce03e50003.
   [junit4]   2>           => EndOfStreamException: Unable to read additional data from server sessionid 0x10011ce03e50003, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:75)
   [junit4]   2> org.apache.zookeeper.ClientCnxn$EndOfStreamException: Unable to read additional data from server sessionid 0x10011ce03e50003, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:75) ~[zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doTransport(ClientCnxnSocketNIO.java:348) ~[zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1262) [zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 4642325 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 4642326 INFO  (zkConnectionManagerCallback-39842-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4642326 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 4642413 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:41545_w_ac
   [junit4]   2> 4642414 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.Overseer Overseer (id=72077170564202500-127.0.0.1:41545_w_ac-n_0000000000) starting
   [junit4]   2> 4642419 INFO  (OverseerStateUpdate-72077170564202500-127.0.0.1:41545_w_ac-n_0000000000) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:41545_w_ac
   [junit4]   2> 4642419 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:41545_w_ac
   [junit4]   2> 4642420 INFO  (OverseerStateUpdate-72077170564202500-127.0.0.1:41545_w_ac-n_0000000000) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 4642422 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.p.PackageLoader /packages.json updated to version -1
   [junit4]   2> 4642423 WARN  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.CoreContainer Not all security plugins configured!  authentication=disabled authorization=disabled.  Solr is only as secure as you make it. Consider configuring authentication/authorization before exposing Solr to users internal or external.  See https://s.apache.org/solrsecurity for more info
   [junit4]   2> 4642443 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 4642463 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@454b0af3
   [junit4]   2> 4642471 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@454b0af3
   [junit4]   2> 4642471 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@454b0af3
   [junit4]   2> 4642472 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/control-001/cores
   [junit4]   2> 4642479 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 4642480 INFO  (zkConnectionManagerCallback-39859-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4642480 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 4642481 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 4642482 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33987/solr ready
   [junit4]   2> 4642483 INFO  (qtp852885739-69944) [n:127.0.0.1:41545_w_ac     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:41545_w_ac&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 4642486 INFO  (OverseerThreadFactory-39849-thread-1-processing-n:127.0.0.1:41545_w_ac) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.a.c.CreateCollectionCmd Create collection control_collection
   [junit4]   2> 4642591 INFO  (qtp852885739-69946) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 4642591 INFO  (qtp852885739-69946) [n:127.0.0.1:41545_w_ac     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/metrics params={prefix=CONTAINER.fs.usableSpace,CONTAINER.fs.totalSpace,CONTAINER.cores&wt=javabin&version=2&group=solr.node} status=0 QTime=0
   [junit4]   2> 4642598 INFO  (OverseerStateUpdate-72077170564202500-127.0.0.1:41545_w_ac-n_0000000000) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:41545/w_ac",
   [junit4]   2>   "node_name":"127.0.0.1:41545_w_ac",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 4642800 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac    x:control_collection_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node2&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 4642809 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 4642824 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema Schema name=test
   [junit4]   2> 4642885 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema test/1.6 with uniqueid field id
   [junit4]   2> 4642902 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' using configuration from configset conf1, trusted=true
   [junit4]   2> 4642902 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry 'solr.core.control_collection.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@454b0af3
   [junit4]   2> 4642903 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:35211/solr_hdfs_home
   [junit4]   2> 4642903 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 4642903 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/control-001/cores/control_collection_shard1_replica_n1], dataDir=[hdfs://localhost.localdomain:35211/solr_hdfs_home/control_collection/core_node2/data/]
   [junit4]   2> 4642904 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:35211/solr_hdfs_home/control_collection/core_node2/data/snapshot_metadata
   [junit4]   2> 4642911 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 4642911 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 4642914 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 4642915 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:35211/solr_hdfs_home/control_collection/core_node2/data
   [junit4]   2> 4642929 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:35211/solr_hdfs_home/control_collection/core_node2/data/index
   [junit4]   2> 4642935 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 4642935 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 4642937 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 4642937 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.MockRandomMergePolicy: org.apache.lucene.index.MockRandomMergePolicy@26c76cfe
   [junit4]   2> 4642948 WARN  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A, b=B}}}
   [junit4]   2> 4643020 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 4643020 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 4643020 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 4643029 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: if uncommitted for 15000ms; 
   [junit4]   2> 4643029 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 4643030 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=44, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.7292330654194478]
   [junit4]   2> 4643034 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 4643035 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 4643035 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000 ms
   [junit4]   2> 4643035 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1671274118057558016
   [junit4]   2> 4643038 INFO  (searcherExecutor-39861-thread-1-processing-n:127.0.0.1:41545_w_ac x:control_collection_shard1_replica_n1 c:control_collection s:shard1 r:core_node2) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [control_collection_shard1_replica_n1]  Registered new searcher autowarm time: 0 ms
   [junit4]   2> 4643040 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 4643041 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/control_collection/leaders/shard1
   [junit4]   2> 4643044 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 4643044 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 4643044 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:41545/w_ac/control_collection_shard1_replica_n1/
   [junit4]   2> 4643044 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 4643044 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy http://127.0.0.1:41545/w_ac/control_collection_shard1_replica_n1/ has no replicas
   [junit4]   2> 4643044 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/control_collection/leaders/shard1/leader after winning as /collections/control_collection/leader_elect/shard1/election/72077170564202500-core_node2-n_0000000000
   [junit4]   2> 4643046 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:41545/w_ac/control_collection_shard1_replica_n1/ shard1
   [junit4]   2> 4643148 INFO  (zkCallback-39841-thread-1) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 4643148 INFO  (zkCallback-39841-thread-2) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 4643149 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac c:control_collection s:shard1 r:core_node2 x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 4643150 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node2&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=350
   [junit4]   2> 4643152 INFO  (qtp852885739-69944) [n:127.0.0.1:41545_w_ac     ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 4643251 INFO  (zkCallback-39841-thread-1) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 4643251 INFO  (zkCallback-39841-thread-3) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 4643251 INFO  (zkCallback-39841-thread-2) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 4643252 INFO  (qtp852885739-69944) [n:127.0.0.1:41545_w_ac     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:41545_w_ac&wt=javabin&version=2} status=0 QTime=768
   [junit4]   2> 4643252 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.AbstractFullDistribZkTestBase Waiting to see 1 active replicas in collection: control_collection
   [junit4]   2> 4643356 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 4643357 INFO  (zkConnectionManagerCallback-39870-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4643357 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 4643358 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 4643359 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33987/solr ready
   [junit4]   2> 4643359 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 4643359 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=4&createNodeSet=&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 4643362 INFO  (OverseerThreadFactory-39849-thread-2-processing-n:127.0.0.1:41545_w_ac) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.a.c.CreateCollectionCmd Create collection collection1
   [junit4]   2> 4643363 INFO  (OverseerCollectionConfigSetProcessor-72077170564202500-127.0.0.1:41545_w_ac-n_0000000000) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist. Requestor may have disconnected from ZooKeeper
   [junit4]   2> 4643566 WARN  (OverseerThreadFactory-39849-thread-2-processing-n:127.0.0.1:41545_w_ac) [n:127.0.0.1:41545_w_ac     ] o.a.s.c.a.c.CreateCollectionCmd It is unusual to create a collection (collection1) without cores.
   [junit4]   2> 4643567 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac     ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 4643568 INFO  (qtp852885739-69947) [n:127.0.0.1:41545_w_ac     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=4&createNodeSet=&wt=javabin&version=2} status=0 QTime=209
   [junit4]   2> 4643569 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.SolrCloudTestCase active slice count: 4 expected: 4
   [junit4]   2> 4643569 INFO  (watches-39867-thread-1) [     ] o.a.s.c.SolrCloudTestCase active slice count: 4 expected: 4
   [junit4]   2> 4643569 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
   [junit4]   2> 4643569 INFO  (watches-39867-thread-1) [     ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
   [junit4]   2> 4643569 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.SolrCloudTestCase active slice count: 4 expected: 4
   [junit4]   2> 4643569 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
   [junit4]   2> 4643570 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.SolrCloudTestCase active slice count: 4 expected: 4
   [junit4]   2> 4643570 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
   [junit4]   2> 4643570 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.AbstractFullDistribZkTestBase Creating jetty instances pullReplicaCount=0 numOtherReplicas=14
   [junit4]   2> 4643726 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/shard-1-001 of type NRT for shard2
   [junit4]   2> 4643727 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 4643727 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 2 ...
   [junit4]   2> 4643727 INFO  (closeThreadPool-39871-thread-1) [     ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 11.0.6+8-LTS
   [junit4]   2> 4643728 INFO  (closeThreadPool-39871-thread-1) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4643728 INFO  (closeThreadPool-39871-thread-1) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4643728 INFO  (closeThreadPool-39871-thread-1) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 4643728 INFO  (closeThreadPool-39871-thread-1) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@5c3f53c7{/w_ac,null,AVAILABLE}
   [junit4]   2> 4643728 INFO  (closeThreadPool-39871-thread-1) [     ] o.e.j.s.AbstractConnector Started ServerConnector@25ca7092{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:44879}
   [junit4]   2> 4643728 INFO  (closeThreadPool-39871-thread-1) [     ] o.e.j.s.Server Started @4643756ms
   [junit4]   2> 4643728 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/w_ac, solrconfig=solrconfig.xml, solr.data.dir=hdfs://localhost.localdomain:35211/hdfs__localhost.localdomain_35211__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J2_temp_solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001_tempDir-002_jetty1, hostPort=44879, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/shard-1-001/cores, replicaType=NRT}
   [junit4]   2> 4643729 ERROR (closeThreadPool-39871-thread-1) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 4643729 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 4643729 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 4643729 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 4643729 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr
   [junit4]   2> 4643729 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2020-07-04T08:26:42.760205Z
   [junit4]   2> 4643733 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 4643736 INFO  (zkConnectionManagerCallback-39873-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4643736 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 4643737 WARN  (closeThreadPool-39871-thread-1-SendThread(127.0.0.1:33987)) [     ] o.a.z.ClientCnxn An exception was thrown while closing send thread for session 0x10011ce03e50007.
   [junit4]   2>           => EndOfStreamException: Unable to read additional data from server sessionid 0x10011ce03e50007, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:75)
   [junit4]   2> org.apache.zookeeper.ClientCnxn$EndOfStreamException: Unable to read additional data from server sessionid 0x10011ce03e50007, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:75) ~[zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doTransport(ClientCnxnSocketNIO.java:348) ~[zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1262) [zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 4643838 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 4643838 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/shard-1-001/solr.xml
   [junit4]   2> 4643842 INFO  (closeThreadPool-39871-thread-1) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@454b0af3, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 4643886 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/shard-2-001 of type NRT for shard3
   [junit4]   2> 4643887 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 4643887 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 2 ...
   [junit4]   2> 4643887 INFO  (closeThreadPool-39871-thread-2) [     ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 11.0.6+8-LTS
   [junit4]   2> 4643901 INFO  (closeThreadPool-39871-thread-2) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4643901 INFO  (closeThreadPool-39871-thread-2) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4643901 INFO  (closeThreadPool-39871-thread-2) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 4643901 INFO  (closeThreadPool-39871-thread-2) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@75e2c4a1{/w_ac,null,AVAILABLE}
   [junit4]   2> 4643901 INFO  (closeThreadPool-39871-thread-2) [     ] o.e.j.s.AbstractConnector Started ServerConnector@4559137f{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:44525}
   [junit4]   2> 4643901 INFO  (closeThreadPool-39871-thread-2) [     ] o.e.j.s.Server Started @4643929ms
   [junit4]   2> 4643901 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/w_ac, solrconfig=solrconfig.xml, solr.data.dir=hdfs://localhost.localdomain:35211/hdfs__localhost.localdomain_35211__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J2_temp_solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001_tempDir-002_jetty2, hostPort=44525, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/shard-2-001/cores, replicaType=NRT}
   [junit4]   2> 4643902 ERROR (closeThreadPool-39871-thread-2) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 4643902 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 4643902 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 4643902 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 4643902 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr
   [junit4]   2> 4643902 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2020-07-04T08:26:42.933285Z
   [junit4]   2> 4643904 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 4643905 INFO  (zkConnectionManagerCallback-39878-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4643905 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 4643906 WARN  (closeThreadPool-39871-thread-2-SendThread(127.0.0.1:33987)) [     ] o.a.z.ClientCnxn An exception was thrown while closing send thread for session 0x10011ce03e50008.
   [junit4]   2>           => EndOfStreamException: Unable to read additional data from server sessionid 0x10011ce03e50008, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:75)
   [junit4]   2> org.apache.zookeeper.ClientCnxn$EndOfStreamException: Unable to read additional data from server sessionid 0x10011ce03e50008, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:75) ~[zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doTransport(ClientCnxnSocketNIO.java:348) ~[zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1262) [zookeeper-3.6.1.jar:3.6.1]
   [junit4]   2> 4644007 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 4644007 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/shard-2-001/solr.xml
   [junit4]   2> 4644010 INFO  (closeThreadPool-39871-thread-2) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@454b0af3, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 4644056 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/shard-3-001 of type NRT for shard4
   [junit4]   2> 4644057 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 4644057 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 2 ...
   [junit4]   2> 4644057 INFO  (closeThreadPool-39871-thread-3) [     ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 11.0.6+8-LTS
   [junit4]   2> 4644057 INFO  (closeThreadPool-39871-thread-3) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4644057 INFO  (closeThreadPool-39871-thread-3) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4644057 INFO  (closeThreadPool-39871-thread-3) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 4644057 INFO  (closeThreadPool-39871-thread-3) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@3840ebe2{/w_ac,null,AVAILABLE}
   [junit4]   2> 4644058 INFO  (closeThreadPool-39871-thread-3) [     ] o.e.j.s.AbstractConnector Started ServerConnector@65d3bd56{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:34235}
   [junit4]   2> 4644058 INFO  (closeThreadPool-39871-thread-3) [     ] o.e.j.s.Server Started @4644085ms
   [junit4]   2> 4644058 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/w_ac, solrconfig=solrconfig.xml, solr.data.dir=hdfs://localhost.localdomain:35211/hdfs__localhost.localdomain_35211__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J2_temp_solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001_tempDir-002_jetty3, hostPort=34235, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/shard-3-001/cores, replicaType=NRT}
   [junit4]   2> 4644058 ERROR (closeThreadPool-39871-thread-3) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 4644058 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 4644058 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 4644058 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 4644058 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr
   [junit4]   2> 4644058 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2020-07-04T08:26:43.089565Z
   [junit4]   2> 4644059 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 4644060 INFO  (zkConnectionManagerCallback-39883-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4644060 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 4644161 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 4644161 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/shard-3-001/solr.xml
   [junit4]   2> 4644166 INFO  (closeThreadPool-39871-thread-3) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@454b0af3, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 4644219 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[794A361D30038D6]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 4 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_794A361D30038D6-001/shard-4-001 of type NRT for shard1
   [junit4]   2> 4644229 INFO  (closeThreadPool-39871-thread-4) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 4644229 INFO  (closeThreadPool-39871-thread-4) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 2 ...
   [junit4]   2> 4644229 INFO  (closeThreadPool-39871-thread-4) [     ] o.e.j.s.Server jetty-9.4.27.v20200227; built: 2020-02-27T18:37:21.340Z; git: a304fd9f351f337e7c0e2a7c28878dd536149c6c; jvm 11.0.6+8-LTS
   [junit4]   2> 4644312 INFO  (closeThreadPool-39871-thread-4) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4644312 INFO  (closeThreadPool-39871-thread-4) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4644312 INFO  (closeThreadPool-39871-thread-4) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 4644319 INFO  (closeThreadPool-39871-thread-4) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@2287d

[...truncated too long message...]

e.solr.handler.UpdateRequestHandler$1.load(UpdateRequestHandler.java:97)
   [junit4]    > 	at org.apache.solr.handler.ContentStreamHandlerBase.handleRequestBody(ContentStreamHandlerBase.java:68)
   [junit4]    > 	at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:212)
   [junit4]    > 	at org.apache.solr.core.SolrCore.execute(SolrCore.java:2605)
   [junit4]    > 	at org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:812)
   [junit4]    > 	at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:588)
   [junit4]    > 	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:415)
   [junit4]    > 	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:345)
   [junit4]    > 	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
   [junit4]    > 	at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166)
   [junit4]    > 	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
   [junit4]    > 	at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:545)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
   [junit4]    > 	at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1610)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1300)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
   [junit4]    > 	at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:485)
   [junit4]    > 	at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1580)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1215)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
   [junit4]    > 	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
   [junit4]    > 	at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322)
   [junit4]    > 	at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:717)
   [junit4]    > 	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
   [junit4]    > 	at org.eclipse.jetty.server.Server.handle(Server.java:500)
   [junit4]    > 	at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383)
   [junit4]    > 	at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:547)
   [junit4]    > 	at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375)
   [junit4]    > 	at org.eclipse.jetty.server.HttpChannel.run(HttpChannel.java:335)
   [junit4]    > 	at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
   [junit4]    > 	at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
   [junit4]    > 	at java.base/java.lang.Thread.run(Thread.java:834)
   [junit4]    >  expected null, but was:<ObjectTracker found 1 object(s) that were not released!!! [HdfsTransactionLog]
   [junit4]    > org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.solr.update.HdfsTransactionLog
   [junit4]    > 	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
   [junit4]    > 	at org.apache.solr.update.HdfsTransactionLog.<init>(HdfsTransactionLog.java:129)
   [junit4]    > 	at org.apache.solr.update.HdfsTransactionLog.<init>(HdfsTransactionLog.java:76)
   [junit4]    > 	at org.apache.solr.update.HdfsUpdateLog.ensureLog(HdfsUpdateLog.java:341)
   [junit4]    > 	at org.apache.solr.update.UpdateLog.deleteByQuery(UpdateLog.java:664)
   [junit4]    > 	at org.apache.solr.update.DirectUpdateHandler2.deleteByQuery(DirectUpdateHandler2.java:532)
   [junit4]    > 	at org.apache.solr.update.processor.RunUpdateProcessorFactory$RunUpdateProcessor.processDelete(RunUpdateProcessorFactory.java:83)
   [junit4]    > 	at org.apache.solr.update.processor.UpdateRequestProcessor.processDelete(UpdateRequestProcessor.java:59)
   [junit4]    > 	at org.apache.solr.update.processor.DistributedUpdateProcessor.doLocalDelete(DistributedUpdateProcessor.java:265)
   [junit4]    > 	at org.apache.solr.update.processor.DistributedUpdateProcessor.doLocalDeleteByQuery(DistributedUpdateProcessor.java:913)
   [junit4]    > 	at org.apache.solr.update.processor.DistributedUpdateProcessor.versionDeleteByQuery(DistributedUpdateProcessor.java:884)
   [junit4]    > 	at org.apache.solr.update.processor.DistributedUpdateProcessor.doDeleteByQuery(DistributedUpdateProcessor.java:845)
   [junit4]    > 	at org.apache.solr.update.processor.DistributedZkUpdateProcessor.doDeleteByQuery(DistributedZkUpdateProcessor.java:458)
   [junit4]    > 	at org.apache.solr.update.processor.DistributedUpdateProcessor.processDelete(DistributedUpdateProcessor.java:748)
   [junit4]    > 	at org.apache.solr.update.processor.DistributedZkUpdateProcessor.processDelete(DistributedZkUpdateProcessor.java:313)
   [junit4]    > 	at org.apache.solr.update.processor.LogUpdateProcessorFactory$LogUpdateProcessor.processDelete(LogUpdateProcessorFactory.java:129)
   [junit4]    > 	at org.apache.solr.handler.loader.JavabinLoader.delete(JavabinLoader.java:211)
   [junit4]    > 	at org.apache.solr.handler.loader.JavabinLoader.parseAndLoadDocs(JavabinLoader.java:127)
   [junit4]    > 	at org.apache.solr.handler.loader.JavabinLoader.load(JavabinLoader.java:70)
   [junit4]    > 	at org.apache.solr.handler.UpdateRequestHandler$1.load(UpdateRequestHandler.java:97)
   [junit4]    > 	at org.apache.solr.handler.ContentStreamHandlerBase.handleRequestBody(ContentStreamHandlerBase.java:68)
   [junit4]    > 	at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:212)
   [junit4]    > 	at org.apache.solr.core.SolrCore.execute(SolrCore.java:2605)
   [junit4]    > 	at org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:812)
   [junit4]    > 	at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:588)
   [junit4]    > 	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:415)
   [junit4]    > 	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:345)
   [junit4]    > 	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
   [junit4]    > 	at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166)
   [junit4]    > 	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
   [junit4]    > 	at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:545)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
   [junit4]    > 	at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1610)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1300)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
   [junit4]    > 	at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:485)
   [junit4]    > 	at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1580)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1215)
   [junit4]    > 	at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
   [junit4]    > 	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
   [junit4]    > 	at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322)
   [junit4]    > 	at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:717)
   [junit4]    > 	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
   [junit4]    > 	at org.eclipse.jetty.server.Server.handle(Server.java:500)
   [junit4]    > 	at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383)
   [junit4]    > 	at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:547)
   [junit4]    > 	at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375)
   [junit4]    > 	at org.eclipse.jetty.server.HttpChannel.run(HttpChannel.java:335)
   [junit4]    > 	at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
   [junit4]    > 	at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
   [junit4]    > 	at java.base/java.lang.Thread.run(Thread.java:834)
   [junit4]    > >
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([794A361D30038D6]:0)
   [junit4]    > 	at org.apache.solr.SolrTestCaseJ4.teardownTestCases(SolrTestCaseJ4.java:335)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
   [junit4]    > 	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   [junit4]    > 	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
   [junit4]    > 	at java.base/java.lang.Thread.run(Thread.java:834)
   [junit4] Completed [600/915 (1!)] on J2 in 145.99s, 1 test, 1 failure <<< FAILURES!

[...truncated 53070 lines...]