Posted to dev@lucene.apache.org by Policeman Jenkins Server <je...@thetaphi.de> on 2017/10/02 12:25:36 UTC

[JENKINS] Lucene-Solr-7.x-MacOSX (64bit/jdk1.8.0) - Build # 222 - Still Failing!

Build: https://jenkins.thetaphi.de/job/Lucene-Solr-7.x-MacOSX/222/
Java: 64bit/jdk1.8.0 -XX:-UseCompressedOops -XX:+UseSerialGC

2 tests failed.
FAILED:  org.apache.lucene.TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler

Error Message:


Stack Trace:
java.lang.AssertionError
	at __randomizedtesting.SeedInfo.seed([D853AFC32C37680:8A04875136E30C84]:0)
	at org.junit.Assert.fail(Assert.java:92)
	at org.junit.Assert.assertTrue(Assert.java:43)
	at org.junit.Assert.assertTrue(Assert.java:54)
	at org.apache.lucene.TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler(TestMergeSchedulerExternal.java:147)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1737)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:934)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:970)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:984)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:829)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:879)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:890)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)
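
For context, this test exercises a user subclass of Lucene's ConcurrentMergeScheduler (the scheduler appears as "MyMergeScheduler" in the infoStream output further below). A minimal sketch of that pattern follows; the class name, flag, and wiring comments are illustrative assumptions rather than the test's actual code, and the doMerge signature shown is the Lucene 7.x one:

    import java.io.IOException;

    import org.apache.lucene.index.ConcurrentMergeScheduler;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.MergePolicy;

    // Illustrative sketch: a custom scheduler that records whether a merge ran,
    // so a test can later assert that the subclass hooks were actually invoked.
    class TrackingMergeScheduler extends ConcurrentMergeScheduler {
      volatile boolean mergeCalled;

      @Override
      protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
        mergeCalled = true;           // the kind of flag an assertTrue(...) would check
        super.doMerge(writer, merge); // delegate the real merge work to CMS
      }
    }

    // Assumed wiring into an IndexWriterConfig:
    //   IndexWriterConfig iwc = new IndexWriterConfig(analyzer);
    //   iwc.setMergeScheduler(new TrackingMergeScheduler());

The AssertionError at TestMergeSchedulerExternal.java:147 above carries no message, so the failing check is presumably one such boolean assertion in the real test.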


FAILED:  org.apache.solr.cloud.CollectionsAPIDistributedZkTest.testCollectionsAPI

Error Message:
Error from server at http://127.0.0.1:53192/solr/awhollynewcollection_0_shard2_replica_n2: ClusterState says we are the leader (http://127.0.0.1:53192/solr/awhollynewcollection_0_shard2_replica_n2), but locally we don't think so. Request came from null

Stack Trace:
org.apache.solr.client.solrj.impl.CloudSolrClient$RouteException: Error from server at http://127.0.0.1:53192/solr/awhollynewcollection_0_shard2_replica_n2: ClusterState says we are the leader (http://127.0.0.1:53192/solr/awhollynewcollection_0_shard2_replica_n2), but locally we don't think so. Request came from null
	at __randomizedtesting.SeedInfo.seed([AC327EC2E4BDF7B2:E4470A76E28ED827]:0)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.directUpdate(CloudSolrClient.java:539)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.sendRequest(CloudSolrClient.java:993)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.requestWithRetryOnStaleState(CloudSolrClient.java:862)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.request(CloudSolrClient.java:793)
	at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:178)
	at org.apache.solr.client.solrj.request.UpdateRequest.commit(UpdateRequest.java:233)
	at org.apache.solr.cloud.CollectionsAPIDistributedZkTest.testCollectionsAPI(CollectionsAPIDistributedZkTest.java:459)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1737)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:934)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:970)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:984)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:829)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:879)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:890)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error from server at http://127.0.0.1:53192/solr/awhollynewcollection_0_shard2_replica_n2: ClusterState says we are the leader (http://127.0.0.1:53192/solr/awhollynewcollection_0_shard2_replica_n2), but locally we don't think so. Request came from null
	at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:627)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:253)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:242)
	at org.apache.solr.client.solrj.impl.LBHttpSolrClient.doRequest(LBHttpSolrClient.java:483)
	at org.apache.solr.client.solrj.impl.LBHttpSolrClient.request(LBHttpSolrClient.java:413)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.lambda$directUpdate$0(CloudSolrClient.java:516)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:188)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	... 1 more




Build Log:
[...truncated 249 lines...]
   [junit4] Suite: org.apache.lucene.TestMergeSchedulerExternal
   [junit4]   1> TEST FAILED; IW infoStream output:
   [junit4]   1> IFD 0 [2017-10-02T11:05:00.424Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: init: current segments file is "segments"; deletionPolicy=org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy@6a5ba0f4
   [junit4]   1> IFD 0 [2017-10-02T11:05:00.437Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now checkpoint "" [0 segments ; isCommit = false]
   [junit4]   1> IFD 0 [2017-10-02T11:05:00.437Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to checkpoint
   [junit4]   1> IW 0 [2017-10-02T11:05:00.437Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: init: create=true
   [junit4]   1> IW 0 [2017-10-02T11:05:00.437Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 
   [junit4]   1> dir=MockDirectoryWrapper(RAMDirectory@659905c5 lockFactory=org.apache.lucene.store.SingleInstanceLockFactory@749590d1)
   [junit4]   1> index=
   [junit4]   1> version=7.1.0
   [junit4]   1> analyzer=org.apache.lucene.analysis.MockAnalyzer
   [junit4]   1> ramBufferSizeMB=-1.0
   [junit4]   1> maxBufferedDocs=2
   [junit4]   1> mergedSegmentWarmer=null
   [junit4]   1> delPolicy=org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy
   [junit4]   1> commit=null
   [junit4]   1> openMode=CREATE_OR_APPEND
   [junit4]   1> similarity=org.apache.lucene.search.similarities.RandomSimilarity
   [junit4]   1> mergeScheduler=MyMergeScheduler: maxThreadCount=-1, maxMergeCount=-1, ioThrottle=true
   [junit4]   1> codec=Asserting(Lucene70): {}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305
   [junit4]   1> infoStream=org.apache.lucene.util.PrintStreamInfoStream
   [junit4]   1> mergePolicy=[LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=42, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0]
   [junit4]   1> indexerThreadPool=org.apache.lucene.index.DocumentsWriterPerThreadPool@40128d59
   [junit4]   1> readerPooling=true
   [junit4]   1> perThreadHardLimitMB=1945
   [junit4]   1> useCompoundFile=false
   [junit4]   1> commitOnClose=true
   [junit4]   1> indexSort=null
   [junit4]   1> writer=org.apache.lucene.index.IndexWriter@13b29a56
   [junit4]   1> 
   [junit4]   1> IW 0 [2017-10-02T11:05:00.437Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: MMapDirectory.UNMAP_SUPPORTED=true
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.441Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush postings as segment _0 numDocs=2
   [junit4]   1> IW 0 [2017-10-02T11:05:00.441Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write norms
   [junit4]   1> IW 0 [2017-10-02T11:05:00.441Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write docValues
   [junit4]   1> IW 0 [2017-10-02T11:05:00.441Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write points
   [junit4]   1> IW 0 [2017-10-02T11:05:00.447Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 5 msec to finish stored fields
   [junit4]   1> IW 0 [2017-10-02T11:05:00.465Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 18 msec to write postings and finish vectors
   [junit4]   1> IW 0 [2017-10-02T11:05:00.490Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 24 msec to write fieldInfos
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.490Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has 0 deleted docs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.490Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has no vectors; no norms; no docValues; no prox; no freqs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.490Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushedFiles=[_0_LuceneVarGapDocFreqInterval_0.tiv, _0_LuceneVarGapDocFreqInterval_0.tib, _0.fdx, _0.fdt, _0_LuceneVarGapDocFreqInterval_0.doc, _0.fnm]
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.490Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed codec=Asserting(Lucene70): {id=PostingsFormat(name=LuceneVarGapDocFreqInterval)}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.531Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed: segment=_0 ramUsed=0.071 MB newFlushedSize=0.001 MB docs/MB=2,661.36
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.575Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush time 134.286437 msec
   [junit4]   1> DW 0 [2017-10-02T11:05:00.576Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment seg-private updates=null
   [junit4]   1> IW 0 [2017-10-02T11:05:00.576Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment _0(7.1.0):C2
   [junit4]   1> BD 0 [2017-10-02T11:05:00.576Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: finished packet delGen=1 now completedDelGen=1
   [junit4]   1> IW 0 [2017-10-02T11:05:00.576Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publish sets newSegment delGen=1 seg=_0(7.1.0):C2
   [junit4]   1> IFD 0 [2017-10-02T11:05:00.576Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now checkpoint "_0(7.1.0):C2" [1 segments ; isCommit = false]
   [junit4]   1> IFD 0 [2017-10-02T11:05:00.576Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to checkpoint
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.576Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: findMerges: 1 segments
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.642Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_0(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.642Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   level -1.0 to 3.1085649: 1 segments
   [junit4]   1> MS 0 [2017-10-02T11:05:00.643Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: initDynamicDefaults spins=false maxThreadCount=1 maxMergeCount=6
   [junit4]   1> MS 0 [2017-10-02T11:05:00.643Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now merge
   [junit4]   1> MS 0 [2017-10-02T11:05:00.643Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   index: _0(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:00.643Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   no more merges pending; now return
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.649Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush postings as segment _1 numDocs=2
   [junit4]   1> IW 0 [2017-10-02T11:05:00.649Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write norms
   [junit4]   1> IW 0 [2017-10-02T11:05:00.649Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write docValues
   [junit4]   1> IW 0 [2017-10-02T11:05:00.649Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write points
   [junit4]   1> IW 0 [2017-10-02T11:05:00.654Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 4 msec to finish stored fields
   [junit4]   1> IW 0 [2017-10-02T11:05:00.681Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 27 msec to write postings and finish vectors
   [junit4]   1> IW 0 [2017-10-02T11:05:00.705Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 23 msec to write fieldInfos
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.705Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has 0 deleted docs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.705Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has no vectors; no norms; no docValues; no prox; no freqs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.705Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushedFiles=[_1_LuceneVarGapDocFreqInterval_0.tib, _1_LuceneVarGapDocFreqInterval_0.doc, _1.fdx, _1_LuceneVarGapDocFreqInterval_0.tiv, _1.fnm, _1.fdt]
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.705Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed codec=Asserting(Lucene70): {id=PostingsFormat(name=LuceneVarGapDocFreqInterval)}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.726Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed: segment=_1 ramUsed=0.071 MB newFlushedSize=0.001 MB docs/MB=2,661.36
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.733Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush time 83.845435 msec
   [junit4]   1> DW 0 [2017-10-02T11:05:00.733Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment seg-private updates=null
   [junit4]   1> IW 0 [2017-10-02T11:05:00.733Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment _1(7.1.0):C2
   [junit4]   1> BD 0 [2017-10-02T11:05:00.733Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: finished packet delGen=2 now completedDelGen=2
   [junit4]   1> IW 0 [2017-10-02T11:05:00.733Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publish sets newSegment delGen=2 seg=_1(7.1.0):C2
   [junit4]   1> IFD 0 [2017-10-02T11:05:00.733Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now checkpoint "_0(7.1.0):C2 _1(7.1.0):C2" [2 segments ; isCommit = false]
   [junit4]   1> IFD 0 [2017-10-02T11:05:00.733Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to checkpoint
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.733Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: findMerges: 2 segments
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.733Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_0(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.779Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_1(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.779Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   level -1.0 to 3.1085649: 2 segments
   [junit4]   1> MS 0 [2017-10-02T11:05:00.779Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now merge
   [junit4]   1> MS 0 [2017-10-02T11:05:00.779Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   index: _0(7.1.0):C2 _1(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:00.779Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   no more merges pending; now return
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.793Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush postings as segment _2 numDocs=2
   [junit4]   1> IW 0 [2017-10-02T11:05:00.793Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write norms
   [junit4]   1> IW 0 [2017-10-02T11:05:00.793Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write docValues
   [junit4]   1> IW 0 [2017-10-02T11:05:00.793Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write points
   [junit4]   1> IW 0 [2017-10-02T11:05:00.797Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 4 msec to finish stored fields
   [junit4]   1> IW 0 [2017-10-02T11:05:00.848Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 50 msec to write postings and finish vectors
   [junit4]   1> IW 0 [2017-10-02T11:05:00.852Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 4 msec to write fieldInfos
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.852Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has 0 deleted docs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.852Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has no vectors; no norms; no docValues; no prox; no freqs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.852Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushedFiles=[_2_LuceneVarGapDocFreqInterval_0.tiv, _2_LuceneVarGapDocFreqInterval_0.tib, _2.fdt, _2.fnm, _2.fdx, _2_LuceneVarGapDocFreqInterval_0.doc]
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.852Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed codec=Asserting(Lucene70): {id=PostingsFormat(name=LuceneVarGapDocFreqInterval)}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.877Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed: segment=_2 ramUsed=0.071 MB newFlushedSize=0.001 MB docs/MB=2,661.36
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.925Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush time 132.096039 msec
   [junit4]   1> DW 0 [2017-10-02T11:05:00.925Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment seg-private updates=null
   [junit4]   1> IW 0 [2017-10-02T11:05:00.925Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment _2(7.1.0):C2
   [junit4]   1> BD 0 [2017-10-02T11:05:00.925Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: finished packet delGen=3 now completedDelGen=3
   [junit4]   1> IW 0 [2017-10-02T11:05:00.925Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publish sets newSegment delGen=3 seg=_2(7.1.0):C2
   [junit4]   1> IFD 0 [2017-10-02T11:05:00.925Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now checkpoint "_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2" [3 segments ; isCommit = false]
   [junit4]   1> IFD 0 [2017-10-02T11:05:00.925Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to checkpoint
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.925Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: findMerges: 3 segments
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.925Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_0(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.925Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_1(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.947Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_2(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:00.947Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   level -1.0 to 3.1085649: 3 segments
   [junit4]   1> MS 0 [2017-10-02T11:05:00.947Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now merge
   [junit4]   1> MS 0 [2017-10-02T11:05:00.947Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   index: _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:00.947Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   no more merges pending; now return
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.953Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush postings as segment _3 numDocs=2
   [junit4]   1> IW 0 [2017-10-02T11:05:00.953Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write norms
   [junit4]   1> IW 0 [2017-10-02T11:05:00.953Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write docValues
   [junit4]   1> IW 0 [2017-10-02T11:05:00.953Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write points
   [junit4]   1> IW 0 [2017-10-02T11:05:00.957Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 4 msec to finish stored fields
   [junit4]   1> IW 0 [2017-10-02T11:05:00.981Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 23 msec to write postings and finish vectors
   [junit4]   1> IW 0 [2017-10-02T11:05:00.991Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 10 msec to write fieldInfos
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.991Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has 0 deleted docs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.991Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has no vectors; no norms; no docValues; no prox; no freqs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.991Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushedFiles=[_3.fdx, _3_LuceneVarGapDocFreqInterval_0.tib, _3_LuceneVarGapDocFreqInterval_0.tiv, _3.fnm, _3_LuceneVarGapDocFreqInterval_0.doc, _3.fdt]
   [junit4]   1> DWPT 0 [2017-10-02T11:05:00.991Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed codec=Asserting(Lucene70): {id=PostingsFormat(name=LuceneVarGapDocFreqInterval)}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.004Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed: segment=_3 ramUsed=0.071 MB newFlushedSize=0.001 MB docs/MB=2,661.36
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.009Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush time 56.830786 msec
   [junit4]   1> DW 0 [2017-10-02T11:05:01.009Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment seg-private updates=null
   [junit4]   1> IW 0 [2017-10-02T11:05:01.010Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment _3(7.1.0):C2
   [junit4]   1> BD 0 [2017-10-02T11:05:01.010Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: finished packet delGen=4 now completedDelGen=4
   [junit4]   1> IW 0 [2017-10-02T11:05:01.010Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publish sets newSegment delGen=4 seg=_3(7.1.0):C2
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.010Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now checkpoint "_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2" [4 segments ; isCommit = false]
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.010Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to checkpoint
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.010Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: findMerges: 4 segments
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.010Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_0(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.010Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_1(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.010Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_2(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.013Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_3(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.013Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   level -1.0 to 3.1085649: 4 segments
   [junit4]   1> MS 0 [2017-10-02T11:05:01.013Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now merge
   [junit4]   1> MS 0 [2017-10-02T11:05:01.013Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   index: _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:01.013Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   no more merges pending; now return
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.026Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush postings as segment _4 numDocs=2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.026Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write norms
   [junit4]   1> IW 0 [2017-10-02T11:05:01.026Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write docValues
   [junit4]   1> IW 0 [2017-10-02T11:05:01.026Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write points
   [junit4]   1> IW 0 [2017-10-02T11:05:01.041Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 14 msec to finish stored fields
   [junit4]   1> IW 0 [2017-10-02T11:05:01.073Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 31 msec to write postings and finish vectors
   [junit4]   1> IW 0 [2017-10-02T11:05:01.081Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 7 msec to write fieldInfos
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.081Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has 0 deleted docs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.081Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has no vectors; no norms; no docValues; no prox; no freqs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.081Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushedFiles=[_4.fdt, _4.fnm, _4_LuceneVarGapDocFreqInterval_0.doc, _4_LuceneVarGapDocFreqInterval_0.tib, _4.fdx, _4_LuceneVarGapDocFreqInterval_0.tiv]
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.081Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed codec=Asserting(Lucene70): {id=PostingsFormat(name=LuceneVarGapDocFreqInterval)}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.081Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed: segment=_4 ramUsed=0.071 MB newFlushedSize=0.001 MB docs/MB=2,661.36
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush time 61.461512 msec
   [junit4]   1> DW 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment seg-private updates=null
   [junit4]   1> IW 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment _4(7.1.0):C2
   [junit4]   1> BD 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: finished packet delGen=5 now completedDelGen=5
   [junit4]   1> IW 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publish sets newSegment delGen=5 seg=_4(7.1.0):C2
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now checkpoint "_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2" [5 segments ; isCommit = false]
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to checkpoint
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: findMerges: 5 segments
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_0(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_1(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_2(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.088Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_3(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.089Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_4(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.089Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   level -1.0 to 3.1085649: 5 segments
   [junit4]   1> MS 0 [2017-10-02T11:05:01.089Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now merge
   [junit4]   1> MS 0 [2017-10-02T11:05:01.089Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   index: _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:01.089Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   no more merges pending; now return
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.091Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush postings as segment _5 numDocs=2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.091Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write norms
   [junit4]   1> IW 0 [2017-10-02T11:05:01.091Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write docValues
   [junit4]   1> IW 0 [2017-10-02T11:05:01.091Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write points
   [junit4]   1> IW 0 [2017-10-02T11:05:01.094Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 2 msec to finish stored fields
   [junit4]   1> IW 0 [2017-10-02T11:05:01.122Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 27 msec to write postings and finish vectors
   [junit4]   1> IW 0 [2017-10-02T11:05:01.127Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 4 msec to write fieldInfos
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.127Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has 0 deleted docs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.127Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has no vectors; no norms; no docValues; no prox; no freqs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.127Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushedFiles=[_5.fnm, _5_LuceneVarGapDocFreqInterval_0.tib, _5.fdt, _5_LuceneVarGapDocFreqInterval_0.tiv, _5_LuceneVarGapDocFreqInterval_0.doc, _5.fdx]
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.127Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed codec=Asserting(Lucene70): {id=PostingsFormat(name=LuceneVarGapDocFreqInterval)}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.130Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed: segment=_5 ramUsed=0.071 MB newFlushedSize=0.001 MB docs/MB=2,661.36
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush time 46.314564 msec
   [junit4]   1> DW 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment seg-private updates=null
   [junit4]   1> IW 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment _5(7.1.0):C2
   [junit4]   1> BD 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: finished packet delGen=6 now completedDelGen=6
   [junit4]   1> IW 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publish sets newSegment delGen=6 seg=_5(7.1.0):C2
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now checkpoint "_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2" [6 segments ; isCommit = false]
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to checkpoint
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: findMerges: 6 segments
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_0(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_1(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_2(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_3(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.138Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_4(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.140Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_5(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.140Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   level -1.0 to 3.1085649: 6 segments
   [junit4]   1> MS 0 [2017-10-02T11:05:01.140Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now merge
   [junit4]   1> MS 0 [2017-10-02T11:05:01.140Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   index: _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:01.140Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   no more merges pending; now return
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.143Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush postings as segment _6 numDocs=2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.143Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write norms
   [junit4]   1> IW 0 [2017-10-02T11:05:01.143Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write docValues
   [junit4]   1> IW 0 [2017-10-02T11:05:01.143Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write points
   [junit4]   1> IW 0 [2017-10-02T11:05:01.149Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 6 msec to finish stored fields
   [junit4]   1> IW 0 [2017-10-02T11:05:01.169Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 20 msec to write postings and finish vectors
   [junit4]   1> IW 0 [2017-10-02T11:05:01.174Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 5 msec to write fieldInfos
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.175Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has 0 deleted docs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.175Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has no vectors; no norms; no docValues; no prox; no freqs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.175Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushedFiles=[_6_LuceneVarGapDocFreqInterval_0.tiv, _6.fdt, _6_LuceneVarGapDocFreqInterval_0.doc, _6.fnm, _6.fdx, _6_LuceneVarGapDocFreqInterval_0.tib]
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.175Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed codec=Asserting(Lucene70): {id=PostingsFormat(name=LuceneVarGapDocFreqInterval)}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.189Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed: segment=_6 ramUsed=0.071 MB newFlushedSize=0.001 MB docs/MB=2,661.36
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.198Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush time 55.443249 msec
   [junit4]   1> DW 0 [2017-10-02T11:05:01.198Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment seg-private updates=null
   [junit4]   1> IW 0 [2017-10-02T11:05:01.198Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment _6(7.1.0):C2
   [junit4]   1> BD 0 [2017-10-02T11:05:01.198Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: finished packet delGen=7 now completedDelGen=7
   [junit4]   1> IW 0 [2017-10-02T11:05:01.199Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publish sets newSegment delGen=7 seg=_6(7.1.0):C2
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.199Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now checkpoint "_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2" [7 segments ; isCommit = false]
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.199Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to checkpoint
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.199Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: findMerges: 7 segments
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.199Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_0(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.199Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_1(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.199Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_2(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.199Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_3(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.199Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_4(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.199Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_5(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.205Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_6(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.205Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   level -1.0 to 3.1085649: 7 segments
   [junit4]   1> MS 0 [2017-10-02T11:05:01.205Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now merge
   [junit4]   1> MS 0 [2017-10-02T11:05:01.205Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   index: _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:01.205Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   no more merges pending; now return
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.209Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush postings as segment _7 numDocs=2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.209Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write norms
   [junit4]   1> IW 0 [2017-10-02T11:05:01.209Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write docValues
   [junit4]   1> IW 0 [2017-10-02T11:05:01.209Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write points
   [junit4]   1> IW 0 [2017-10-02T11:05:01.213Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 4 msec to finish stored fields
   [junit4]   1> IW 0 [2017-10-02T11:05:01.232Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 18 msec to write postings and finish vectors
   [junit4]   1> IW 0 [2017-10-02T11:05:01.236Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 4 msec to write fieldInfos
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.236Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has 0 deleted docs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.236Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has no vectors; no norms; no docValues; no prox; no freqs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.236Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushedFiles=[_7_LuceneVarGapDocFreqInterval_0.doc, _7_LuceneVarGapDocFreqInterval_0.tib, _7.fnm, _7.fdt, _7_LuceneVarGapDocFreqInterval_0.tiv, _7.fdx]
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.236Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed codec=Asserting(Lucene70): {id=PostingsFormat(name=LuceneVarGapDocFreqInterval)}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.236Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed: segment=_7 ramUsed=0.071 MB newFlushedSize=0.001 MB docs/MB=2,661.36
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.245Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush time 36.610553 msec
   [junit4]   1> DW 0 [2017-10-02T11:05:01.245Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment seg-private updates=null
   [junit4]   1> IW 0 [2017-10-02T11:05:01.245Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment _7(7.1.0):C2
   [junit4]   1> BD 0 [2017-10-02T11:05:01.245Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: finished packet delGen=8 now completedDelGen=8
   [junit4]   1> IW 0 [2017-10-02T11:05:01.245Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publish sets newSegment delGen=8 seg=_7(7.1.0):C2
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.246Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now checkpoint "_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2" [8 segments ; isCommit = false]
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.246Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to checkpoint
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.246Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: findMerges: 8 segments
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.246Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_0(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.246Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_1(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.246Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_2(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.246Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_3(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.246Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_4(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.246Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_5(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.246Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_6(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.252Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_7(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.252Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   level -1.0 to 3.1085649: 8 segments
   [junit4]   1> MS 0 [2017-10-02T11:05:01.252Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now merge
   [junit4]   1> MS 0 [2017-10-02T11:05:01.252Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   index: _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:01.252Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   no more merges pending; now return
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.257Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush postings as segment _8 numDocs=2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.257Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write norms
   [junit4]   1> IW 0 [2017-10-02T11:05:01.257Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write docValues
   [junit4]   1> IW 0 [2017-10-02T11:05:01.257Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write points
   [junit4]   1> IW 0 [2017-10-02T11:05:01.261Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 4 msec to finish stored fields
   [junit4]   1> IW 0 [2017-10-02T11:05:01.275Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 13 msec to write postings and finish vectors
   [junit4]   1> IW 0 [2017-10-02T11:05:01.280Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 4 msec to write fieldInfos
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.280Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has 0 deleted docs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.280Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has no vectors; no norms; no docValues; no prox; no freqs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.280Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushedFiles=[_8.fdx, _8.fdt, _8_LuceneVarGapDocFreqInterval_0.tiv, _8.fnm, _8_LuceneVarGapDocFreqInterval_0.tib, _8_LuceneVarGapDocFreqInterval_0.doc]
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.280Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed codec=Asserting(Lucene70): {id=PostingsFormat(name=LuceneVarGapDocFreqInterval)}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.280Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed: segment=_8 ramUsed=0.071 MB newFlushedSize=0.001 MB docs/MB=2,661.36
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush time 28.973195 msec
   [junit4]   1> DW 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment seg-private updates=null
   [junit4]   1> IW 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment _8(7.1.0):C2
   [junit4]   1> BD 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: finished packet delGen=9 now completedDelGen=9
   [junit4]   1> IW 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publish sets newSegment delGen=9 seg=_8(7.1.0):C2
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now checkpoint "_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2" [9 segments ; isCommit = false]
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to checkpoint
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: findMerges: 9 segments
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_0(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_1(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_2(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_3(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_4(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_5(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_6(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.286Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_7(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.287Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_8(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.287Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   level -1.0 to 3.1085649: 9 segments
   [junit4]   1> MS 0 [2017-10-02T11:05:01.287Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now merge
   [junit4]   1> MS 0 [2017-10-02T11:05:01.288Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   index: _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:01.288Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   no more merges pending; now return
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.291Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush postings as segment _9 numDocs=2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.291Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write norms
   [junit4]   1> IW 0 [2017-10-02T11:05:01.291Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write docValues
   [junit4]   1> IW 0 [2017-10-02T11:05:01.291Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to write points
   [junit4]   1> IW 0 [2017-10-02T11:05:01.295Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 4 msec to finish stored fields
   [junit4]   1> IW 0 [2017-10-02T11:05:01.316Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 20 msec to write postings and finish vectors
   [junit4]   1> IW 0 [2017-10-02T11:05:01.321Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 5 msec to write fieldInfos
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.321Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has 0 deleted docs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.321Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: new segment has no vectors; no norms; no docValues; no prox; no freqs
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.321Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushedFiles=[_9.fdx, _9_LuceneVarGapDocFreqInterval_0.tib, _9.fdt, _9_LuceneVarGapDocFreqInterval_0.doc, _9.fnm, _9_LuceneVarGapDocFreqInterval_0.tiv]
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.321Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed codec=Asserting(Lucene70): {id=PostingsFormat(name=LuceneVarGapDocFreqInterval)}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.324Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flushed: segment=_9 ramUsed=0.071 MB newFlushedSize=0.001 MB docs/MB=2,661.36
   [junit4]   1> DWPT 0 [2017-10-02T11:05:01.339Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: flush time 48.731388 msec
   [junit4]   1> DW 0 [2017-10-02T11:05:01.339Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment seg-private updates=null
   [junit4]   1> IW 0 [2017-10-02T11:05:01.339Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publishFlushedSegment _9(7.1.0):C2
   [junit4]   1> BD 0 [2017-10-02T11:05:01.339Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: finished packet delGen=10 now completedDelGen=10
   [junit4]   1> IW 0 [2017-10-02T11:05:01.339Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: publish sets newSegment delGen=10 seg=_9(7.1.0):C2
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.339Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now checkpoint "_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2 _9(7.1.0):C2" [10 segments ; isCommit = false]
   [junit4]   1> IFD 0 [2017-10-02T11:05:01.340Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: 0 msec to checkpoint
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.340Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: findMerges: 10 segments
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.340Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_0(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.340Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_1(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.340Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_2(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.340Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_3(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.340Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_4(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.340Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_5(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.340Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_6(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.340Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_7(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.340Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_8(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.343Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: seg=_9(7.1.0):C2 level=3.1085649 size=0.001 MB
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.343Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   level -1.0 to 3.1085649: 10 segments
   [junit4]   1> LMP 0 [2017-10-02T11:05:01.344Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   add merge=_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2 _9(7.1.0):C2 start=0 end=10
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: add merge to pendingMerges: _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2 _9(7.1.0):C2 [total 1 pending]
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: registerMerge merging= []
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: registerMerge info=_0(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: registerMerge info=_1(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: registerMerge info=_2(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: registerMerge info=_3(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: registerMerge info=_4(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: registerMerge info=_5(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: registerMerge info=_6(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: registerMerge info=_7(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: registerMerge info=_8(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: registerMerge info=_9(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: now merge
   [junit4]   1> MS 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   index: _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2 _9(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:01.346Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   consider merge _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2 _9(7.1.0):C2
   [junit4]   1> MS 0 [2017-10-02T11:05:01.348Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:     launch new thread [MyMergeThread]
   [junit4]   1> MS 0 [2017-10-02T11:05:01.348Z; MyMergeThread]:   merge thread: start
   [junit4]   1> BD 0 [2017-10-02T11:05:01.349Z; MyMergeThread]: waitApplyForMerge: 0 packets, 10 merging segments
   [junit4]   1> BD 0 [2017-10-02T11:05:01.349Z; MyMergeThread]: waitApply: no deletes to apply
   [junit4]   1> IW 0 [2017-10-02T11:05:01.349Z; MyMergeThread]: now apply deletes for 10 merging segments
   [junit4]   1> IW 0 [2017-10-02T11:05:01.350Z; MyMergeThread]: merge seg=_a _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2 _9(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.350Z; MyMergeThread]: now merge
   [junit4]   1>   merge=_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2 _9(7.1.0):C2
   [junit4]   1>   index=_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2 _9(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.351Z; MyMergeThread]: merging _0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2 _9(7.1.0):C2
   [junit4]   1> IW 0 [2017-10-02T11:05:01.351Z; MyMergeThread]: TEST: now throw exc:
   [junit4]   1> java.io.IOException: now failing during merge
   [junit4]   1> 	at org.apache.lucene.TestMergeSchedulerExternal$FailOnlyOnMerge.eval(TestMergeSchedulerExternal.java:95)
   [junit4]   1> 	at org.apache.lucene.store.MockDirectoryWrapper.maybeThrowDeterministicException(MockDirectoryWrapper.java:1022)
   [junit4]   1> 	at org.apache.lucene.store.MockDirectoryWrapper.openInput(MockDirectoryWrapper.java:743)
   [junit4]   1> 	at org.apache.lucene.store.Directory.openChecksumInput(Directory.java:119)
   [junit4]   1> 	at org.apache.lucene.store.MockDirectoryWrapper.openChecksumInput(MockDirectoryWrapper.java:1072)
   [junit4]   1> 	at org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat.read(Lucene60FieldInfosFormat.java:113)
   [junit4]   1> 	at org.apache.lucene.index.SegmentCoreReaders.<init>(SegmentCoreReaders.java:107)
   [junit4]   1> 	at org.apache.lucene.index.SegmentReader.<init>(SegmentReader.java:78)
   [junit4]   1> 	at org.apache.lucene.index.ReadersAndUpdates.getReader(ReadersAndUpdates.java:208)
   [junit4]   1> 	at org.apache.lucene.index.ReadersAndUpdates.getReaderForMerge(ReadersAndUpdates.java:836)
   [junit4]   1> 	at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:4362)
   [junit4]   1> 	at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:4032)
   [junit4]   1> 	at org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:624)
   [junit4]   1> 	at org.apache.lucene.TestMergeSchedulerExternal$MyMergeScheduler.doMerge(TestMergeSchedulerExternal.java:85)
   [junit4]   1> 	at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:661)
   [junit4]   1> 
   [junit4]   1> IW 0 [2017-10-02T11:05:01.352Z; MyMergeThread]: handleMergeException: merge=_0(7.1.0):C2 _1(7.1.0):C2 _2(7.1.0):C2 _3(7.1.0):C2 _4(7.1.0):C2 _5(7.1.0):C2 _6(7.1.0):C2 _7(7.1.0):C2 _8(7.1.0):C2 _9(7.1.0):C2 exc=java.io.IOException: now failing during merge
   [junit4]   1> IW 0 [2017-10-02T11:05:01.352Z; MyMergeThread]: hit exception during merge
   [junit4]   1> IW 0 [2017-10-02T11:05:01.352Z; MyMergeThread]: hit tragic IOException inside merge
   [junit4]   1> IW 0 [2017-10-02T11:05:01.352Z; MyMergeThread]: rollback
   [junit4]   1> IW 0 [2017-10-02T11:05:01.352Z; MyMergeThread]: all running merges have aborted
   [junit4]   1> IW 0 [2017-10-02T11:05:01.352Z; MyMergeThread]: rollback: done finish merges
   [junit4]   1> MS 0 [2017-10-02T11:05:01.355Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]: updateMergeThreads ioThrottle=true targetMBPerSec=20.0 MB/sec
   [junit4]   1> merge thread MyMergeThread estSize=0.0 MB (written=0.0 MB) runTime=0.0s (stopped=0.0s, paused=0.0s) rate=unlimited
   [junit4]   1>   leave running at Infinity MB/sec
   [junit4]   1> MS 0 [2017-10-02T11:05:01.355Z; TEST-TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler-seed#[D853AFC32C37680]]:   no more merges pending; now return
   [junit4]   1> DW 0 [2017-10-02T11:05:01.355Z; MyMergeThread]: abort
   [junit4]   1> DW 0 [2017-10-02T11:05:01.355Z; MyMergeThread]: done abort success=true
   [junit4]   1> IW 0 [2017-10-02T11:05:01.355Z; MyMergeThread]: rollback: infos=
   [junit4]   1> IW 0 [2017-10-02T11:05:01.356Z; MyMergeThread]: TEST: now handleMergeException
   [junit4]   1> MS 0 [2017-10-02T11:05:01.356Z; MyMergeThread]: updateMergeThreads ioThrottle=true targetMBPerSec=20.0 MB/sec
   [junit4]   1> 
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=TestMergeSchedulerExternal -Dtests.method=testSubclassConcurrentMergeScheduler -Dtests.seed=D853AFC32C37680 -Dtests.slow=true -Dtests.locale=zh-HK -Dtests.timezone=America/Buenos_Aires -Dtests.asserts=true -Dtests.file.encoding=ISO-8859-1
   [junit4] FAILURE 1.01s J1 | TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler <<<
   [junit4]    > Throwable #1: java.lang.AssertionError
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([D853AFC32C37680:8A04875136E30C84]:0)
   [junit4]    > 	at org.apache.lucene.TestMergeSchedulerExternal.testSubclassConcurrentMergeScheduler(TestMergeSchedulerExternal.java:147)
   [junit4]    > 	at java.lang.Thread.run(Thread.java:748)
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene70): {id=PostingsFormat(name=LuceneVarGapDocFreqInterval)}, docValues:{}, maxPointsInLeafNode=1073, maxMBSortInHeap=7.924571263640305, sim=RandomSimilarity(queryNorm=false): {}, locale=zh-HK, timezone=America/Buenos_Aires
   [junit4]   2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_144 (64-bit)/cpus=3,threads=1,free=42243016,total=54788096
   [junit4]   2> NOTE: All tests run in this JVM: [TestSpanMultiTermQueryWrapper, TestSimilarityBase, TestMergeSchedulerExternal]
   [junit4] Completed [9/454 (1!)] on J1 in 1.09s, 2 tests, 1 failure <<< FAILURES!
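
For readers skimming the failure above: TestMergeSchedulerExternal plugs a user-defined subclass of ConcurrentMergeScheduler into an IndexWriter (the MyMergeScheduler / MyMergeThread names are visible in the stack trace), injects an IOException while the merge runs ("now failing during merge"), and then asserts at TestMergeSchedulerExternal.java:147 on the outcome, presumably that the scheduler's hooks were all invoked. A minimal sketch of such a subclass against the Lucene 7.x API is given below; the flag names and the exact set of overrides are illustrative assumptions, not the test's actual source.

    import java.io.IOException;

    import org.apache.lucene.index.ConcurrentMergeScheduler;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.MergePolicy;
    import org.apache.lucene.store.Directory;

    /** Illustrative ConcurrentMergeScheduler subclass; names are hypothetical, not the test's code. */
    public class MyMergeScheduler extends ConcurrentMergeScheduler {

      // Flags a test could assert on after a merge has been forced.
      public volatile boolean mergeThreadCreated;
      public volatile boolean mergeCalled;
      public volatile boolean excCalled;

      // Custom merge thread so its creation can be observed (and named, as in the log above).
      private class MyMergeThread extends ConcurrentMergeScheduler.MergeThread {
        MyMergeThread(IndexWriter writer, MergePolicy.OneMerge merge) {
          super(writer, merge);
          mergeThreadCreated = true;
        }
      }

      @Override
      protected MergeThread getMergeThread(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
        MergeThread thread = new MyMergeThread(writer, merge);
        thread.setDaemon(true);
        thread.setName("MyMergeThread");
        return thread;
      }

      @Override
      protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
        mergeCalled = true;
        super.doMerge(writer, merge);  // delegate to the normal concurrent merge path
      }

      @Override
      protected void handleMergeException(Directory dir, Throwable exc) {
        // Record that the exception hook fired; the test injects the IOException seen above.
        excCalled = true;
      }
    }

Such a scheduler would be installed via IndexWriterConfig.setMergeScheduler(...) before indexing, and the flags checked once a merge has been triggered.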

[...truncated 11806 lines...]
   [junit4] Suite: org.apache.solr.cloud.CollectionsAPIDistributedZkTest
   [junit4]   2> Creating dataDir: /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.CollectionsAPIDistributedZkTest_AC327EC2E4BDF7B2-001/init-core-data-001
   [junit4]   2> 481465 INFO  (SUITE-CollectionsAPIDistributedZkTest-seed#[AC327EC2E4BDF7B2]-worker) [    ] o.a.s.SolrTestCaseJ4 Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP=false
   [junit4]   2> 481466 INFO  (SUITE-CollectionsAPIDistributedZkTest-seed#[AC327EC2E4BDF7B2]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN) w/ MAC_OS_X suppressed clientAuth
   [junit4]   2> 481466 INFO  (SUITE-CollectionsAPIDistributedZkTest-seed#[AC327EC2E4BDF7B2]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 481468 INFO  (SUITE-CollectionsAPIDistributedZkTest-seed#[AC327EC2E4BDF7B2]-worker) [    ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 4 servers in /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.CollectionsAPIDistributedZkTest_AC327EC2E4BDF7B2-001/tempDir-001
   [junit4]   2> 481468 INFO  (SUITE-CollectionsAPIDistributedZkTest-seed#[AC327EC2E4BDF7B2]-worker) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 481469 INFO  (Thread-1432) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 481469 INFO  (Thread-1432) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 481475 ERROR (Thread-1432) [    ] o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper server won't take any action on ERROR or SHUTDOWN server state changes
   [junit4]   2> 481579 INFO  (SUITE-CollectionsAPIDistributedZkTest-seed#[AC327EC2E4BDF7B2]-worker) [    ] o.a.s.c.ZkTestServer start zk server on port:53187
   [junit4]   2> 481614 INFO  (jetty-launcher-908-thread-1) [    ] o.e.j.s.Server jetty-9.3.20.v20170531
   [junit4]   2> 481615 INFO  (jetty-launcher-908-thread-2) [    ] o.e.j.s.Server jetty-9.3.20.v20170531
   [junit4]   2> 481619 INFO  (jetty-launcher-908-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@225d159a{/solr,null,AVAILABLE}
   [junit4]   2> 481621 INFO  (jetty-launcher-908-thread-2) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6faa5c79{/solr,null,AVAILABLE}
   [junit4]   2> 481623 INFO  (jetty-launcher-908-thread-2) [    ] o.e.j.s.AbstractConnector Started ServerConnector@69231d33{HTTP/1.1,[http/1.1]}{127.0.0.1:53190}
   [junit4]   2> 481623 INFO  (jetty-launcher-908-thread-2) [    ] o.e.j.s.Server Started @484820ms
   [junit4]   2> 481623 INFO  (jetty-launcher-908-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=53190}
   [junit4]   2> 481623 ERROR (jetty-launcher-908-thread-2) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 481623 INFO  (jetty-launcher-908-thread-2) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.1.0
   [junit4]   2> 481623 INFO  (jetty-launcher-908-thread-2) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 481623 INFO  (jetty-launcher-908-thread-2) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null, Default config dir: null
   [junit4]   2> 481623 INFO  (jetty-launcher-908-thread-2) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-02T11:29:09.681Z
   [junit4]   2> 481624 INFO  (jetty-launcher-908-thread-3) [    ] o.e.j.s.Server jetty-9.3.20.v20170531
   [junit4]   2> 481625 INFO  (jetty-launcher-908-thread-4) [    ] o.e.j.s.Server jetty-9.3.20.v20170531
   [junit4]   2> 481626 INFO  (jetty-launcher-908-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@1276359a{HTTP/1.1,[http/1.1]}{127.0.0.1:53189}
   [junit4]   2> 481626 INFO  (jetty-launcher-908-thread-1) [    ] o.e.j.s.Server Started @484823ms
   [junit4]   2> 481626 INFO  (jetty-launcher-908-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=53189}
   [junit4]   2> 481626 ERROR (jetty-launcher-908-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 481626 INFO  (jetty-launcher-908-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.1.0
   [junit4]   2> 481628 INFO  (jetty-launcher-908-thread-4) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@d13a587{/solr,null,AVAILABLE}
   [junit4]   2> 481628 INFO  (jetty-launcher-908-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 481629 INFO  (jetty-launcher-908-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null, Default config dir: null
   [junit4]   2> 481629 INFO  (jetty-launcher-908-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-02T11:29:09.687Z
   [junit4]   2> 481629 INFO  (jetty-launcher-908-thread-4) [    ] o.e.j.s.AbstractConnector Started ServerConnector@7241fba5{HTTP/1.1,[http/1.1]}{127.0.0.1:53192}
   [junit4]   2> 481629 INFO  (jetty-launcher-908-thread-4) [    ] o.e.j.s.Server Started @484826ms
   [junit4]   2> 481629 INFO  (jetty-launcher-908-thread-4) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=53192}
   [junit4]   2> 481629 ERROR (jetty-launcher-908-thread-4) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 481629 INFO  (jetty-launcher-908-thread-4) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.1.0
   [junit4]   2> 481629 INFO  (jetty-launcher-908-thread-4) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 481629 INFO  (jetty-launcher-908-thread-4) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null, Default config dir: null
   [junit4]   2> 481629 INFO  (jetty-launcher-908-thread-4) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-02T11:29:09.687Z
   [junit4]   2> 481629 INFO  (jetty-launcher-908-thread-3) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@538b75f0{/solr,null,AVAILABLE}
   [junit4]   2> 481630 INFO  (jetty-launcher-908-thread-3) [    ] o.e.j.s.AbstractConnector Started ServerConnector@267e8b0d{HTTP/1.1,[http/1.1]}{127.0.0.1:53193}
   [junit4]   2> 481630 INFO  (jetty-launcher-908-thread-3) [    ] o.e.j.s.Server Started @484826ms
   [junit4]   2> 481630 INFO  (jetty-launcher-908-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=53193}
   [junit4]   2> 481630 ERROR (jetty-launcher-908-thread-3) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 481630 INFO  (jetty-launcher-908-thread-3) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.1.0
   [junit4]   2> 481630 INFO  (jetty-launcher-908-thread-3) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 481630 INFO  (jetty-launcher-908-thread-3) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null, Default config dir: null
   [junit4]   2> 481630 INFO  (jetty-launcher-908-thread-3) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-02T11:29:09.688Z
   [junit4]   2> 481631 INFO  (jetty-launcher-908-thread-2) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 481634 INFO  (jetty-launcher-908-thread-4) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 481634 INFO  (jetty-launcher-908-thread-1) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 481634 INFO  (jetty-launcher-908-thread-3) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 481658 INFO  (jetty-launcher-908-thread-2) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:53187/solr
   [junit4]   2> 481658 INFO  (jetty-launcher-908-thread-3) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:53187/solr
   [junit4]   2> 481659 INFO  (jetty-launcher-908-thread-4) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:53187/solr
   [junit4]   2> 481660 INFO  (jetty-launcher-908-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:53187/solr
   [junit4]   2> 481834 INFO  (jetty-launcher-908-thread-1) [n:127.0.0.1:53189_solr    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 481834 INFO  (jetty-launcher-908-thread-2) [n:127.0.0.1:53190_solr    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 481834 INFO  (jetty-launcher-908-thread-4) [n:127.0.0.1:53192_solr    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 481834 INFO  (jetty-launcher-908-thread-3) [n:127.0.0.1:53193_solr    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 481835 INFO  (jetty-launcher-908-thread-4) [n:127.0.0.1:53192_solr    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:53192_solr
   [junit4]   2> 481835 INFO  (jetty-launcher-908-thread-1) [n:127.0.0.1:53189_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:53189_solr
   [junit4]   2> 481835 INFO  (jetty-launcher-908-thread-2) [n:127.0.0.1:53190_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:53190_solr
   [junit4]   2> 481836 INFO  (jetty-launcher-908-thread-3) [n:127.0.0.1:53193_solr    ] o.a

[...truncated too long message...]

lrMetricManager Closing metric reporters for registry=solr.cluster, tag=null
   [junit4]   2> 564259 INFO  (jetty-closer-909-thread-4) [    ] o.a.s.c.Overseer Overseer (id=98759065569198092-127.0.0.1:53189_solr-n_0000000001) closing
   [junit4]   2> 564259 INFO  (OverseerStateUpdate-98759065569198092-127.0.0.1:53189_solr-n_0000000001) [n:127.0.0.1:53189_solr    ] o.a.s.c.Overseer Overseer Loop exiting : 127.0.0.1:53189_solr
   [junit4]   2> 564259 INFO  (jetty-closer-909-thread-1) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.cluster, tag=null
   [junit4]   2> 564260 INFO  (jetty-closer-909-thread-2) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.cluster, tag=null
   [junit4]   2> 564263 INFO  (zkCallback-929-thread-5-processing-n:127.0.0.1:53193_solr) [n:127.0.0.1:53193_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (2)
   [junit4]   2> 564263 INFO  (zkCallback-930-thread-5-processing-n:127.0.0.1:53189_solr) [n:127.0.0.1:53189_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (2)
   [junit4]   2> 564266 INFO  (zkCallback-929-thread-2-processing-n:127.0.0.1:53193_solr) [n:127.0.0.1:53193_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (1)
   [junit4]   2> 564266 INFO  (zkCallback-929-thread-5-processing-n:127.0.0.1:53193_solr) [n:127.0.0.1:53193_solr    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:53193_solr
   [junit4]   2> 564267 WARN  (zkCallback-929-thread-2-processing-n:127.0.0.1:53193_solr) [n:127.0.0.1:53193_solr    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 565766 WARN  (zkCallback-944-thread-2-processing-n:127.0.0.1:53192_solr) [n:127.0.0.1:53192_solr    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 565766 WARN  (zkCallback-927-thread-4-processing-n:127.0.0.1:53190_solr) [n:127.0.0.1:53190_solr    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 565766 WARN  (zkCallback-930-thread-5-processing-n:127.0.0.1:53189_solr) [n:127.0.0.1:53189_solr    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 565766 INFO  (jetty-closer-909-thread-2) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@48b7342b{/solr,null,UNAVAILABLE}
   [junit4]   2> 565766 INFO  (jetty-closer-909-thread-3) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@6faa5c79{/solr,null,UNAVAILABLE}
   [junit4]   2> 565767 INFO  (jetty-closer-909-thread-4) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@225d159a{/solr,null,UNAVAILABLE}
   [junit4]   2> 565776 INFO  (jetty-closer-909-thread-1) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@538b75f0{/solr,null,UNAVAILABLE}
   [junit4]   2> 565778 ERROR (SUITE-CollectionsAPIDistributedZkTest-seed#[AC327EC2E4BDF7B2]-worker) [    ] o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper server won't take any action on ERROR or SHUTDOWN server state changes
   [junit4]   2> 565778 INFO  (SUITE-CollectionsAPIDistributedZkTest-seed#[AC327EC2E4BDF7B2]-worker) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:53187 53187
   [junit4]   2> 565779 INFO  (Thread-1478) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:53187 53187
   [junit4]   2> 565894 WARN  (Thread-1478) [    ] o.a.s.c.ZkTestServer Watch limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2> 	22	/solr/configs/conf
   [junit4]   2> 	18	/solr/aliases.json
   [junit4]   2> 	18	/solr/clusterprops.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2> 	52	/solr/collections/addReplicaColl/state.json
   [junit4]   2> 	24	/solr/collections/nodes_used_collection/state.json
   [junit4]   2> 	24	/solr/collections/reloaded_collection/state.json
   [junit4]   2> 	18	/solr/clusterstate.json
   [junit4]   2> 	15	/solr/collections/acollectionafterbaddelete/state.json
   [junit4]   2> 	14	/solr/collections/halfdeletedcollection/state.json
   [junit4]   2> 	10	/solr/collections/onlyinzk/state.json
   [junit4]   2> 	6	/solr/collections/awhollynewcollection_0/state.json
   [junit4]   2> 	5	/solr/collections/halfcollectionblocker/state.json
   [junit4]   2> 	5	/solr/collections/created_and_deleted/state.json
   [junit4]   2> 	5	/solr/collections/halfcollectionblocker2/state.json
   [junit4]   2> 	5	/solr/collections/withconfigset2/state.json
   [junit4]   2> 	4	/solr/collections/halfcollection/state.json
   [junit4]   2> 	2	/solr/collections/addReplicaColl/leader_elect/shard1/election/98759065569198089-core_node10-n_0000000002
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2> 	358	/solr/collections
   [junit4]   2> 	18	/solr/live_nodes
   [junit4]   2> 
   [junit4]   2> NOTE: leaving temporary files on disk at: /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.CollectionsAPIDistributedZkTest_AC327EC2E4BDF7B2-001
   [junit4]   2> Oct 02, 2017 11:30:33 AM com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
   [junit4]   2> WARNING: Will linger awaiting termination of 1 leaked thread(s).
   [junit4]   2> NOTE: test params are: codec=CheapBastard, sim=RandomSimilarity(queryNorm=true): {}, locale=lt-LT, timezone=Asia/Sakhalin
   [junit4]   2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_144 (64-bit)/cpus=3,threads=1,free=288788872,total=429363200
   [junit4]   2> NOTE: All tests run in this JVM: [OverseerRolesTest, TestCoreDiscovery, MoreLikeThisHandlerTest, CoreMergeIndexesAdminHandlerTest, TestLegacyTerms, JavabinLoaderTest, TestZkChroot, TestJoin, SimpleMLTQParserTest, TestSQLHandlerNonCloud, DataDrivenBlockJoinTest, CoreAdminCreateDiscoverTest, MultiThreadedOCPTest, TestTrie, BigEndianAscendingWordDeserializerTest, StatelessScriptUpdateProcessorFactoryTest, TestChildDocTransformer, TemplateUpdateProcessorTest, ShardRoutingCustomTest, BasicFunctionalityTest, TestCopyFieldCollectionResource, TestSolrCLIRunExample, DeleteNodeTest, ImplicitSnitchTest, LeaderInitiatedRecoveryOnShardRestartTest, SampleTest, TestRandomCollapseQParserPlugin, TestRestoreCore, SharedFSAutoReplicaFailoverUtilsTest, SolrCoreCheckLockOnStartupTest, TestStressUserVersions, TestConfigReload, TestAuthorizationFramework, ZkNodePropsTest, TermVectorComponentDistributedTest, FieldMutatingUpdateProcessorTest, SolrCmdDistributorTest, TestPostingsSolrHighlighter, ChangedSchemaMergeTest, TestSimpleTrackingShardHandler, TestCoreAdminApis, TestReload, OutOfBoxZkACLAndCredentialsProvidersTest, TestCharFilters, TestManagedSchemaThreadSafety, TestLegacyFieldReuse, HdfsCollectionsAPIDistributedZkTest, SpatialHeatmapFacetsTest, HdfsUnloadDistributedZkTest, MBeansHandlerTest, TestSolrJ, TestCollationField, TestCollationFieldDocValues, TestRandomFaceting, SearchHandlerTest, TestPartialUpdateDeduplication, TestFieldCacheWithThreads, TestRecovery, TestPivotHelperCode, DistributedQueryElevationComponentTest, BasicZkTest, TestFileDictionaryLookup, TestOverriddenPrefixQueryForCustomFieldType, TestConfigSetImmutable, TestLeaderElectionZkExpiry, DateRangeFieldTest, DistributedFacetPivotWhiteBoxTest, CursorMarkTest, DistributedFacetPivotSmallAdvancedTest, TestCloudManagedSchema, TestCollapseQParserPlugin, SolrIndexMetricsTest, TestCloudPseudoReturnFields, TestCollectionAPIs, SolrRequestParserTest, TermsComponentTest, TestPayloadScoreQParserPlugin, TestLMJelinekMercerSimilarityFactory, TestFieldTypeResource, TestJsonRequest, AutoCommitTest, OverseerCollectionConfigSetProcessorTest, CollectionsAPIDistributedZkTest]
   [junit4] Completed [161/733 (1!)] on J1 in 84.70s, 19 tests, 1 error <<< FAILURES!

[...truncated 39115 lines...]
-documentation-lint:
     [echo] checking for broken html...
    [jtidy] Checking for broken html (such as invalid tags)...
   [delete] Deleting directory /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/lucene/build/jtidy_tmp
     [echo] Checking for broken links...
     [exec] 
     [exec] Crawl/parse...
     [exec] 
     [exec] Verify...
     [exec] 
     [exec] file:///build/docs/sandbox/org/apache/lucene/document/FloatPointNearestNeighbor.html
     [exec]   BROKEN LINK: file:///build/docs/core/org/apache/lucene/document/FloatPointNearestNeighbor.NearestHit.html
     [exec]   BROKEN LINK: file:///build/docs/core/org/apache/lucene/document/FloatPointNearestNeighbor.NearestHit.html
     [exec] 
     [exec] Broken javadocs links were found! Common root causes:
     [exec] * A typo of some sort for manually created links.
     [exec] * Public methods referencing non-public classes in their signature.
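
As context for the second root cause listed above ("Public methods referencing non-public classes in their signature"), the pattern it describes looks roughly like the hypothetical snippet below. The package and class names are invented for illustration and are not taken from this build; whether the FloatPointNearestNeighbor.NearestHit link reported above breaks for this reason or for another of the listed causes is not determined here.

    package org.example.javadocdemo;  // hypothetical package, purely illustrative

    // Package-private class: with default javadoc visibility settings no HTML page
    // is generated for it.
    class NearestHit {
      final int docID;

      NearestHit(int docID) {
        this.docID = docID;
      }
    }

    public class NearestNeighborSearcher {
      /**
       * Returns the nearest hit to {@code target}.
       *
       * The public signature exposes the non-public {@link NearestHit}; any link the
       * generated documentation (or a hand-written javadoc comment) points at for
       * NearestHit.html can end up dangling, which is the kind of broken reference
       * the documentation-lint step reports.
       */
      public NearestHit nearest(int target) {
        return new NearestHit(target);
      }
    }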

BUILD FAILED
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/build.xml:826: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/build.xml:101: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/lucene/build.xml:142: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/lucene/build.xml:155: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/lucene/common-build.xml:2570: exec returned: 1

Total time: 80 minutes 56 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
[WARNINGS] Skipping publisher since build result is FAILURE
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any

[JENKINS] Lucene-Solr-7.x-MacOSX (64bit/jdk1.8.0) - Build # 225 - Still Failing!

Posted by Policeman Jenkins Server <je...@thetaphi.de>.
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-7.x-MacOSX/225/
Java: 64bit/jdk1.8.0 -XX:-UseCompressedOops -XX:+UseConcMarkSweepGC

All tests passed

Build Log:
[...truncated 60430 lines...]
-documentation-lint:
    [jtidy] Checking for broken html (such as invalid tags)...
   [delete] Deleting directory /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/lucene/build/jtidy_tmp
     [echo] Checking for broken links...
     [exec] 
     [exec] Crawl/parse...
     [exec] 
     [exec] Verify...
     [exec] 
     [exec] file:///Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/docs/quickstart.html
     [exec]   BAD EXTERNAL LINK: https://lucene.apache.org/solr/guide/solr-tutorial.html
     [exec] 
     [exec] Broken javadocs links were found! Common root causes:
     [exec] * A typo of some sort for manually created links.
     [exec] * Public methods referencing non-public classes in their signature.

BUILD FAILED
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/build.xml:826: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/build.xml:101: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build.xml:669: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build.xml:682: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/lucene/common-build.xml:2570: exec returned: 1

Total time: 87 minutes 25 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
[WARNINGS] Skipping publisher since build result is FAILURE
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any

[JENKINS] Lucene-Solr-7.x-MacOSX (64bit/jdk1.8.0) - Build # 224 - Failure!

Posted by Policeman Jenkins Server <je...@thetaphi.de>.
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-7.x-MacOSX/224/
Java: 64bit/jdk1.8.0 -XX:+UseCompressedOops -XX:+UseParallelGC

19 tests failed.
FAILED:  org.apache.solr.cloud.ClusterStateUpdateTest.testCoreRegistration

Error Message:


Stack Trace:
java.lang.NullPointerException
	at __randomizedtesting.SeedInfo.seed([E78A801B6608F367:5901E6B41F72FD52]:0)
	at org.apache.solr.client.solrj.embedded.JettySolrRunner.getNodeName(JettySolrRunner.java:345)
	at org.apache.solr.cloud.ClusterStateUpdateTest.testCoreRegistration(ClusterStateUpdateTest.java:66)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1737)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:934)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:970)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:984)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:829)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:879)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:890)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)


FAILED:  junit.framework.TestSuite.org.apache.solr.cloud.ClusterStateUpdateTest

Error Message:
28 threads leaked from SUITE scope at org.apache.solr.cloud.ClusterStateUpdateTest:     1) Thread[id=16804, name=jetty-launcher-3706-thread-1-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at org.apache.zookeeper.client.StaticHostProvider.next(StaticHostProvider.java:101)         at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:997)         at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1060)    2) Thread[id=16828, name=zkCallback-3722-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    3) Thread[id=16825, name=zkCallback-3723-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    4) Thread[id=16832, name=zkCallback-3721-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    5) Thread[id=16809, name=OverseerStateUpdate-98764349772726280-127.0.0.1:51701_solr-n_0000000000, state=TIMED_WAITING, group=Overseer state updater.]         
at java.lang.Thread.sleep(Native Method)         at org.apache.solr.common.cloud.ZkCmdExecutor.retryDelay(ZkCmdExecutor.java:111)         at org.apache.solr.common.cloud.ZkCmdExecutor.retryOperation(ZkCmdExecutor.java:70)         at org.apache.solr.common.cloud.SolrZkClient.getData(SolrZkClient.java:354)         at org.apache.solr.cloud.Overseer$ClusterStateUpdater.amILeader(Overseer.java:407)         at org.apache.solr.cloud.Overseer$ClusterStateUpdater.run(Overseer.java:133)         at java.lang.Thread.run(Thread.java:748)    6) Thread[id=16829, name=zkCallback-3722-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    7) Thread[id=16788, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)         at java.lang.Thread.run(Thread.java:748)    8) Thread[id=16833, name=zkCallback-3721-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    9) Thread[id=16827, name=zkCallback-3722-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   10) Thread[id=16787, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at 
org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)         at java.lang.Thread.run(Thread.java:748)   11) Thread[id=16802, name=jetty-launcher-3706-thread-3-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at org.apache.zookeeper.client.StaticHostProvider.next(StaticHostProvider.java:101)         at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:997)         at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1060)   12) Thread[id=16800, name=jetty-launcher-3706-thread-2-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at org.apache.zookeeper.ClientCnxnSocketNIO.cleanup(ClientCnxnSocketNIO.java:230)         at org.apache.zookeeper.ClientCnxn$SendThread.cleanup(ClientCnxn.java:1246)         at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1170)   13) Thread[id=16808, name=zkCallback-3722-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   14) Thread[id=16805, name=jetty-launcher-3706-thread-1-EventThread, state=WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)         at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039)         at java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:442)         at org.apache.zookeeper.ClientCnxn$EventThread.run(ClientCnxn.java:501)   15) Thread[id=16785, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)         at java.lang.Thread.run(Thread.java:748)   16) Thread[id=16810, name=OverseerCollectionConfigSetProcessor-98764349772726280-127.0.0.1:51701_solr-n_0000000000, state=TIMED_WAITING, group=Overseer collection creation process.]         
at java.lang.Thread.sleep(Native Method)         at org.apache.solr.common.cloud.ZkCmdExecutor.retryDelay(ZkCmdExecutor.java:111)         at org.apache.solr.common.cloud.ZkCmdExecutor.retryOperation(ZkCmdExecutor.java:70)         at org.apache.solr.common.cloud.SolrZkClient.getData(SolrZkClient.java:354)         at org.apache.solr.cloud.OverseerTaskProcessor.amILeader(OverseerTaskProcessor.java:387)         at org.apache.solr.cloud.OverseerTaskProcessor.run(OverseerTaskProcessor.java:193)         at java.lang.Thread.run(Thread.java:748)   17) Thread[id=16789, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)         at java.lang.Thread.run(Thread.java:748)   18) Thread[id=16830, name=zkCallback-3723-thread-5, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   19) Thread[id=16803, name=jetty-launcher-3706-thread-3-EventThread, state=WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)         at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039)         at java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:442)         at org.apache.zookeeper.ClientCnxn$EventThread.run(ClientCnxn.java:501)   20) Thread[id=16831, name=zkCallback-3721-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   21) Thread[id=16806, name=zkCallback-3723-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at 
java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   22) Thread[id=16807, name=zkCallback-3721-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   23) Thread[id=16824, name=zkCallback-3723-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   24) Thread[id=16811, name=OverseerHdfsCoreFailoverThread-98764349772726280-127.0.0.1:51701_solr-n_0000000000, state=TIMED_WAITING, group=Overseer Hdfs SolrCore Failover Thread.]         
at java.lang.Thread.sleep(Native Method)         at org.apache.solr.cloud.OverseerAutoReplicaFailoverThread.run(OverseerAutoReplicaFailoverThread.java:139)         at java.lang.Thread.run(Thread.java:748)   25) Thread[id=16786, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)         at java.lang.Thread.run(Thread.java:748)   26) Thread[id=16801, name=jetty-launcher-3706-thread-2-EventThread, state=WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)         at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039)         at java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:442)         at org.apache.zookeeper.ClientCnxn$EventThread.run(ClientCnxn.java:501)   27) Thread[id=16826, name=zkCallback-3723-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   28) Thread[id=16790, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)         at java.lang.Thread.run(Thread.java:748)

Stack Trace:
com.carrotsearch.randomizedtesting.ThreadLeakError: 28 threads leaked from SUITE scope at org.apache.solr.cloud.ClusterStateUpdateTest: 
   1) Thread[id=16804, name=jetty-launcher-3706-thread-1-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.zookeeper.client.StaticHostProvider.next(StaticHostProvider.java:101)
        at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:997)
        at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1060)
   2) Thread[id=16828, name=zkCallback-3722-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   3) Thread[id=16825, name=zkCallback-3723-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   4) Thread[id=16832, name=zkCallback-3721-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   5) Thread[id=16809, name=OverseerStateUpdate-98764349772726280-127.0.0.1:51701_solr-n_0000000000, state=TIMED_WAITING, group=Overseer state updater.]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.solr.common.cloud.ZkCmdExecutor.retryDelay(ZkCmdExecutor.java:111)
        at org.apache.solr.common.cloud.ZkCmdExecutor.retryOperation(ZkCmdExecutor.java:70)
        at org.apache.solr.common.cloud.SolrZkClient.getData(SolrZkClient.java:354)
        at org.apache.solr.cloud.Overseer$ClusterStateUpdater.amILeader(Overseer.java:407)
        at org.apache.solr.cloud.Overseer$ClusterStateUpdater.run(Overseer.java:133)
        at java.lang.Thread.run(Thread.java:748)
   6) Thread[id=16829, name=zkCallback-3722-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   7) Thread[id=16788, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)
        at java.lang.Thread.run(Thread.java:748)
   8) Thread[id=16833, name=zkCallback-3721-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   9) Thread[id=16827, name=zkCallback-3722-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  10) Thread[id=16787, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)
        at java.lang.Thread.run(Thread.java:748)
  11) Thread[id=16802, name=jetty-launcher-3706-thread-3-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.zookeeper.client.StaticHostProvider.next(StaticHostProvider.java:101)
        at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:997)
        at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1060)
  12) Thread[id=16800, name=jetty-launcher-3706-thread-2-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.zookeeper.ClientCnxnSocketNIO.cleanup(ClientCnxnSocketNIO.java:230)
        at org.apache.zookeeper.ClientCnxn$SendThread.cleanup(ClientCnxn.java:1246)
        at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1170)
  13) Thread[id=16808, name=zkCallback-3722-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  14) Thread[id=16805, name=jetty-launcher-3706-thread-1-EventThread, state=WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039)
        at java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:442)
        at org.apache.zookeeper.ClientCnxn$EventThread.run(ClientCnxn.java:501)
  15) Thread[id=16785, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)
        at java.lang.Thread.run(Thread.java:748)
  16) Thread[id=16810, name=OverseerCollectionConfigSetProcessor-98764349772726280-127.0.0.1:51701_solr-n_0000000000, state=TIMED_WAITING, group=Overseer collection creation process.]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.solr.common.cloud.ZkCmdExecutor.retryDelay(ZkCmdExecutor.java:111)
        at org.apache.solr.common.cloud.ZkCmdExecutor.retryOperation(ZkCmdExecutor.java:70)
        at org.apache.solr.common.cloud.SolrZkClient.getData(SolrZkClient.java:354)
        at org.apache.solr.cloud.OverseerTaskProcessor.amILeader(OverseerTaskProcessor.java:387)
        at org.apache.solr.cloud.OverseerTaskProcessor.run(OverseerTaskProcessor.java:193)
        at java.lang.Thread.run(Thread.java:748)
  17) Thread[id=16789, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)
        at java.lang.Thread.run(Thread.java:748)
  18) Thread[id=16830, name=zkCallback-3723-thread-5, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  19) Thread[id=16803, name=jetty-launcher-3706-thread-3-EventThread, state=WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039)
        at java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:442)
        at org.apache.zookeeper.ClientCnxn$EventThread.run(ClientCnxn.java:501)
  20) Thread[id=16831, name=zkCallback-3721-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  21) Thread[id=16806, name=zkCallback-3723-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  22) Thread[id=16807, name=zkCallback-3721-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  23) Thread[id=16824, name=zkCallback-3723-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  24) Thread[id=16811, name=OverseerHdfsCoreFailoverThread-98764349772726280-127.0.0.1:51701_solr-n_0000000000, state=TIMED_WAITING, group=Overseer Hdfs SolrCore Failover Thread.]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.solr.cloud.OverseerAutoReplicaFailoverThread.run(OverseerAutoReplicaFailoverThread.java:139)
        at java.lang.Thread.run(Thread.java:748)
  25) Thread[id=16786, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)
        at java.lang.Thread.run(Thread.java:748)
  26) Thread[id=16801, name=jetty-launcher-3706-thread-2-EventThread, state=WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039)
        at java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:442)
        at org.apache.zookeeper.ClientCnxn$EventThread.run(ClientCnxn.java:501)
  27) Thread[id=16826, name=zkCallback-3723-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  28) Thread[id=16790, name=Connection evictor, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.http.impl.client.IdleConnectionEvictor$1.run(IdleConnectionEvictor.java:66)
        at java.lang.Thread.run(Thread.java:748)
	at __randomizedtesting.SeedInfo.seed([E78A801B6608F367]:0)
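
Editor's note, not part of the Jenkins output: the leaked threads above are mostly zkCallback-* pool workers parked in SynchronousQueue.poll plus Overseer threads still retrying against ZooKeeper, i.e. executors that were never shut down before the suite ended. A minimal sketch of the kind of teardown that retires such a pool, assuming a plain ExecutorService (names are illustrative, not taken from ClusterStateUpdateTest):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.TimeUnit;

    // Illustrative only: stop a worker pool and interrupt threads still parked in getTask().
    final class ExecutorTeardownSketch {
        static void shutdownAndAwait(ExecutorService pool) throws InterruptedException {
            pool.shutdown();                                   // stop accepting new tasks
            if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
                pool.shutdownNow();                            // interrupt idle/parked workers
            }
        }
    }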


FAILED:  junit.framework.TestSuite.org.apache.solr.cloud.ClusterStateUpdateTest

Error Message:
There are still zombie threads that couldn't be terminated:    1) Thread[id=16804, name=jetty-launcher-3706-thread-1-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1051)    2) Thread[id=16828, name=zkCallback-3722-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    3) Thread[id=16825, name=zkCallback-3723-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    4) Thread[id=16830, name=zkCallback-3723-thread-5, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    5) Thread[id=16832, name=zkCallback-3721-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    6) Thread[id=16831, name=zkCallback-3721-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    7) Thread[id=16806, name=zkCallback-3723-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    8) Thread[id=16807, name=zkCallback-3721-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)    9) Thread[id=16824, name=zkCallback-3723-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at 
java.lang.Thread.run(Thread.java:748)   10) Thread[id=16829, name=zkCallback-3722-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   11) Thread[id=16833, name=zkCallback-3721-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   12) Thread[id=16827, name=zkCallback-3722-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   13) Thread[id=16802, name=jetty-launcher-3706-thread-3-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1051)   14) Thread[id=16826, name=zkCallback-3723-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)   15) Thread[id=16800, name=jetty-launcher-3706-thread-2-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at java.lang.Thread.sleep(Native Method)         at org.apache.zookeeper.client.StaticHostProvider.next(StaticHostProvider.java:101)         at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:997)         at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1060)   16) Thread[id=16808, name=zkCallback-3722-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]         at sun.misc.Unsafe.park(Native Method)         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)         at java.lang.Thread.run(Thread.java:748)

Stack Trace:
com.carrotsearch.randomizedtesting.ThreadLeakError: There are still zombie threads that couldn't be terminated:
   1) Thread[id=16804, name=jetty-launcher-3706-thread-1-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1051)
   2) Thread[id=16828, name=zkCallback-3722-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   3) Thread[id=16825, name=zkCallback-3723-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   4) Thread[id=16830, name=zkCallback-3723-thread-5, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   5) Thread[id=16832, name=zkCallback-3721-thread-3, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   6) Thread[id=16831, name=zkCallback-3721-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   7) Thread[id=16806, name=zkCallback-3723-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   8) Thread[id=16807, name=zkCallback-3721-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
   9) Thread[id=16824, name=zkCallback-3723-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  10) Thread[id=16829, name=zkCallback-3722-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  11) Thread[id=16833, name=zkCallback-3721-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  12) Thread[id=16827, name=zkCallback-3722-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  13) Thread[id=16802, name=jetty-launcher-3706-thread-3-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1051)
  14) Thread[id=16826, name=zkCallback-3723-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
  15) Thread[id=16800, name=jetty-launcher-3706-thread-2-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at java.lang.Thread.sleep(Native Method)
        at org.apache.zookeeper.client.StaticHostProvider.next(StaticHostProvider.java:101)
        at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:997)
        at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1060)
  16) Thread[id=16808, name=zkCallback-3722-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
	at __randomizedtesting.SeedInfo.seed([E78A801B6608F367]:0)
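
Editor's note, not part of the Jenkins output: the zombie list repeats the same zkCallback pools plus the ClientCnxn SendThread/EventThread pairs owned by the jetty-launcher nodes; those ZooKeeper client threads normally exit once the owning handle is closed. A hedged sketch of that close (purely illustrative, not the suite's actual code):

    import org.apache.zookeeper.ZooKeeper;

    // Illustrative only: closing the ZooKeeper handle stops its SendThread and
    // EventThread; any callback executor can then be shut down separately.
    final class ZkCloseSketch {
        static void closeQuietly(ZooKeeper zk) {
            if (zk == null) return;
            try {
                zk.close();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }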


FAILED:  org.apache.solr.cloud.FullSolrCloudDistribCmdsTest.test

Error Message:
java.io.IOException: Couldn't instantiate org.apache.zookeeper.ClientCnxnSocketNIO

Stack Trace:
org.apache.solr.common.SolrException: java.io.IOException: Couldn't instantiate org.apache.zookeeper.ClientCnxnSocketNIO
	at org.apache.solr.common.cloud.SolrZkClient.<init>(SolrZkClient.java:171)
	at org.apache.solr.common.cloud.SolrZkClient.<init>(SolrZkClient.java:117)
	at org.apache.solr.common.cloud.SolrZkClient.<init>(SolrZkClient.java:112)
	at org.apache.solr.common.cloud.SolrZkClient.<init>(SolrZkClient.java:99)
	at org.apache.solr.cloud.AbstractDistribZkTestBase.printLayout(AbstractDistribZkTestBase.java:323)
	at org.apache.solr.cloud.FullSolrCloudDistribCmdsTest.testIndexingBatchPerRequestWithHttpSolrClient(FullSolrCloudDistribCmdsTest.java:660)
	at org.apache.solr.cloud.FullSolrCloudDistribCmdsTest.test(FullSolrCloudDistribCmdsTest.java:152)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1737)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:934)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:970)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:984)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:993)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:968)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:829)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:879)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:890)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.IOException: Couldn't instantiate org.apache.zookeeper.ClientCnxnSocketNIO
	at org.apache.zookeeper.ZooKeeper.getClientCnxnSocket(ZooKeeper.java:1842)
	at org.apache.zookeeper.ZooKeeper.<init>(ZooKeeper.java:447)
	at org.apache.zookeeper.ZooKeeper.<init>(ZooKeeper.java:380)
	at org.apache.solr.common.cloud.SolrZooKeeper.<init>(SolrZooKeeper.java:43)
	at org.apache.solr.common.cloud.ZkClientConnectionStrategy.createSolrZooKeeper(ZkClientConnectionStrategy.java:105)
	at org.apache.solr.common.cloud.DefaultConnectionStrategy.connect(DefaultConnectionStrategy.java:37)
	at org.apache.solr.common.cloud.SolrZkClient.<init>(SolrZkClient.java:146)
	... 47 more
Caused by: java.io.IOException: Too many open files
	at sun.nio.ch.IOUtil.makePipe(Native Method)
	at sun.nio.ch.KQueueSelectorImpl.<init>(KQueueSelectorImpl.java:84)
	at sun.nio.ch.KQueueSelectorProvider.openSelector(KQueueSelectorProvider.java:42)
	at java.nio.channels.Selector.open(Selector.java:227)
	at org.apache.zookeeper.ClientCnxnSocketNIO.<init>(ClientCnxnSocketNIO.java:43)
	at sun.reflect.GeneratedConstructorAccessor130.newInstance(Unknown Source)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at java.lang.Class.newInstance(Class.java:442)
	at org.apache.zookeeper.ZooKeeper.getClientCnxnSocket(ZooKeeper.java:1839)
	... 53 more
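
Editor's note, not part of the Jenkins output: the root cause in this chain is file-descriptor exhaustion ("Too many open files" out of sun.nio.ch.IOUtil.makePipe), which on macOS build machines usually means the per-process ulimit is too low for the sockets and selectors the suite opens. A hedged diagnostic using only standard JDK APIs (not part of the test) that prints the JVM's open versus maximum descriptor counts:

    import java.lang.management.ManagementFactory;
    import java.lang.management.OperatingSystemMXBean;
    import com.sun.management.UnixOperatingSystemMXBean;

    // Illustrative diagnostic: report open vs. maximum file descriptors for this JVM.
    final class FdCheck {
        public static void main(String[] args) {
            OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();
            if (os instanceof UnixOperatingSystemMXBean) {
                UnixOperatingSystemMXBean unix = (UnixOperatingSystemMXBean) os;
                System.out.println("open fds: " + unix.getOpenFileDescriptorCount()
                        + " / max: " + unix.getMaxFileDescriptorCount());
            }
        }
    }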


FAILED:  org.apache.solr.handler.TestReplicationHandler.doTestStressReplication

Error Message:
IOException occured when talking to server at: http://127.0.0.1:51573/solr/collection1

Stack Trace:
org.apache.solr.client.solrj.SolrServerException: IOException occured when talking to server at: http://127.0.0.1:51573/solr/collection1
	at __randomizedtesting.SeedInfo.seed([E78A801B6608F367:3C2180DD63209AD4]:0)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:641)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:253)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:242)
	at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:178)
	at org.apache.solr.client.solrj.SolrClient.deleteByQuery(SolrClient.java:895)
	at org.apache.solr.client.solrj.SolrClient.deleteByQuery(SolrClient.java:858)
	at org.apache.solr.client.solrj.SolrClient.deleteByQuery(SolrClient.java:873)
	at org.apache.solr.handler.TestReplicationHandler.doTestStressReplication(TestReplicationHandler.java:850)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1737)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:934)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:970)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:984)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:829)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:879)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:890)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.SocketException: Too many open files
	at java.net.Socket.createImpl(Socket.java:460)
	at java.net.Socket.getImpl(Socket.java:520)
	at java.net.Socket.setSoTimeout(Socket.java:1141)
	at org.apache.http.impl.conn.DefaultHttpClientConnectionOperator.connect(DefaultHttpClientConnectionOperator.java:120)
	at org.apache.http.impl.conn.PoolingHttpClientConnectionManager.connect(PoolingHttpClientConnectionManager.java:359)
	at org.apache.http.impl.execchain.MainClientExec.establishRoute(MainClientExec.java:381)
	at org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:237)
	at org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:185)
	at org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:89)
	at org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:111)
	at org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
	at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
	at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:526)
	... 46 more


FAILED:  org.apache.solr.handler.TestReplicationHandler.testRateLimitedReplication

Error Message:
IOException occured when talking to server at: http://127.0.0.1:51576/solr/collection1

Stack Trace:
org.apache.solr.client.solrj.SolrServerException: IOException occured when talking to server at: http://127.0.0.1:51576/solr/collection1
	at __randomizedtesting.SeedInfo.seed([E78A801B6608F367:611EF5EE69BA728A]:0)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:641)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:253)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:242)
	at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:178)
	at org.apache.solr.client.solrj.SolrClient.deleteByQuery(SolrClient.java:895)
	at org.apache.solr.client.solrj.SolrClient.deleteByQuery(SolrClient.java:858)
	at org.apache.solr.client.solrj.SolrClient.deleteByQuery(SolrClient.java:873)
	at org.apache.solr.handler.TestReplicationHandler.testRateLimitedReplication(TestReplicationHandler.java:1382)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1737)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:934)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:970)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:984)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:829)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:879)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:890)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.SocketException: Too many open files
	at java.net.Socket.createImpl(Socket.java:460)
	at java.net.Socket.getImpl(Socket.java:520)
	at java.net.Socket.setSoTimeout(Socket.java:1141)
	at org.apache.http.impl.conn.DefaultHttpClientConnectionOperator.connect(DefaultHttpClientConnectionOperator.java:120)
	at org.apache.http.impl.conn.PoolingHttpClientConnectionManager.connect(PoolingHttpClientConnectionManager.java:359)
	at org.apache.http.impl.execchain.MainClientExec.establishRoute(MainClientExec.java:381)
	at org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:237)
	at org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:185)
	at org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:89)
	at org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:111)
	at org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
	at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
	at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:526)
	... 46 more


FAILED:  org.apache.solr.handler.TestReplicationHandler.doTestReplicateAfterStartup

Error Message:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.handler.TestReplicationHandler_E78A801B6608F367-001/solr-instance-006/collection1/core.properties: Too many open files

Stack Trace:
java.nio.file.FileSystemException: /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.handler.TestReplicationHandler_E78A801B6608F367-001/solr-instance-006/collection1/core.properties: Too many open files
	at sun.nio.fs.UnixException.translateToIOException(UnixException.java:91)
	at sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:102)
	at sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:107)
	at sun.nio.fs.UnixFileSystemProvider.newByteChannel(UnixFileSystemProvider.java:214)
	at java.nio.file.spi.FileSystemProvider.newOutputStream(FileSystemProvider.java:434)
	at java.nio.file.Files.newOutputStream(Files.java:216)
	at org.apache.solr.SolrTestCaseJ4.writeCoreProperties(SolrTestCaseJ4.java:200)
	at org.apache.solr.handler.TestReplicationHandler$SolrInstance.setUp(TestReplicationHandler.java:1628)
	at org.apache.solr.handler.TestReplicationHandler.setUp(TestReplicationHandler.java:123)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1737)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:968)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:984)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:829)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:879)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:890)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)


FAILED:  org.apache.solr.handler.TestReplicationHandler.doTestRepeater

Error Message:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.handler.TestReplicationHandler_E78A801B6608F367-001/solr-instance-008/collection1/core.properties: Too many open files

Stack Trace:
java.nio.file.FileSystemException: /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-core/test/J0/temp/solr.handler.TestReplicationHandler_E78A801B6608F367-001/solr-instance-008/collection1/core.properties: Too many open files
	at sun.nio.fs.UnixException.translateToIOException(UnixException.java:91)
	at sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:102)
	at sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:107)
	at sun.nio.fs.UnixFileSystemProvider.newByteChannel(UnixFileSystemProvider.java:214)
	at java.nio.file.spi.FileSystemProvider.newOutputStream(FileSystemProvider.java:434)
	at java.nio.file.Files.newOutputStream(Files.java:216)
	at org.apache.solr.SolrTestCaseJ4.writeCoreProperties(SolrTestCaseJ4.java:200)
	at org.apache.solr.handler.TestReplicationHandler$SolrInstance.setUp(TestReplicationHandler.java:1628)
	at org.apache.solr.handler.TestReplicationHandler.setUp(TestReplicationHandler.java:123)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1737)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:968)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:984)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:

[...truncated too long message...]

   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
   [junit4]    >         at java.lang.Thread.run(Thread.java:748)
   [junit4]    >    8) Thread[id=16807, name=zkCallback-3721-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
   [junit4]    >         at sun.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
   [junit4]    >         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
   [junit4]    >         at java.lang.Thread.run(Thread.java:748)
   [junit4]    >    9) Thread[id=16824, name=zkCallback-3723-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
   [junit4]    >         at sun.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
   [junit4]    >         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
   [junit4]    >         at java.lang.Thread.run(Thread.java:748)
   [junit4]    >   10) Thread[id=16829, name=zkCallback-3722-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
   [junit4]    >         at sun.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
   [junit4]    >         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
   [junit4]    >         at java.lang.Thread.run(Thread.java:748)
   [junit4]    >   11) Thread[id=16833, name=zkCallback-3721-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
   [junit4]    >         at sun.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
   [junit4]    >         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
   [junit4]    >         at java.lang.Thread.run(Thread.java:748)
   [junit4]    >   12) Thread[id=16827, name=zkCallback-3722-thread-2, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
   [junit4]    >         at sun.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
   [junit4]    >         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
   [junit4]    >         at java.lang.Thread.run(Thread.java:748)
   [junit4]    >   13) Thread[id=16802, name=jetty-launcher-3706-thread-3-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
   [junit4]    >         at java.lang.Thread.sleep(Native Method)
   [junit4]    >         at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1051)
   [junit4]    >   14) Thread[id=16826, name=zkCallback-3723-thread-4, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
   [junit4]    >         at sun.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
   [junit4]    >         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
   [junit4]    >         at java.lang.Thread.run(Thread.java:748)
   [junit4]    >   15) Thread[id=16800, name=jetty-launcher-3706-thread-2-SendThread(127.0.0.1:51698), state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
   [junit4]    >         at java.lang.Thread.sleep(Native Method)
   [junit4]    >         at org.apache.zookeeper.client.StaticHostProvider.next(StaticHostProvider.java:101)
   [junit4]    >         at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:997)
   [junit4]    >         at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1060)
   [junit4]    >   16) Thread[id=16808, name=zkCallback-3722-thread-1, state=TIMED_WAITING, group=TGRP-ClusterStateUpdateTest]
   [junit4]    >         at sun.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
   [junit4]    >         at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:362)
   [junit4]    >         at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:941)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1073)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1134)
   [junit4]    >         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
   [junit4]    >         at java.lang.Thread.run(Thread.java:748)
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([E78A801B6608F367]:0)
   [junit4] Completed [373/733 (3!)] on J0 in 24.55s, 1 test, 3 errors <<< FAILURES!

[...truncated 47655 lines...]
-documentation-lint:
    [jtidy] Checking for broken html (such as invalid tags)...
   [delete] Deleting directory /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/lucene/build/jtidy_tmp
     [echo] Checking for broken links...
     [exec] 
     [exec] Crawl/parse...
     [exec] 
     [exec] Verify...
     [exec] 
     [exec] file:///Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/docs/quickstart.html
     [exec]   BAD EXTERNAL LINK: https://lucene.apache.org/solr/guide/solr-tutorial.html
     [exec] 
     [exec] Broken javadocs links were found! Common root causes:
     [exec] * A typo of some sort for manually created links.
     [exec] * Public methods referencing non-public classes in their signature.

BUILD FAILED
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/build.xml:826: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/build.xml:101: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build.xml:669: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build.xml:682: The following error occurred while executing this line:
/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/lucene/common-build.xml:2570: exec returned: 1

Total time: 74 minutes 6 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
[WARNINGS] Skipping publisher since build result is FAILURE
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any

[JENKINS] Lucene-Solr-7.x-MacOSX (64bit/jdk-9) - Build # 223 - Still unstable!

Posted by Policeman Jenkins Server <je...@thetaphi.de>.
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-7.x-MacOSX/223/
Java: 64bit/jdk-9 -XX:-UseCompressedOops -XX:+UseG1GC --illegal-access=deny

1 tests failed.
FAILED:  org.apache.solr.client.solrj.io.stream.StreamExpressionTest.testExecutorStream

Error Message:
Error from server at https://127.0.0.1:56976/solr/workQueue_shard2_replica_n3: Expected mime type application/octet-stream but got text/html. <html> <head> <meta http-equiv="Content-Type" content="text/html;charset=ISO-8859-1"/> <title>Error 404 </title> </head> <body> <h2>HTTP ERROR: 404</h2> <p>Problem accessing /solr/workQueue_shard2_replica_n3/update. Reason: <pre>    Can not find: /solr/workQueue_shard2_replica_n3/update</pre></p> <hr /><a href="http://eclipse.org/jetty">Powered by Jetty:// 9.3.20.v20170531</a><hr/> </body> </html> 

Stack Trace:
org.apache.solr.client.solrj.impl.CloudSolrClient$RouteException: Error from server at https://127.0.0.1:56976/solr/workQueue_shard2_replica_n3: Expected mime type application/octet-stream but got text/html. <html>
<head>
<meta http-equiv="Content-Type" content="text/html;charset=ISO-8859-1"/>
<title>Error 404 </title>
</head>
<body>
<h2>HTTP ERROR: 404</h2>
<p>Problem accessing /solr/workQueue_shard2_replica_n3/update. Reason:
<pre>    Can not find: /solr/workQueue_shard2_replica_n3/update</pre></p>
<hr /><a href="http://eclipse.org/jetty">Powered by Jetty:// 9.3.20.v20170531</a><hr/>
</body>
</html>

	at __randomizedtesting.SeedInfo.seed([89BF67B7CD937318:AB7FE64CEEF95908]:0)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.directUpdate(CloudSolrClient.java:539)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.sendRequest(CloudSolrClient.java:993)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.requestWithRetryOnStaleState(CloudSolrClient.java:862)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.request(CloudSolrClient.java:793)
	at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:178)
	at org.apache.solr.client.solrj.request.UpdateRequest.commit(UpdateRequest.java:233)
	at org.apache.solr.client.solrj.io.stream.StreamExpressionTest.testExecutorStream(StreamExpressionTest.java:7471)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:564)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1737)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:934)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:970)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:984)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:829)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:879)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:890)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:844)
Caused by: org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error from server at https://127.0.0.1:56976/solr/workQueue_shard2_replica_n3: Expected mime type application/octet-stream but got text/html. <html>
<head>
<meta http-equiv="Content-Type" content="text/html;charset=ISO-8859-1"/>
<title>Error 404 </title>
</head>
<body>
<h2>HTTP ERROR: 404</h2>
<p>Problem accessing /solr/workQueue_shard2_replica_n3/update. Reason:
<pre>    Can not find: /solr/workQueue_shard2_replica_n3/update</pre></p>
<hr /><a href="http://eclipse.org/jetty">Powered by Jetty:// 9.3.20.v20170531</a><hr/>
</body>
</html>

	at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:591)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:253)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:242)
	at org.apache.solr.client.solrj.impl.LBHttpSolrClient.doRequest(LBHttpSolrClient.java:483)
	at org.apache.solr.client.solrj.impl.LBHttpSolrClient.request(LBHttpSolrClient.java:413)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.lambda$directUpdate$0(CloudSolrClient.java:516)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:188)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
	... 1 more




Build Log:
[...truncated 14844 lines...]
   [junit4] Suite: org.apache.solr.client.solrj.io.stream.StreamExpressionTest
   [junit4]   2> Creating dataDir: /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-solrj/test/J1/temp/solr.client.solrj.io.stream.StreamExpressionTest_89BF67B7CD937318-001/init-core-data-001
   [junit4]   2> 117928 WARN  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=4 numCloses=4
   [junit4]   2> 117928 INFO  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 117928 INFO  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason="", value=0.0/0.0, ssl=0.0/0.0, clientAuth=0.0/0.0) w/ MAC_OS_X supressed clientAuth
   [junit4]   2> 117929 INFO  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 117929 INFO  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 4 servers in /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-solrj/test/J1/temp/solr.client.solrj.io.stream.StreamExpressionTest_89BF67B7CD937318-001/tempDir-001
   [junit4]   2> 117929 INFO  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 117930 INFO  (Thread-397) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 117930 INFO  (Thread-397) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 117932 ERROR (Thread-397) [    ] o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper server won't take any action on ERROR or SHUTDOWN server state changes
   [junit4]   2> 118041 INFO  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.c.ZkTestServer start zk server on port:56970
   [junit4]   2> 118077 INFO  (jetty-launcher-889-thread-1) [    ] o.e.j.s.Server jetty-9.3.20.v20170531
   [junit4]   2> 118077 INFO  (jetty-launcher-889-thread-3) [    ] o.e.j.s.Server jetty-9.3.20.v20170531
   [junit4]   2> 118077 INFO  (jetty-launcher-889-thread-2) [    ] o.e.j.s.Server jetty-9.3.20.v20170531
   [junit4]   2> 118080 INFO  (jetty-launcher-889-thread-4) [    ] o.e.j.s.Server jetty-9.3.20.v20170531
   [junit4]   2> 118083 INFO  (jetty-launcher-889-thread-2) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@24cb4288{/solr,null,AVAILABLE}
   [junit4]   2> 118086 INFO  (jetty-launcher-889-thread-3) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@225ab1bf{/solr,null,AVAILABLE}
   [junit4]   2> 118087 INFO  (jetty-launcher-889-thread-2) [    ] o.e.j.s.AbstractConnector Started ServerConnector@36f5816a{SSL,[ssl, http/1.1]}{127.0.0.1:56972}
   [junit4]   2> 118087 INFO  (jetty-launcher-889-thread-2) [    ] o.e.j.s.Server Started @121314ms
   [junit4]   2> 118087 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=56972}
   [junit4]   2> 118087 INFO  (jetty-launcher-889-thread-3) [    ] o.e.j.s.AbstractConnector Started ServerConnector@5f9e6873{SSL,[ssl, http/1.1]}{127.0.0.1:56973}
   [junit4]   2> 118087 INFO  (jetty-launcher-889-thread-3) [    ] o.e.j.s.Server Started @121315ms
   [junit4]   2> 118087 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=56973}
   [junit4]   2> 118087 ERROR (jetty-launcher-889-thread-2) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 118088 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.1.0
   [junit4]   2> 118088 ERROR (jetty-launcher-889-thread-3) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 118088 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 118088 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null, Default config dir: null
   [junit4]   2> 118088 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-02T22:57:38.546462Z
   [junit4]   2> 118088 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.1.0
   [junit4]   2> 118088 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 118088 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null, Default config dir: null
   [junit4]   2> 118088 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-02T22:57:38.546819Z
   [junit4]   2> 118092 INFO  (jetty-launcher-889-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@1d0ef707{/solr,null,AVAILABLE}
   [junit4]   2> 118092 INFO  (jetty-launcher-889-thread-4) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@5e7ee8e1{/solr,null,AVAILABLE}
   [junit4]   2> 118093 INFO  (jetty-launcher-889-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@9c15975{SSL,[ssl, http/1.1]}{127.0.0.1:56975}
   [junit4]   2> 118093 INFO  (jetty-launcher-889-thread-1) [    ] o.e.j.s.Server Started @121321ms
   [junit4]   2> 118094 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=56975}
   [junit4]   2> 118094 INFO  (jetty-launcher-889-thread-4) [    ] o.e.j.s.AbstractConnector Started ServerConnector@7838d904{SSL,[ssl, http/1.1]}{127.0.0.1:56976}
   [junit4]   2> 118094 ERROR (jetty-launcher-889-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 118094 INFO  (jetty-launcher-889-thread-4) [    ] o.e.j.s.Server Started @121321ms
   [junit4]   2> 118094 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=56976}
   [junit4]   2> 118094 ERROR (jetty-launcher-889-thread-4) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 118095 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.1.0
   [junit4]   2> 118095 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 118095 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null, Default config dir: null
   [junit4]   2> 118095 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-02T22:57:38.553215Z
   [junit4]   2> 118095 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.1.0
   [junit4]   2> 118095 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 118095 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null, Default config dir: null
   [junit4]   2> 118095 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-02T22:57:38.553549Z
   [junit4]   2> 118102 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 118103 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 118104 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 118105 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 118117 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:56970/solr
   [junit4]   2> 118117 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:56970/solr
   [junit4]   2> 118118 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:56970/solr
   [junit4]   2> 118120 WARN  (NIOServerCxn.Factory:0.0.0.0/0.0.0.0:0) [    ] o.a.z.s.NIOServerCnxn caught end of stream exception
   [junit4]   2> EndOfStreamException: Unable to read additional data from client sessionid 0x15edf4f20550006, likely client has closed socket
   [junit4]   2> 	at org.apache.zookeeper.server.NIOServerCnxn.doIO(NIOServerCnxn.java:239)
   [junit4]   2> 	at org.apache.zookeeper.server.NIOServerCnxnFactory.run(NIOServerCnxnFactory.java:203)
   [junit4]   2> 	at java.base/java.lang.Thread.run(Thread.java:844)
   [junit4]   2> 118121 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:56970/solr
   [junit4]   2> 118126 WARN  (NIOServerCxn.Factory:0.0.0.0/0.0.0.0:0) [    ] o.a.z.s.NIOServerCnxn caught end of stream exception
   [junit4]   2> EndOfStreamException: Unable to read additional data from client sessionid 0x15edf4f20550009, likely client has closed socket
   [junit4]   2> 	at org.apache.zookeeper.server.NIOServerCnxn.doIO(NIOServerCnxn.java:239)
   [junit4]   2> 	at org.apache.zookeeper.server.NIOServerCnxnFactory.run(NIOServerCnxnFactory.java:203)
   [junit4]   2> 	at java.base/java.lang.Thread.run(Thread.java:844)
   [junit4]   2> 118154 WARN  (jetty-launcher-889-thread-3) [    ] o.a.s.c.ZkController The _default configset could not be uploaded. Please provide 'solr.default.confdir' parameter that points to a configset intended to be the default. Current 'solr.default.confdir' value: null
   [junit4]   2> 118156 WARN  (jetty-launcher-889-thread-2) [    ] o.a.s.c.ZkController The _default configset could not be uploaded. Please provide 'solr.default.confdir' parameter that points to a configset intended to be the default. Current 'solr.default.confdir' value: null
   [junit4]   2> 118156 WARN  (jetty-launcher-889-thread-4) [    ] o.a.s.c.ZkController The _default configset could not be uploaded. Please provide 'solr.default.confdir' parameter that points to a configset intended to be the default. Current 'solr.default.confdir' value: null
   [junit4]   2> 118157 WARN  (jetty-launcher-889-thread-1) [    ] o.a.s.c.ZkController The _default configset could not be uploaded. Please provide 'solr.default.confdir' parameter that points to a configset intended to be the default. Current 'solr.default.confdir' value: null
   [junit4]   2> 118165 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 118165 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 118165 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 118166 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:56975_solr
   [junit4]   2> 118167 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:56973_solr
   [junit4]   2> 118167 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 118168 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:56976_solr
   [junit4]   2> 118169 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.c.Overseer Overseer (id=98761772786581516-127.0.0.1:56975_solr-n_0000000000) starting
   [junit4]   2> 118170 INFO  (zkCallback-911-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 118171 INFO  (zkCallback-907-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 118171 INFO  (zkCallback-910-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 118171 INFO  (zkCallback-909-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 118177 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:56972_solr
   [junit4]   2> 118181 INFO  (zkCallback-910-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 118181 INFO  (zkCallback-907-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 118182 INFO  (zkCallback-911-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 118182 INFO  (zkCallback-909-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 118201 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:56975_solr
   [junit4]   2> 118203 INFO  (zkCallback-910-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 118204 INFO  (zkCallback-907-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 118204 INFO  (zkCallback-911-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 118204 INFO  (zkCallback-909-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 118253 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56976.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118263 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56976.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118275 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56975.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118279 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56976.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118280 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56972.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118284 INFO  (jetty-launcher-889-thread-4) [    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-solrj/test/J1/temp/solr.client.solrj.io.stream.StreamExpressionTest_89BF67B7CD937318-001/tempDir-001/node4/.
   [junit4]   2> 118319 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56973.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118319 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56972.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118320 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56972.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118320 WARN  (jetty-launcher-889-thread-1) [    ] o.a.s.m.r.j.JmxMetricsReporter Unable to register gauge
   [junit4]   2> javax.management.InstanceNotFoundException: solr_56975:dom1=solr,dom2=jvm,name0=os,name=committedVirtualMemorySize
   [junit4]   2> 	at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1083)
   [junit4]   2> 	at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:423)
   [junit4]   2> 	at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:411)
   [junit4]   2> 	at java.management/com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:546)
   [junit4]   2> 	at org.apache.solr.metrics.reporters.jmx.JmxMetricsReporter$JmxListener.registerMBean(JmxMetricsReporter.java:530)
   [junit4]   2> 	at org.apache.solr.metrics.reporters.jmx.JmxMetricsReporter$JmxListener.onGaugeAdded(JmxMetricsReporter.java:565)
   [junit4]   2> 	at org.apache.solr.metrics.reporters.jmx.JmxMetricsReporter.lambda$start$0(JmxMetricsReporter.java:738)
   [junit4]   2> 	at java.base/java.util.HashMap.forEach(HashMap.java:1340)
   [junit4]   2> 	at org.apache.solr.metrics.reporters.jmx.JmxMetricsReporter.start(JmxMetricsReporter.java:728)
   [junit4]   2> 	at org.apache.solr.metrics.reporters.SolrJmxReporter.doInit(SolrJmxReporter.java:109)
   [junit4]   2> 	at org.apache.solr.metrics.SolrMetricReporter.init(SolrMetricReporter.java:70)
   [junit4]   2> 	at org.apache.solr.metrics.SolrMetricManager.loadReporter(SolrMetricManager.java:849)
   [junit4]   2> 	at org.apache.solr.metrics.SolrMetricManager.loadReporters(SolrMetricManager.java:817)
   [junit4]   2> 	at org.apache.solr.core.CoreContainer.load(CoreContainer.java:544)
   [junit4]   2> 	at org.apache.solr.servlet.SolrDispatchFilter.createCoreContainer(SolrDispatchFilter.java:263)
   [junit4]   2> 	at org.apache.solr.servlet.SolrDispatchFilter.init(SolrDispatchFilter.java:183)
   [junit4]   2> 	at org.eclipse.jetty.servlet.FilterHolder.initialize(FilterHolder.java:139)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:873)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.updateMappings(ServletHandler.java:1596)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.setFilterMappings(ServletHandler.java:1659)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.addFilterMapping(ServletHandler.java:1316)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.addFilterWithMapping(ServletHandler.java:1145)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletContextHandler.addFilter(ServletContextHandler.java:448)
   [junit4]   2> 	at org.apache.solr.client.solrj.embedded.JettySolrRunner$1.lifeCycleStarted(JettySolrRunner.java:306)
   [junit4]   2> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.setStarted(AbstractLifeCycle.java:179)
   [junit4]   2> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:69)
   [junit4]   2> 	at org.apache.solr.client.solrj.embedded.JettySolrRunner.start(JettySolrRunner.java:394)
   [junit4]   2> 	at org.apache.solr.client.solrj.embedded.JettySolrRunner.start(JettySolrRunner.java:367)
   [junit4]   2> 	at org.apache.solr.cloud.MiniSolrCloudCluster.startJettySolrRunner(MiniSolrCloudCluster.java:384)
   [junit4]   2> 	at org.apache.solr.cloud.MiniSolrCloudCluster.lambda$new$0(MiniSolrCloudCluster.java:247)
   [junit4]   2> 	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
   [junit4]   2> 	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:188)
   [junit4]   2> 	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
   [junit4]   2> 	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
   [junit4]   2> 	at java.base/java.lang.Thread.run(Thread.java:844)
   [junit4]   2> 118324 INFO  (jetty-launcher-889-thread-2) [    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-solrj/test/J1/temp/solr.client.solrj.io.stream.StreamExpressionTest_89BF67B7CD937318-001/tempDir-001/node2/.
   [junit4]   2> 118334 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56975.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118334 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56975.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118338 INFO  (jetty-launcher-889-thread-1) [    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-solrj/test/J1/temp/solr.client.solrj.io.stream.StreamExpressionTest_89BF67B7CD937318-001/tempDir-001/node1/.
   [junit4]   2> 118344 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56973.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118344 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56973.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 118345 INFO  (jetty-launcher-889-thread-3) [    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-solrj/test/J1/temp/solr.client.solrj.io.stream.StreamExpressionTest_89BF67B7CD937318-001/tempDir-001/node3/.
   [junit4]   2> 118456 INFO  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 118457 INFO  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:56970/solr ready
   [junit4]   2> 118480 INFO  (qtp1454681523-2707) [    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params replicationFactor=1&collection.configName=conf&name=collection1&nrtReplicas=1&action=CREATE&numShards=2&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 118481 INFO  (OverseerThreadFactory-895-thread-1) [    ] o.a.s.c.CreateCollectionCmd Create collection collection1
   [junit4]   2> 118606 INFO  (OverseerStateUpdate-98761772786581516-127.0.0.1:56975_solr-n_0000000000) [    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"collection1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"collection1_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:56976/solr",
   [junit4]   2>   "type":"NRT"} 
   [junit4]   2> 118609 INFO  (OverseerStateUpdate-98761772786581516-127.0.0.1:56975_solr-n_0000000000) [    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"collection1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"collection1_shard2_replica_n2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:56972/solr",
   [junit4]   2>   "type":"NRT"} 
   [junit4]   2> 118829 INFO  (qtp1454681523-2701) [    ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf&newCollection=true&name=collection1_shard1_replica_n1&action=CREATE&numShards=2&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 118829 INFO  (qtp1454681523-2701) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 118835 INFO  (qtp783533656-2697) [    ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node4&collection.configName=conf&newCollection=true&name=collection1_shard2_replica_n2&action=CREATE&numShards=2&collection=collection1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 118835 INFO  (qtp783533656-2697) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 118942 INFO  (zkCallback-907-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 118942 INFO  (zkCallback-910-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 118942 INFO  (zkCallback-910-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 118943 INFO  (zkCallback-907-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 119848 INFO  (qtp1454681523-2701) [    ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.1.0
   [junit4]   2> 119863 INFO  (qtp783533656-2697) [    ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.1.0
   [junit4]   2> 119864 INFO  (qtp1454681523-2701) [    ] o.a.s.s.IndexSchema [collection1_shard1_replica_n1] Schema name=test
   [junit4]   2> 119918 INFO  (qtp783533656-2697) [    ] o.a.s.s.IndexSchema [collection1_shard2_replica_n2] Schema name=test
   [junit4]   2> 119991 WARN  (qtp1454681523-2701) [    ] o.a.s.s.IndexSchema Field lowerfilt1and2 is not multivalued and destination for multiple copyFields (2)
   [junit4]   2> 119991 WARN  (qtp1454681523-2701) [    ] o.a.s.s.IndexSchema Field text is not multivalued and destination for multiple copyFields (3)
   [junit4]   2> 119991 INFO  (qtp1454681523-2701) [    ] o.a.s.s.IndexSchema Loaded schema test/1.6 with uniqueid field id
   [junit4]   2> 119991 INFO  (qtp1454681523-2701) [    ] o.a.s.c.CoreContainer Creating SolrCore 'collection1_shard1_replica_n1' using configuration from collection collection1, trusted=true
   [junit4]   2> 119992 INFO  (qtp1454681523-2701) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56976.solr.core.collection1.shard1.replica_n1' (registry 'solr.core.collection1.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 119992 INFO  (qtp1454681523-2701) [    ] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 119992 INFO  (qtp1454681523-2701) [    ] o.a.s.c.SolrCore [[collection1_shard1_replica_n1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-solrj/test/J1/temp/solr.client.solrj.io.stream.StreamExpressionTest_89BF67B7CD937318-001/tempDir-001/node4/collection1_shard1_replica_n1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-solrj/test/J1/temp/solr.client.solrj.io.stream.StreamExpressionTest_89BF67B7CD937318-001/tempDir-001/node4/./collection1_shard1_replica_n1/data/]
   [junit4]   2> 120040 WARN  (qtp783533656-2697) [    ] o.a.s.s.IndexSchema Field lowerfilt1and2 is not multivalued and destination for multiple copyFields (2)
   [junit4]   2> 120040 WARN  (qtp783533656-2697) [    ] o.a.s.s.IndexSchema Field text is not multivalued and destination for multiple copyFields (3)
   [junit4]   2> 120040 INFO  (qtp783533656-2697) [    ] o.a.s.s.IndexSchema Loaded schema test/1.6 with uniqueid field id
   [junit4]   2> 120040 INFO  (qtp783533656-2697) [    ] o.a.s.c.CoreContainer Creating SolrCore 'collection1_shard2_replica_n2' using configuration from collection collection1, trusted=true
   [junit4]   2> 120040 INFO  (qtp783533656-2697) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_56972.solr.core.collection1.shard2.replica_n2' (registry 'solr.core.collection1.shard2.replica_n2') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@21028236
   [junit4]   2> 120040 INFO  (qtp783533656-2697) [    ] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 120040 INFO  (qtp783533656-2697) [    ] o.a.s.c.SolrCore [[collection1_shard2_replica_n2] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-solrj/test/J1/temp/solr.client.solrj.io.stream.StreamExpressionTest_89BF67B7CD937318-001/tempDir-001/node2/collection1_shard2_replica_n2], dataDir=[/Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-solrj/test/J1/temp/solr.client.solrj.io.stream.StreamExpressionTest_89BF67B7CD937318-001/tempDir-001/node2/./collection1_shard2_replica_n2/data/]
   [junit4]   2> 120119 INFO  (qtp1454681523-2701) [    ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 120119 INFO  (qtp1454681523-2701) [    ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 120121 INFO  (qtp1454681523-2701) [    ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 120121 INFO  (qtp1454681523-2701) [    ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 120133 INFO  (qtp1454681523-2701) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@d0e2376[collection1_shard1_replica_n1] main]
   [junit4]   2> 120136 INFO  (qtp1454681523-2701) [    ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 120139 INFO  (qtp1454681523-2701) [    ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 120140 INFO  (qtp1454681523-2701) [    ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 120140 INFO  (qtp1454681523-2701) [    ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1580188366901608448
   [junit4]   2> 120140 INFO  (searcherExecutor-904-thread-1) [    ] o.a.s.c.SolrCore [collection1_shard1_replica_n1] Registered new searcher Searcher@d0e2376[collection1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 120153 INFO  (qtp1454681523-2701) [    ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 120153 INFO  (qtp1454681523-2701) [    ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 120153 INFO  (qtp1454681523-2701) [    ] o.a.s.c.SyncStrategy Sync replicas to https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/
   [junit4]   2> 120154 INFO  (qtp1454681523-2701) [    ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 120154 INFO  (qtp1454681523-2701) [    ] o.a.s.c.SyncStrategy https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/ has no replicas
   [junit4]   2> 120154 INFO  (qtp1454681523-2701) [    ] o.a.s.c.ShardLeaderElectionContext Found all replicas participating in election, clear LIR
   [junit4]   2> 120158 INFO  (qtp1454681523-2701) [    ] o.a.s.c.ShardLeaderElectionContext I am the new leader: https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/ shard1
   [junit4]   2> 120177 INFO  (qtp783533656-2697) [    ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 120177 INFO  (qtp783533656-2697) [    ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 120178 INFO  (qtp783533656-2697) [    ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 120178 INFO  (qtp783533656-2697) [    ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 120190 INFO  (qtp783533656-2697) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@17fef66f[collection1_shard2_replica_n2] main]
   [junit4]   2> 120191 INFO  (qtp783533656-2697) [    ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 120192 INFO  (qtp783533656-2697) [    ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 120192 INFO  (qtp783533656-2697) [    ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 120192 INFO  (qtp783533656-2697) [    ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1580188366956134400
   [junit4]   2> 120193 INFO  (searcherExecutor-905-thread-1) [    ] o.a.s.c.SolrCore [collection1_shard2_replica_n2] Registered new searcher Searcher@17fef66f[collection1_shard2_replica_n2] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 120199 INFO  (qtp783533656-2697) [    ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 120199 INFO  (qtp783533656-2697) [    ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 120199 INFO  (qtp783533656-2697) [    ] o.a.s.c.SyncStrategy Sync replicas to https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/
   [junit4]   2> 120199 INFO  (qtp783533656-2697) [    ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 120199 INFO  (qtp783533656-2697) [    ] o.a.s.c.SyncStrategy https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/ has no replicas
   [junit4]   2> 120199 INFO  (qtp783533656-2697) [    ] o.a.s.c.ShardLeaderElectionContext Found all replicas participating in election, clear LIR
   [junit4]   2> 120203 INFO  (qtp783533656-2697) [    ] o.a.s.c.ShardLeaderElectionContext I am the new leader: https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/ shard2
   [junit4]   2> 120307 INFO  (zkCallback-907-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 120307 INFO  (zkCallback-910-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 120307 INFO  (zkCallback-910-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 120307 INFO  (zkCallback-907-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 120310 INFO  (qtp1454681523-2701) [    ] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 120312 INFO  (qtp1454681523-2701) [    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf&newCollection=true&name=collection1_shard1_replica_n1&action=CREATE&numShards=2&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1482
   [junit4]   2> 120356 INFO  (qtp783533656-2697) [    ] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 120362 INFO  (qtp783533656-2697) [    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node4&collection.configName=conf&newCollection=true&name=collection1_shard2_replica_n2&action=CREATE&numShards=2&collection=collection1&shard=shard2&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1526
   [junit4]   2> 120375 INFO  (qtp1454681523-2707) [    ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 30 seconds. Check all shard replicas
   [junit4]   2> 120473 INFO  (zkCallback-907-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 120473 INFO  (zkCallback-907-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 120473 INFO  (zkCallback-910-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 120473 INFO  (zkCallback-910-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 120485 INFO  (OverseerCollectionConfigSetProcessor-98761772786581516-127.0.0.1:56975_solr-n_0000000000) [    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 121382 INFO  (qtp1454681523-2707) [    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={replicationFactor=1&collection.configName=conf&name=collection1&nrtReplicas=1&action=CREATE&numShards=2&wt=javabin&version=2} status=0 QTime=2902
   [junit4]   2> 121382 INFO  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: collection1 failOnTimeout:true timeout (sec):90
   [junit4]   2> 121384 INFO  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 121424 INFO  (TEST-StreamExpressionTest.testBinomialDistribution-seed#[89BF67B7CD937318]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testBinomialDistribution
   [junit4]   2> 121436 INFO  (qtp783533656-2700) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={update.distrib=TOLEADER&distrib.from=https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/&wt=javabin&version=2}{deleteByQuery=*:* (-1580188368259514368)} 0 1
   [junit4]   2> 121441 INFO  (qtp783533656-2693) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368265805824,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 121441 INFO  (qtp783533656-2693) [    ] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 121442 INFO  (qtp783533656-2693) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 121442 INFO  (qtp783533656-2693) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 1
   [junit4]   2> 121443 INFO  (qtp1454681523-2705) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368267902976,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 121443 INFO  (qtp1454681523-2705) [    ] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 121444 INFO  (qtp1454681523-2705) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 121444 INFO  (qtp1454681523-2705) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 0
   [junit4]   2> 121445 INFO  (qtp1454681523-2704) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={_stateVer_=collection1:4&commit=true&wt=javabin&version=2}{deleteByQuery=*:* (-1580188368251125760),commit=} 0 18
   [junit4]   2> 121457 INFO  (qtp2019914718-2722) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 121464 INFO  (qtp783533656-2697) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/stream params={indent=off&expr=let(a%3DbinomialDistribution(100,+.50),+++++++++++++++b%3Dsample(a,+10000),+++++++++++++++tuple(d%3Ddescribe(b),++++++++++++++++++++++p%3Dprobability(a,+50),++++++++++++++++++++++c%3DcumulativeProbability(a,+50)))&wt=json&version=2.2} status=0 QTime=2
   [junit4]   2> 121683 INFO  (TEST-StreamExpressionTest.testBinomialDistribution-seed#[89BF67B7CD937318]) [    ] o.a.s.SolrTestCaseJ4 ###Ending testBinomialDistribution
   [junit4]   2> 121706 INFO  (TEST-StreamExpressionTest.testCorrelationStream-seed#[89BF67B7CD937318]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testCorrelationStream
   [junit4]   2> 121729 INFO  (qtp783533656-2698) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={update.distrib=TOLEADER&distrib.from=https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/&wt=javabin&version=2}{deleteByQuery=*:* (-1580188368545775616)} 0 21
   [junit4]   2> 121731 INFO  (qtp783533656-2699) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368569892864,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 121731 INFO  (qtp1454681523-2701) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368569892864,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 121731 INFO  (qtp783533656-2699) [    ] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 121731 INFO  (qtp1454681523-2701) [    ] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 121731 INFO  (qtp1454681523-2701) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 121731 INFO  (qtp783533656-2699) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 121731 INFO  (qtp1454681523-2701) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 0
   [junit4]   2> 121731 INFO  (qtp783533656-2699) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 0
   [junit4]   2> 121732 INFO  (qtp1454681523-2710) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={_stateVer_=collection1:4&commit=true&wt=javabin&version=2}{deleteByQuery=*:* (-1580188368544727040),commit=} 0 25
   [junit4]   2> 121766 INFO  (qtp1454681523-2707) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={_stateVer_=collection1:4&wt=javabin&version=2}{add=[id_2 (1580188368577232896), id_10 (1580188368588767232), id_13 (1580188368588767233), id_14 (1580188368589815808), id_16 (1580188368589815809), id_17 (1580188368589815810), id_18 (1580188368589815811), id_21 (1580188368589815812), id_22 (1580188368589815813), id_23 (1580188368589815814), ... (125 adds)]} 0 28
   [junit4]   2> 121767 INFO  (qtp783533656-2695) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&wt=javabin&version=2}{add=[id_1 (1580188368578281472), id_3 (1580188368589815808), id_4 (1580188368589815809), id_5 (1580188368590864384), id_6 (1580188368590864385), id_7 (1580188368590864386), id_8 (1580188368590864387), id_9 (1580188368590864388), id_11 (1580188368590864389), id_12 (1580188368590864390), ... (125 adds)]} 0 27
   [junit4]   2> 121781 INFO  (qtp1454681523-2705) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368622321664,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 121781 INFO  (qtp1454681523-2705) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@5ca7ee5b commitCommandVersion:1580188368622321664
   [junit4]   2> 121781 INFO  (qtp783533656-2693) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368622321664,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 121781 INFO  (qtp783533656-2693) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@50b07988 commitCommandVersion:1580188368622321664
   [junit4]   2> 121847 INFO  (qtp1454681523-2705) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@3b83e34b[collection1_shard1_replica_n1] main]
   [junit4]   2> 121848 INFO  (qtp1454681523-2705) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 121849 INFO  (searcherExecutor-904-thread-1) [    ] o.a.s.c.SolrCore [collection1_shard1_replica_n1] Registered new searcher Searcher@3b83e34b[collection1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader(Uninverting(_0(7.1.0):C125)))}
   [junit4]   2> 121849 INFO  (qtp1454681523-2705) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 67
   [junit4]   2> 121851 INFO  (qtp783533656-2693) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@ec1969e[collection1_shard2_replica_n2] main]
   [junit4]   2> 121852 INFO  (qtp783533656-2693) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 121852 INFO  (searcherExecutor-905-thread-1) [    ] o.a.s.c.SolrCore [collection1_shard2_replica_n2] Registered new searcher Searcher@ec1969e[collection1_shard2_replica_n2] main{ExitableDirectoryReader(UninvertingDirectoryReader(Uninverting(_0(7.1.0):C125)))}
   [junit4]   2> 121852 INFO  (qtp783533656-2693) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 71
   [junit4]   2> 121853 INFO  (qtp783533656-2697) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&commit=true&wt=javabin&version=2}{commit=} 0 75
   [junit4]   2> 121862 INFO  (qtp783533656-2700) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/stream params={indent=off&expr=let(a%3Dtimeseries(collection1,+q%3D"*:*",+start%3D"2013-01-01T01:00:00.000Z",+end%3D"2016-12-01T01:00:00.000Z",+gap%3D"%2B1YEAR",+field%3D"test_dt",+count(*),+sum(price_f),+max(price_f),+min(price_f)),+b%3Dselect(timeseries(collection1,+q%3D"*:*",+start%3D"2013-01-01T01:00:00.000Z",+end%3D"2016-12-01T01:00:00.000Z",+gap%3D"%2B1YEAR",+field%3D"test_dt",+count(*),+sum(price_f),+max(price_f),+min(price_f)),mult(-1,+count(*))+as+nvalue),+c%3Dcol(a,+count(*)),+d%3Dcol(b,+nvalue),+tuple(corr%3Dcorr(c,d),+scorr%3DspearmansCorr(array(500,+50,+50,+50),d),+kcorr%3DkendallsCorr(array(500,+50,+50,+50),d),+d%3Dd))&wt=json&version=2.2} status=0 QTime=2
   [junit4]   2> 121866 INFO  (qtp783533656-2700) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 121867 INFO  (qtp783533656-2700) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:56970/solr ready
   [junit4]   2> 121894 INFO  (qtp783533656-2698) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={df=text&distrib=false&_stateVer_=collection1:4&_facet_={}&fl=id&fl=score&shards.purpose=1048580&start=0&fsv=true&shard.url=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&rows=0&version=2&q=*:*&json.facet={"timeseries":{"type":"range","field":"test_dt","start":"2013-01-01T01:00:00.000Z","end":"2016-12-01T01:00:00.000Z","gap":"%2B1YEAR","facet":{"facet_0":"sum(price_f)","facet_1":"max(price_f)","facet_2":"min(price_f)"}}}&field=test_dt&NOW=1506985062328&isShard=true&wt=javabin} hits=125 status=0 QTime=11
   [junit4]   2> 121894 INFO  (qtp1454681523-2710) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/select params={df=text&distrib=false&_stateVer_=collection1:4&_facet_={}&fl=id&fl=score&shards.purpose=1048580&start=0&fsv=true&shard.url=https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/&rows=0&version=2&q=*:*&json.facet={"timeseries":{"type":"range","field":"test_dt","start":"2013-01-01T01:00:00.000Z","end":"2016-12-01T01:00:00.000Z","gap":"%2B1YEAR","facet":{"facet_0":"sum(price_f)","facet_1":"max(price_f)","facet_2":"min(price_f)"}}}&field=test_dt&NOW=1506985062328&isShard=true&wt=javabin} hits=125 status=0 QTime=11
   [junit4]   2> 121895 INFO  (qtp783533656-2699) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={q=*:*&json.facet={"timeseries":{"type":"range","field":"test_dt","start":"2013-01-01T01:00:00.000Z","end":"2016-12-01T01:00:00.000Z","gap":"%2B1YEAR","facet":{"facet_0":"sum(price_f)","facet_1":"max(price_f)","facet_2":"min(price_f)"}}}&_stateVer_=collection1:4&field=test_dt&rows=0&wt=javabin&version=2} hits=250 status=0 QTime=25
   [junit4]   2> 121900 INFO  (qtp1454681523-2707) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/select params={df=text&distrib=false&_stateVer_=collection1:4&_facet_={}&fl=id&fl=score&shards.purpose=1048580&start=0&fsv=true&shard.url=https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/&rows=0&version=2&q=*:*&json.facet={"timeseries":{"type":"range","field":"test_dt","start":"2013-01-01T01:00:00.000Z","end":"2016-12-01T01:00:00.000Z","gap":"%2B1YEAR","facet":{"facet_0":"sum(price_f)","facet_1":"max(price_f)","facet_2":"min(price_f)"}}}&field=test_dt&NOW=1506985062354&isShard=true&wt=javabin} hits=125 status=0 QTime=2
   [junit4]   2> 121900 INFO  (qtp783533656-2695) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={df=text&distrib=false&_stateVer_=collection1:4&_facet_={}&fl=id&fl=score&shards.purpose=1048580&start=0&fsv=true&shard.url=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&rows=0&version=2&q=*:*&json.facet={"timeseries":{"type":"range","field":"test_dt","start":"2013-01-01T01:00:00.000Z","end":"2016-12-01T01:00:00.000Z","gap":"%2B1YEAR","facet":{"facet_0":"sum(price_f)","facet_1":"max(price_f)","facet_2":"min(price_f)"}}}&field=test_dt&NOW=1506985062354&isShard=true&wt=javabin} hits=125 status=0 QTime=2
   [junit4]   2> 121901 INFO  (qtp783533656-2697) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={q=*:*&json.facet={"timeseries":{"type":"range","field":"test_dt","start":"2013-01-01T01:00:00.000Z","end":"2016-12-01T01:00:00.000Z","gap":"%2B1YEAR","facet":{"facet_0":"sum(price_f)","facet_1":"max(price_f)","facet_2":"min(price_f)"}}}&_stateVer_=collection1:4&field=test_dt&rows=0&wt=javabin&version=2} hits=250 status=0 QTime=4
   [junit4]   2> 121918 INFO  (TEST-StreamExpressionTest.testCorrelationStream-seed#[89BF67B7CD937318]) [    ] o.a.s.SolrTestCaseJ4 ###Ending testCorrelationStream
   [junit4]   2> 121945 INFO  (TEST-StreamExpressionTest.testParallelTopicStream-seed#[89BF67B7CD937318]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testParallelTopicStream
   [junit4]   2> 121948 INFO  (qtp783533656-2696) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@389c221d[collection1_shard2_replica_n2] realtime]
   [junit4]   2> 121950 INFO  (qtp1454681523-2706) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@286959eb[collection1_shard1_replica_n1] realtime]
   [junit4]   2> 121950 INFO  (qtp1454681523-2706) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={update.distrib=TOLEADER&distrib.from=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&wt=javabin&version=2}{deleteByQuery=*:* (-1580188368796385280)} 0 3
   [junit4]   2> 121953 INFO  (qtp1454681523-2708) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368802676736,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 121953 INFO  (qtp1454681523-2708) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@5ca7ee5b commitCommandVersion:1580188368802676736
   [junit4]   2> 121953 INFO  (qtp783533656-2693) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368802676736,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 121953 INFO  (qtp783533656-2693) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@50b07988 commitCommandVersion:1580188368802676736
   [junit4]   2> 121955 INFO  (qtp783533656-2693) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@4a17fe9c[collection1_shard2_replica_n2] main]
   [junit4]   2> 121956 INFO  (qtp783533656-2693) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 121957 INFO  (searcherExecutor-905-thread-1) [    ] o.a.s.c.SolrCore [collection1_shard2_replica_n2] Registered new searcher Searcher@4a17fe9c[collection1_shard2_replica_n2] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 121975 INFO  (qtp1454681523-2708) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@1766623e[collection1_shard1_replica_n1] main]
   [junit4]   2> 121976 INFO  (qtp1454681523-2708) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 121977 INFO  (searcherExecutor-904-thread-1) [    ] o.a.s.c.SolrCore [collection1_shard1_replica_n1] Registered new searcher Searcher@1766623e[collection1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 121977 INFO  (qtp783533656-2693) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 24
   [junit4]   2> 121978 INFO  (qtp1454681523-2708) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 25
   [junit4]   2> 121979 INFO  (qtp783533656-2696) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&commit=true&wt=javabin&version=2}{deleteByQuery=*:* (-1580188368795336704),commit=} 0 33
   [junit4]   2> 121983 INFO  (qtp1454681523-2705) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={_stateVer_=collection1:4&wt=javabin&version=2}{add=[0 (1580188368830988288), 4 (1580188368834134016), 1 (1580188368834134017), 8 (1580188368834134018)]} 0 2
   [junit4]   2> 121983 INFO  (qtp783533656-2698) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&wt=javabin&version=2}{add=[2 (1580188368830988288), 3 (1580188368834134016), 5 (1580188368834134017), 6 (1580188368834134018), 7 (1580188368834134019), 9 (1580188368834134020)]} 0 2
   [junit4]   2> 121985 INFO  (qtp783533656-2695) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368836231168,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 121985 INFO  (qtp1454681523-2701) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368836231168,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 121985 INFO  (qtp783533656-2695) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@50b07988 commitCommandVersion:1580188368836231168
   [junit4]   2> 121985 INFO  (qtp1454681523-2701) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@5ca7ee5b commitCommandVersion:1580188368836231168
   [junit4]   2> 121997 INFO  (qtp1454681523-2701) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@37586954[collection1_shard1_replica_n1] main]
   [junit4]   2> 121997 INFO  (qtp1454681523-2701) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 121998 INFO  (searcherExecutor-904-thread-1) [    ] o.a.s.c.SolrCore [collection1_shard1_replica_n1] Registered new searcher Searcher@37586954[collection1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader(Uninverting(_1(7.1.0):C4)))}
   [junit4]   2> 121998 INFO  (qtp1454681523-2701) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 13
   [junit4]   2> 121999 INFO  (qtp783533656-2695) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@450a11[collection1_shard2_replica_n2] main]
   [junit4]   2> 122000 INFO  (qtp783533656-2695) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 122000 INFO  (searcherExecutor-905-thread-1) [    ] o.a.s.c.SolrCore [collection1_shard2_replica_n2] Registered new searcher Searcher@450a11[collection1_shard2_replica_n2] main{ExitableDirectoryReader(UninvertingDirectoryReader(Uninverting(_1(7.1.0):C6)))}
   [junit4]   2> 122001 INFO  (qtp783533656-2695) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 15
   [junit4]   2> 122001 INFO  (qtp783533656-2699) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&commit=true&wt=javabin&version=2}{commit=} 0 17
   [junit4]   2> 122005 INFO  (TEST-StreamExpressionTest.testParallelTopicStream-seed#[89BF67B7CD937318]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 122007 INFO  (TEST-StreamExpressionTest.testParallelTopicStream-seed#[89BF67B7CD937318]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:56970/solr ready
   [junit4]   2> 122011 INFO  (qtp1454681523-2707) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/stream params={distrib=false&workerID=1&indent=off&numWorkers=2&expr=topic(collection1,collection1,q%3D"a_s:hello",fl%3Did,partitionKeys%3Did,rows%3D500,zkHost%3D"127.0.0.1:56970/solr",id%3D1000000,checkpointEvery%3D-1)&wt=json&version=2.2} status=0 QTime=0
   [junit4]   2> 122011 INFO  (qtp783533656-2693) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/stream params={distrib=false&workerID=0&indent=off&numWorkers=2&expr=topic(collection1,collection1,q%3D"a_s:hello",fl%3Did,partitionKeys%3Did,rows%3D500,zkHost%3D"127.0.0.1:56970/solr",id%3D1000000,checkpointEvery%3D-1)&wt=json&version=2.2} status=0 QTime=0
   [junit4]   2> 122030 INFO  (qtp783533656-2699) [    ] o.a.s.h.c.RealTimeGetComponent LOOKUP_SLICE:shard2=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/
   [junit4]   2> 122030 INFO  (qtp783533656-2697) [    ] o.a.s.h.c.RealTimeGetComponent LOOKUP_SLICE:shard2=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/
   [junit4]   2> 122032 INFO  (qtp783533656-2700) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={distrib=false&qt=/get&omitHeader=true&shards.purpose=1&NOW=1506985062488&ids=1000000_0&isShard=true&shard.url=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&wt=javabin&version=2&shards.qt=/get} status=0 QTime=0
   [junit4]   2> 122032 INFO  (qtp783533656-2699) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={qt=/get&ids=1000000_0&wt=javabin&version=2} status=0 QTime=16
   [junit4]   2> 122033 INFO  (qtp783533656-2698) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={distrib=false&qt=/get&omitHeader=true&shards.purpose=1&NOW=1506985062488&ids=1000000_1&isShard=true&shard.url=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&wt=javabin&version=2&shards.qt=/get} status=0 QTime=0
   [junit4]   2> 122034 INFO  (qtp783533656-2697) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={qt=/get&ids=1000000_1&wt=javabin&version=2} status=0 QTime=18
   [junit4]   2> 122034 INFO  (qtp783533656-2700) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={q=*:*&distrib=false&sort=_version_+desc&rows=1&wt=json&version=2.2} hits=6 status=0 QTime=0
   [junit4]   2> 122035 INFO  (qtp783533656-2699) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={q=*:*&distrib=false&sort=_version_+desc&rows=1&wt=json&version=2.2} hits=6 status=0 QTime=0
   [junit4]   2> 122038 INFO  (qtp1454681523-2708) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/select params={q=*:*&distrib=false&sort=_version_+desc&rows=1&wt=json&version=2.2} hits=4 status=0 QTime=0
   [junit4]   2> 122039 INFO  (qtp1454681523-2706) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/select params={q=*:*&distrib=false&sort=_version_+desc&rows=1&wt=json&version=2.2} hits=4 status=0 QTime=0
   [junit4]   2> 122046 INFO  (qtp1454681523-2710) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D0}&fq={!frange+cost%3D100+incl%3Dfalse+l%3D1580188368834134018}_version_&rows=500&wt=json&version=2.2} hits=0 status=0 QTime=2
   [junit4]   2> 122046 INFO  (qtp1454681523-2704) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D1}&fq={!frange+cost%3D100+incl%3Dfalse+l%3D1580188368834134018}_version_&rows=500&wt=json&version=2.2} hits=0 status=0 QTime=2
   [junit4]   2> 122046 INFO  (qtp783533656-2695) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D0}&fq={!frange+cost%3D100+incl%3Dfalse+l%3D1580188368834134020}_version_&rows=500&wt=json&version=2.2} hits=0 status=0 QTime=2
   [junit4]   2> 122047 INFO  (qtp783533656-2697) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D1}&fq={!frange+cost%3D100+incl%3Dfalse+l%3D1580188368834134020}_version_&rows=500&wt=json&version=2.2} hits=0 status=0 QTime=2
   [junit4]   2> 122051 INFO  (qtp783533656-2700) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&collection=collection1&wt=javabin&version=2}{add=[1000000_0 (1580188368903340032)]} 0 2
   [junit4]   2> 122052 INFO  (qtp783533656-2695) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&collection=collection1&wt=javabin&version=2}{add=[1000000_1 (1580188368906485760)]} 0 0
   [junit4]   2> 122055 INFO  (qtp1454681523-2710) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368909631488,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 122055 INFO  (qtp783533656-2697) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368909631488,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 122055 INFO  (qtp1454681523-2710) [    ] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 122056 INFO  (qtp783533656-2697) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@50b07988 commitCommandVersion:1580188368909631488
   [junit4]   2> 122056 INFO  (qtp1454681523-2710) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 122056 INFO  (qtp1454681523-2710) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 0
   [junit4]   2> 122083 INFO  (qtp783533656-2697) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@6e223d39[collection1_shard2_replica_n2] main]
   [junit4]   2> 122083 INFO  (qtp783533656-2697) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 122084 INFO  (searcherExecutor-905-thread-1) [    ] o.a.s.c.SolrCore [collection1_shard2_replica_n2] Registered new searcher Searcher@6e223d39[collection1_shard2_replica_n2] main{ExitableDirectoryReader(UninvertingDirectoryReader(Uninverting(_1(7.1.0):C6) Uninverting(_2(7.1.0):C2)))}
   [junit4]   2> 122084 INFO  (qtp783533656-2697) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 28
   [junit4]   2> 122085 INFO  (qtp783533656-2700) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 30
   [junit4]   2> 122094 INFO  (qtp1454681523-2701) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/select params={q=id:1000000*&distrib=false&fl=id,+checkpoint_ss,+_version_&sort=id+asc&wt=json&version=2.2} hits=0 status=0 QTime=5
   [junit4]   2> 122095 INFO  (qtp783533656-2696) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={q=id:1000000*&distrib=false&fl=id,+checkpoint_ss,+_version_&sort=id+asc&wt=json&version=2.2} hits=2 status=0 QTime=6
   [junit4]   2> 122107 INFO  (qtp1454681523-2707) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={_stateVer_=collection1:4&wt=javabin&version=2}{add=[10 (1580188368953671680), 11 (1580188368964157440)]} 0 10
   [junit4]   2> 122111 INFO  (qtp783533656-2699) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368968351744,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 122111 INFO  (qtp1454681523-2706) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188368968351744,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 122111 INFO  (qtp783533656-2699) [    ] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 122111 INFO  (qtp1454681523-2706) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@5ca7ee5b commitCommandVersion:1580188368968351744
   [junit4]   2> 122112 INFO  (qtp783533656-2699) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 122112 INFO  (qtp783533656-2699) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 0
   [junit4]   2> 122117 INFO  (qtp1454681523-2706) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@1c723f6b[collection1_shard1_replica_n1] main]
   [junit4]   2> 122117 INFO  (qtp1454681523-2706) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 122118 INFO  (searcherExecutor-904-thread-1) [    ] o.a.s.c.SolrCore [collection1_shard1_replica_n1] Registered new searcher Searcher@1c723f6b[collection1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader(Uninverting(_1(7.1.0):C4) Uninverting(_2(7.1.0):C2)))}
   [junit4]   2> 122118 INFO  (qtp1454681523-2706) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:56976/solr/collection1_shard1_replica_n1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 7
   [junit4]   2> 122119 INFO  (qtp1454681523-2710) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={_stateVer_=collection1:4&commit=true&wt=javabin&version=2}{commit=} 0 10
   [junit4]   2> 122122 INFO  (qtp1454681523-2708) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/stream params={distrib=false&workerID=1&indent=off&numWorkers=2&expr=topic(collection1,collection1,q%3D"a_s:hello",fl%3Did,partitionKeys%3Did,rows%3D500,zkHost%3D"127.0.0.1:56970/solr",id%3D1000000,checkpointEvery%3D-1)&wt=json&version=2.2} status=0 QTime=0
   [junit4]   2> 122122 INFO  (qtp783533656-2695) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/stream params={distrib=false&workerID=0&indent=off&numWorkers=2&expr=topic(collection1,collection1,q%3D"a_s:hello",fl%3Did,partitionKeys%3Did,rows%3D500,zkHost%3D"127.0.0.1:56970/solr",id%3D1000000,checkpointEvery%3D-1)&wt=json&version=2.2} status=0 QTime=0
   [junit4]   2> 122125 INFO  (qtp783533656-2700) [    ] o.a.s.h.c.RealTimeGetComponent LOOKUP_SLICE:shard2=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/
   [junit4]   2> 122126 INFO  (qtp783533656-2698) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={distrib=false&qt=/get&omitHeader=true&shards.purpose=1&NOW=1506985062583&ids=1000000_0&isShard=true&shard.url=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&wt=javabin&version=2&shards.qt=/get} status=0 QTime=0
   [junit4]   2> 122126 INFO  (qtp783533656-2697) [    ] o.a.s.h.c.RealTimeGetComponent LOOKUP_SLICE:shard2=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/
   [junit4]   2> 122127 INFO  (qtp783533656-2700) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={qt=/get&ids=1000000_0&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 122128 INFO  (qtp783533656-2693) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={distrib=false&qt=/get&omitHeader=true&shards.purpose=1&NOW=1506985062584&ids=1000000_1&isShard=true&shard.url=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&wt=javabin&version=2&shards.qt=/get} status=0 QTime=0
   [junit4]   2> 122128 INFO  (qtp783533656-2697) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={qt=/get&ids=1000000_1&wt=javabin&version=2} status=0 QTime=2
   [junit4]   2> 122131 INFO  (qtp783533656-2696) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D0}&fq={!frange+cost%3D100+incl%3Dfalse+l%3D1580188368834134020}_version_&rows=500&wt=json&version=2.2} hits=0 status=0 QTime=0
   [junit4]   2> 122133 INFO  (qtp1454681523-2706) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D0}&fq={!frange+cost%3D100+incl%3Dfalse+l%3D1580188368834134018}_version_&rows=500&wt=json&version=2.2} hits=1 status=0 QTime=0
   [junit4]   2> 122136 INFO  (qtp783533656-2699) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&collection=collection1&wt=javabin&version=2}{add=[1000000_0 (1580188368992468992)]} 0 1
   [junit4]   2> 122136 INFO  (qtp1454681523-2705) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D1}&fq={!frange+cost%3D100+incl%3Dfalse+l%3D1580188368834134018}_version_&rows=500&wt=json&version=2.2} hits=1 status=0 QTime=1
   [junit4]   2> 122138 INFO  (qtp783533656-2698) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D1}&fq={!frange+cost%3D100+incl%3Dfalse+l%3D1580188368834134020}_version_&rows=500&wt=json&version=2.2} hits=0 status=0 QTime=0
   [junit4]   2> 122140 INFO  (qtp783533656-2696) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&collection=collection1&wt=javabin&version=2}{add=[1000000_1 (1580188368997711872)]} 0 0
   [junit4]   2> 122145 INFO  (qtp783533656-2696) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/stream params={distrib=false&workerID=0&indent=off&numWorkers=2&expr=topic(collection1,collection1,q%3D"a_s:hello",fl%3Did,initialCheckpoint%3D0,partitionKeys%3Did,rows%3D500,zkHost%3D"127.0.0.1:56970/solr",id%3D2000000,initialCheckpoint%3D0,checkpointEvery%3D-1)&wt=json&version=2.2} status=0 QTime=0
   [junit4]   2> 122145 INFO  (qtp1454681523-2706) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/stream params={distrib=false&workerID=1&indent=off&numWorkers=2&expr=topic(collection1,collection1,q%3D"a_s:hello",fl%3Did,initialCheckpoint%3D0,partitionKeys%3Did,rows%3D500,zkHost%3D"127.0.0.1:56970/solr",id%3D2000000,initialCheckpoint%3D0,checkpointEvery%3D-1)&wt=json&version=2.2} status=0 QTime=0
   [junit4]   2> 122148 INFO  (qtp783533656-2699) [    ] o.a.s.h.c.RealTimeGetComponent LOOKUP_SLICE:shard2=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/
   [junit4]   2> 122148 INFO  (qtp783533656-2693) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={distrib=false&qt=/get&omitHeader=true&shards.purpose=1&NOW=1506985062606&ids=2000000_1&isShard=true&shard.url=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&wt=javabin&version=2&shards.qt=/get} status=0 QTime=0
   [junit4]   2> 122149 INFO  (qtp783533656-2699) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={qt=/get&ids=2000000_1&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 122149 INFO  (qtp783533656-2697) [    ] o.a.s.h.c.RealTimeGetComponent LOOKUP_SLICE:shard2=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/
   [junit4]   2> 122150 INFO  (qtp783533656-2695) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={distrib=false&qt=/get&omitHeader=true&shards.purpose=1&NOW=1506985062607&ids=2000000_0&isShard=true&shard.url=https://127.0.0.1:56972/solr/collection1_shard2_replica_n2/&wt=javabin&version=2&shards.qt=/get} status=0 QTime=0
   [junit4]   2> 122151 INFO  (qtp783533656-2697) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/get params={qt=/get&ids=2000000_0&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 122152 INFO  (qtp783533656-2700) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&initialCheckpoint=0&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D1}&rows=500&wt=json&version=2.2} hits=4 status=0 QTime=0
   [junit4]   2> 122155 INFO  (qtp1454681523-2708) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&initialCheckpoint=0&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D1}&rows=500&wt=json&version=2.2} hits=2 status=0 QTime=0
   [junit4]   2> 122155 INFO  (qtp783533656-2698) [    ] o.a.s.c.S.Request [collection1_shard2_replica_n2]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&initialCheckpoint=0&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D0}&rows=500&wt=json&version=2.2} hits=2 status=0 QTime=0
   [junit4]   2> 122157 INFO  (qtp783533656-2693) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&collection=collection1&wt=javabin&version=2}{add=[2000000_1 (1580188369016586240)]} 0 0
   [junit4]   2> 122157 INFO  (qtp1454681523-2704) [    ] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/solr path=/select params={q=a_s:hello&distrib=false&fl=id,_version_&initialCheckpoint=0&partitionKeys=id&sort=_version_+asc&fq={!hash+workers%3D2+worker%3D0}&rows=500&wt=json&version=2.2} hits=4 status=0 QTime=0
   [junit4]   2> 122159 INFO  (qtp783533656-2695) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard2_replica_n2]  webapp=/solr path=/update params={_stateVer_=collection1:4&collection=collection1&wt=javabin&version=2}{add=[2000000_0 (1580188369018683392)]} 0 0
   [junit4]   2> 122162 INFO  (qtp1454681523-2708) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1]  webapp=/solr path=/update params={_stateVer_=collection1:4&wt=javabin&version=2}{add=[12 (1580188369020780544), 13 (1580188369021829120)]} 0 1
   [junit4]   2> 122165 INFO  (qtp1454681523-2706) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188369024974848,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 122165 INFO  (qtp783533656-2698) [    ] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1580188369024974848,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 122165 INFO  (qtp1454681523-2706) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@5ca7ee5b commitCommandVersion:1580188369024974848
   [junit4]   2> 122165 INFO  (qtp783533656-2698) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@50b07988 commitCommandVersion:1580188369024974848
   [junit4]   2> 122313 INFO  (qtp783533656-2698) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@5abb4fea[collection1_shard2_replica_n2] main]
   [junit4]   2> 122313 INFO  (qt

[...truncated too long message...]

.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@43e8920a: rootName = solr_56976, domain = solr.core.mainCorpus.shard1.replica_n1, service url = null, agent id = null] for registry solr.core.mainCorpus.shard1.replica_n1 / com.codahale.metrics.MetricRegistry@281bd44f
   [junit4]   2> 232954 INFO  (coreCloseExecutor-1094-thread-2) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.collection.destination.shard1.leader, tag=283727047
   [junit4]   2> 232963 INFO  (coreCloseExecutor-1097-thread-2) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.collection.mainCorpus.shard2.leader, tag=1927956941
   [junit4]   2> 232965 INFO  (coreCloseExecutor-1095-thread-3) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.collection.mainCorpus.shard1.leader, tag=344706611
   [junit4]   2> 232969 INFO  (zkCallback-911-thread-2) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (2)
   [junit4]   2> 232969 INFO  (zkCallback-907-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (2)
   [junit4]   2> 232980 INFO  (zkCallback-911-thread-2) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (1)
   [junit4]   2> 232990 INFO  (jetty-closer-890-thread-1) [    ] o.a.s.c.Overseer Overseer (id=98761772786581516-127.0.0.1:56975_solr-n_0000000000) closing
   [junit4]   2> 232991 INFO  (OverseerStateUpdate-98761772786581516-127.0.0.1:56975_solr-n_0000000000) [    ] o.a.s.c.Overseer Overseer Loop exiting : 127.0.0.1:56975_solr
   [junit4]   2> 234435 WARN  (zkCallback-909-thread-2) [    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 234435 INFO  (jetty-closer-890-thread-4) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@225ab1bf{/solr,null,UNAVAILABLE}
   [junit4]   2> 234479 WARN  (zkCallback-910-thread-1) [    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 234479 INFO  (jetty-closer-890-thread-2) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@24cb4288{/solr,null,UNAVAILABLE}
   [junit4]   2> 234490 WARN  (zkCallback-907-thread-1) [    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 234491 INFO  (jetty-closer-890-thread-3) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@5e7ee8e1{/solr,null,UNAVAILABLE}
   [junit4]   2> 234503 WARN  (zkCallback-911-thread-2) [    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 234503 INFO  (jetty-closer-890-thread-1) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@1d0ef707{/solr,null,UNAVAILABLE}
   [junit4]   2> 234507 ERROR (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper server won't take any action on ERROR or SHUTDOWN server state changes
   [junit4]   2> 234507 INFO  (SUITE-StreamExpressionTest-seed#[89BF67B7CD937318]-worker) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:56970 56970
   [junit4]   2> 234545 INFO  (Thread-397) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:56970 56970
   [junit4]   2> 234688 WARN  (Thread-397) [    ] o.a.s.c.ZkTestServer Watch limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2> 	98	/solr/aliases.json
   [junit4]   2> 	17	/solr/configs/conf
   [junit4]   2> 	7	/solr/configs/ml
   [junit4]   2> 	4	/solr/security.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2> 	98	/solr/clusterprops.json
   [junit4]   2> 	98	/solr/clusterstate.json
   [junit4]   2> 	38	/solr/collections/destinationCollection/state.json
   [junit4]   2> 	26	/solr/collections/parallelDestinationCollection1/state.json
   [junit4]   2> 	24	/solr/collections/destination/state.json
   [junit4]   2> 	22	/solr/collections/mainCorpus/state.json
   [junit4]   2> 	20	/solr/collections/parallelDestinationCollection/state.json
   [junit4]   2> 	20	/solr/collections/workQueue/state.json
   [junit4]   2> 	12	/solr/collections/collection1/state.json
   [junit4]   2> 	10	/solr/collections/uknownCollection/state.json
   [junit4]   2> 	10	/solr/collections/modelCollection/state.json
   [junit4]   2> 	10	/solr/collections/checkpointCollection/state.json
   [junit4]   2> 	2	/solr/overseer_elect/election/98761772786581516-127.0.0.1:56975_solr-n_0000000000
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2> 	98	/solr/live_nodes
   [junit4]   2> 	98	/solr/collections
   [junit4]   2> 
   [junit4]   2> NOTE: leaving temporary files on disk at: /Users/jenkins/workspace/Lucene-Solr-7.x-MacOSX/solr/build/solr-solrj/test/J1/temp/solr.client.solrj.io.stream.StreamExpressionTest_89BF67B7CD937318-001
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene70): {name_s=PostingsFormat(name=Asserting), expr_s=BlockTreeOrds(blocksize=128), terms_ss=PostingsFormat(name=Direct), join2_s=PostingsFormat(name=Direct), field_s=PostingsFormat(name=Memory), multiDefault=PostingsFormat(name=Direct), subject=PostingsFormat(name=Direct), a1_s=PostingsFormat(name=Direct), a2_s=PostingsFormat(name=Asserting), level1_s=BlockTreeOrds(blocksize=128), body_t=PostingsFormat(name=Asserting), s_multi=PostingsFormat(name=Asserting), level2_s=PostingsFormat(name=Memory), col_s=PostingsFormat(name=Direct), a_ss=PostingsFormat(name=Memory), a_s=PostingsFormat(name=Asserting), tv_text=PostingsFormat(name=Memory), term_s=PostingsFormat(name=Direct), a_t=PostingsFormat(name=Memory), text_s=BlockTreeOrds(blocksize=128), id=BlockTreeOrds(blocksize=128), text=PostingsFormat(name=Asserting), featureSet_s=PostingsFormat(name=Direct), checkpoint_ss=PostingsFormat(name=Asserting), test_t=PostingsFormat(name=Asserting), ident_s=PostingsFormat(name=Direct), whitetok=PostingsFormat(name=Asserting), side_s=PostingsFormat(name=Asserting)}, docValues:{name_s=DocValuesFormat(name=Asserting), expr_s=DocValuesFormat(name=Direct), iteration_i=DocValuesFormat(name=Direct), terms_ss=DocValuesFormat(name=Lucene70), join2_s=DocValuesFormat(name=Lucene70), multiDefault=DocValuesFormat(name=Lucene70), intDefault=DocValuesFormat(name=Lucene70), a_f=DocValuesFormat(name=Lucene70), falseNegative_i=DocValuesFormat(name=Lucene70), a2_s=DocValuesFormat(name=Asserting), a_i=DocValuesFormat(name=Direct), level1_s=DocValuesFormat(name=Direct), a_s=DocValuesFormat(name=Asserting), id=DocValuesFormat(name=Direct), alpha_d=DocValuesFormat(name=Asserting), i_multi=DocValuesFormat(name=Direct), idf_d=DocValuesFormat(name=Lucene70), b_ls=DocValuesFormat(name=Lucene70), checkpoint_ss=DocValuesFormat(name=Asserting), ident_s=DocValuesFormat(name=Lucene70), order_i=DocValuesFormat(name=Lucene70), error_d=DocValuesFormat(name=Direct), side_s=DocValuesFormat(name=Asserting), truePositive_i=DocValuesFormat(name=Direct), miles_i=DocValuesFormat(name=Lucene70), field_s=DocValuesFormat(name=Lucene70), price_f=DocValuesFormat(name=Lucene70), a1_s=DocValuesFormat(name=Lucene70), join1_i=DocValuesFormat(name=Direct), join3_i=DocValuesFormat(name=Asserting), test_i=DocValuesFormat(name=Lucene70), falsePositive_i=DocValuesFormat(name=Lucene70), field_i=DocValuesFormat(name=Lucene70), s_multi=DocValuesFormat(name=Asserting), level2_s=DocValuesFormat(name=Lucene70), col_s=DocValuesFormat(name=Lucene70), a_ss=DocValuesFormat(name=Lucene70), score_f=DocValuesFormat(name=Asserting), term_s=DocValuesFormat(name=Lucene70), text_s=DocValuesFormat(name=Direct), timestamp=DocValuesFormat(name=Lucene70), idfs_ds=DocValuesFormat(name=Lucene70), featureSet_s=DocValuesFormat(name=Lucene70), weights_ds=DocValuesFormat(name=Asserting), trueNegative_i=DocValuesFormat(name=Asserting), index_i=DocValuesFormat(name=Lucene70), test_dt=DocValuesFormat(name=Direct), out_i=DocValuesFormat(name=Lucene70), _version_=DocValuesFormat(name=Lucene70)}, maxPointsInLeafNode=247, maxMBSortInHeap=7.330809014922133, sim=RandomSimilarity(queryNorm=true): {}, locale=kea, timezone=Asia/Almaty
   [junit4]   2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 9 (64-bit)/cpus=3,threads=1,free=129619008,total=470810624
   [junit4]   2> NOTE: All tests run in this JVM: [HttpSolrClientBuilderTest, StreamExpressionToExpessionTest, ReverseEvaluatorTest, CeilingEvaluatorTest, SolrQueryTest, TestNamedListCodec, CloudSolrClientBuilderTest, TestV1toV2ApiMapper, CommonAdminParamsTest, DocumentAnalysisResponseTest, ExclusiveOrEvaluatorTest, LessThanEvaluatorTest, HyperbolicSineEvaluatorTest, MergeIndexesEmbeddedTest, LargeVolumeJettyTest, SineEvaluatorTest, MultiplyEvaluatorTest, TestCloudSolrClientConnections, CumulativeProbabilityEvaluatorTest, TestCollectionStateWatchers, LargeVolumeBinaryJettyTest, SubtractEvaluatorTest, TestPolicy, LengthEvaluatorTest, ClientUtilsTest, NotEvaluatorTest, FacetFieldTest, SolrExampleBinaryTest, PowerEvaluatorTest, SolrExampleStreamingBinaryTest, LargeVolumeEmbeddedTest, SelectWithEvaluatorsTest, AppendEvaluatorTest, HttpSolrClientSSLAuthConPoolTest, ConcatOperationTest, SolrExampleJettyTest, TestBatchUpdate, TestSolrProperties, SolrParamTest, QueryResponseTest, TestCoreAdmin, TestJavaBinCodec, TestHash, TestFastInputStream, SolrDocumentTest, AnlysisResponseBaseTest, CollectionAdminRequestRequiredParamsTest, TestLBHttpSolrClient, LBHttpSolrClientBuilderTest, GraphExpressionTest, JdbcDriverTest, JdbcTest, StreamExpressionTest]
   [junit4] Completed [145/145 (1!)] on J1 in 116.78s, 117 tests, 1 error <<< FAILURES!
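(Editorial note, not part of the Jenkins output: the failing suite is StreamExpressionTest, and the log above shows /stream requests carrying a topic() streaming expression. The following is a minimal SolrJ sketch of how such an expression is typically submitted to a node's /stream handler, assuming the documented SolrStream/StreamContext usage pattern; the base URL below is a placeholder, and the expression text is adapted from the logged request rather than copied verbatim.)

    import org.apache.solr.client.solrj.io.SolrClientCache;
    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.stream.SolrStream;
    import org.apache.solr.client.solrj.io.stream.StreamContext;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class TopicStreamSketch {
      public static void main(String[] args) throws Exception {
        // Expression adapted from the /stream requests in the log above.
        String expr = "topic(collection1,collection1,q=\"a_s:hello\",fl=id,"
            + "initialCheckpoint=0,partitionKeys=id,rows=500,"
            + "zkHost=\"127.0.0.1:56970/solr\",id=2000000,checkpointEvery=-1)";

        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("qt", "/stream");   // route the request to the /stream handler
        params.set("expr", expr);      // the streaming expression to evaluate

        SolrClientCache cache = new SolrClientCache();
        StreamContext context = new StreamContext();
        context.setSolrClientCache(cache);

        // Placeholder base URL; in the test this would be one of the Jetty nodes,
        // e.g. https://127.0.0.1:56972/solr/collection1_shard2_replica_n2
        SolrStream stream = new SolrStream("http://localhost:8983/solr/collection1", params);
        stream.setStreamContext(context);
        try {
          stream.open();
          // Read tuples until the EOF marker tuple is returned.
          for (Tuple tuple = stream.read(); !tuple.EOF; tuple = stream.read()) {
            System.out.println(tuple.getString("id"));
          }
        } finally {
          stream.close();
          cache.close();
        }
      }
    }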

[...truncated 38833 lines...]