Posted to dev@lucene.apache.org by Policeman Jenkins Server <je...@thetaphi.de> on 2017/10/18 08:09:28 UTC

[JENKINS] Lucene-Solr-6.6-Linux (64bit/jdk-9) - Build # 175 - Unstable!

Build: https://jenkins.thetaphi.de/job/Lucene-Solr-6.6-Linux/175/
Java: 64bit/jdk-9 -XX:-UseCompressedOops -XX:+UseConcMarkSweepGC --illegal-access=deny

1 test failed.
FAILED:  org.apache.solr.cloud.MissingSegmentRecoveryTest.testLeaderRecovery

Error Message:
expected:<10> but was:<0>

Stack Trace:
java.lang.AssertionError: expected:<10> but was:<0>
	at __randomizedtesting.SeedInfo.seed([6CED3D453FA9FFBA:3CB8A546668849A7]:0)
	at org.junit.Assert.fail(Assert.java:93)
	at org.junit.Assert.failNotEquals(Assert.java:647)
	at org.junit.Assert.assertEquals(Assert.java:128)
	at org.junit.Assert.assertEquals(Assert.java:472)
	at org.junit.Assert.assertEquals(Assert.java:456)
	at org.apache.solr.cloud.MissingSegmentRecoveryTest.testLeaderRecovery(MissingSegmentRecoveryTest.java:108)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:564)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:844)
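
The "expected:<10> but was:<0>" message is JUnit's standard assertEquals failure format: the test expected a count of 10 but observed 0 after leader recovery. The seed in the SeedInfo frame (6CED3D453FA9FFBA) is the randomizedtesting seed, which the Lucene/Solr build normally accepts back via the tests.seed system property to replay the same randomization. The snippet below is a hypothetical, minimal illustration of how such a message is produced; the class name, helper method, and the expected value of 10 are assumptions for illustration only, not the body of MissingSegmentRecoveryTest.testLeaderRecovery:

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class AssertEqualsMessageExample {

      // Hypothetical stand-in for a document count observed after recovery;
      // in a real SolrCloud test this would come from a query, not a constant.
      private long observedDocCount() {
        return 0L;
      }

      @Test
      public void testCountMatchesExpected() {
        // With no custom message, a mismatch here is reported exactly as above:
        // java.lang.AssertionError: expected:<10> but was:<0>
        assertEquals(10L, observedDocCount());
      }
    }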




Build Log:
[...truncated 1708 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/core/test/temp/junit4-J1-20171018_070702_06517041499244509168839.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----
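
This warning, which recurs for every forked test JVM below, follows directly from the -XX:+UseConcMarkSweepGC flag in the JVM arguments at the top of this report: CMS was deprecated in JDK 9 by JEP 291. Purely as an illustration (not the Jenkins job's actual configuration), dropping that flag or selecting the JDK 9 default G1 collector would avoid it, e.g.:

    Java: 64bit/jdk-9 -XX:-UseCompressedOops -XX:+UseG1GC --illegal-access=deny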

[...truncated 6 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/core/test/temp/junit4-J2-20171018_070702_06517669289931988354623.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/core/test/temp/junit4-J0-20171018_070702_06513398059989566921856.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 273 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/test-framework/test/temp/junit4-J2-20171018_071317_20616688342256804843127.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/test-framework/test/temp/junit4-J1-20171018_071317_20616627089327754009342.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 13 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/test-framework/test/temp/junit4-J0-20171018_071317_2061081842548662360174.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 1027 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/common/test/temp/junit4-J0-20171018_071439_80215672725918451621691.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/common/test/temp/junit4-J1-20171018_071439_8016767561073394333341.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/common/test/temp/junit4-J2-20171018_071439_80111428748091788555021.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 216 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/icu/test/temp/junit4-J1-20171018_071615_9771511436703632610429.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/icu/test/temp/junit4-J2-20171018_071615_9776480131337131255849.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 5 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/icu/test/temp/junit4-J0-20171018_071615_9778110354961390694989.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 235 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/kuromoji/test/temp/junit4-J2-20171018_071627_4958886420921080433300.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 5 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/kuromoji/test/temp/junit4-J0-20171018_071627_49513475901185427281322.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/kuromoji/test/temp/junit4-J1-20171018_071627_4954222036429582609670.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 146 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/morfologik/test/temp/junit4-J1-20171018_071656_11912904189510015971481.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/morfologik/test/temp/junit4-J2-20171018_071656_1198446730645535477926.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/morfologik/test/temp/junit4-J0-20171018_071656_1195232671156522843016.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 158 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/phonetic/test/temp/junit4-J2-20171018_071659_6773620224064231652668.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/phonetic/test/temp/junit4-J0-20171018_071659_67713755283654661501859.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/phonetic/test/temp/junit4-J1-20171018_071659_6772628091479389192225.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 145 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/smartcn/test/temp/junit4-J0-20171018_071709_62617970199589896981102.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/smartcn/test/temp/junit4-J1-20171018_071709_6268633247878272096651.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 149 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/stempel/test/temp/junit4-J2-20171018_071717_33814978692563199075479.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/stempel/test/temp/junit4-J1-20171018_071717_33817164494238538355263.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/stempel/test/temp/junit4-J0-20171018_071717_33815449946738146201525.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 161 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/analysis/uima/test/temp/junit4-J0-20171018_071719_87811227591189898581143.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 154 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/backward-codecs/test/temp/junit4-J1-20171018_071726_90910372425437427231862.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 17 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/backward-codecs/test/temp/junit4-J2-20171018_071726_90913128232818938247475.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 12 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/backward-codecs/test/temp/junit4-J0-20171018_071726_90915338130203927701557.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 1344 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/benchmark/test/temp/junit4-J2-20171018_071854_4913933693110561562284.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/benchmark/test/temp/junit4-J1-20171018_071854_4917668753119425497359.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/benchmark/test/temp/junit4-J0-20171018_071854_4919696959936447712679.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 220 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/classification/test/temp/junit4-J1-20171018_071902_62513685151724035810343.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/classification/test/temp/junit4-J2-20171018_071902_6257934104606640861020.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/classification/test/temp/junit4-J0-20171018_071902_62515877323500071426911.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 249 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/codecs/test/temp/junit4-J0-20171018_071916_5863203405769631713333.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/codecs/test/temp/junit4-J1-20171018_071916_58611723249297201708186.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/codecs/test/temp/junit4-J2-20171018_071916_58610324283612809581332.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 226 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/demo/test/temp/junit4-J1-20171018_072054_9949583495080261789646.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/demo/test/temp/junit4-J0-20171018_072054_99415766598305017374154.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/demo/test/temp/junit4-J2-20171018_072054_9949474014532787918574.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 162 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/expressions/test/temp/junit4-J2-20171018_072057_8846001204022429450972.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/expressions/test/temp/junit4-J1-20171018_072057_8844182700251474130228.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/expressions/test/temp/junit4-J0-20171018_072057_88412404938355592577849.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 209 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/facet/test/temp/junit4-J2-20171018_072104_20916264207592751899740.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/facet/test/temp/junit4-J0-20171018_072104_20911160921053146883239.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/facet/test/temp/junit4-J1-20171018_072104_2093867517842830052112.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 156 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/grouping/test/temp/junit4-J1-20171018_072140_6285996031337476460224.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/grouping/test/temp/junit4-J2-20171018_072140_6286294210257384272824.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/grouping/test/temp/junit4-J0-20171018_072140_62810646634172981207779.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 229 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/highlighter/test/temp/junit4-J0-20171018_072153_10918109906946824681906.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/highlighter/test/temp/junit4-J1-20171018_072153_10913002685579828018741.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 18 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/highlighter/test/temp/junit4-J2-20171018_072153_1099920311334498815972.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 149 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/join/test/temp/junit4-J2-20171018_072221_38110407932190045181966.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/join/test/temp/junit4-J0-20171018_072221_3802259545271234234496.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/join/test/temp/junit4-J1-20171018_072221_3817482026472455914355.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 143 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/memory/test/temp/junit4-J0-20171018_072245_50715746851697985670384.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/memory/test/temp/junit4-J1-20171018_072245_50711403758317696550609.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 199 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/misc/test/temp/junit4-J0-20171018_072255_7154042313403609191257.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 16 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/misc/test/temp/junit4-J2-20171018_072255_7157096879772207872359.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/misc/test/temp/junit4-J1-20171018_072255_7158582566994513286323.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 309 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/queries/test/temp/junit4-J1-20171018_072322_30515125804472660710226.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/queries/test/temp/junit4-J2-20171018_072322_3054723169430349929923.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/queries/test/temp/junit4-J0-20171018_072322_3052505932961163672168.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 227 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/queryparser/test/temp/junit4-J2-20171018_072332_379558927978173939332.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/queryparser/test/temp/junit4-J0-20171018_072332_3791424204176958272494.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/queryparser/test/temp/junit4-J1-20171018_072332_37916219553055473746267.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 191 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/replicator/test/temp/junit4-J1-20171018_072340_23015636361596543987717.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 8 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/replicator/test/temp/junit4-J2-20171018_072340_23014468158302623455431.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/replicator/test/temp/junit4-J0-20171018_072340_23016361582589461948291.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 203 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/sandbox/test/temp/junit4-J0-20171018_072351_2855477085911355663193.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 13 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/sandbox/test/temp/junit4-J1-20171018_072351_2858990855085044175386.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/sandbox/test/temp/junit4-J2-20171018_072351_28513464936085181705118.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 211 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/spatial-extras/test/temp/junit4-J0-20171018_072427_3853501372262876145898.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/spatial-extras/test/temp/junit4-J1-20171018_072427_3862600206840016634165.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/spatial-extras/test/temp/junit4-J2-20171018_072427_3873624620950806199673.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 151 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/spatial/test/temp/junit4-J2-20171018_072440_1974514050000591059626.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 7 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/spatial/test/temp/junit4-J0-20171018_072440_19713092922128071384602.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 11 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/spatial/test/temp/junit4-J1-20171018_072440_19711931647633194858825.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 164 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/spatial3d/test/temp/junit4-J0-20171018_072520_5702552655379542833453.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/spatial3d/test/temp/junit4-J2-20171018_072520_5701163665074570959349.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 5 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/spatial3d/test/temp/junit4-J1-20171018_072520_5703524545099656434476.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 299 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/suggest/test/temp/junit4-J2-20171018_072615_2339926771419107726087.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/suggest/test/temp/junit4-J0-20171018_072615_23311952234030624707995.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/lucene/build/suggest/test/temp/junit4-J1-20171018_072615_2334776741723892119239.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 2113 lines...]
   [junit4] Suite: org.apache.solr.cloud.MissingSegmentRecoveryTest
   [junit4]   2> Creating dataDir: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/init-core-data-001
   [junit4]   2> 677165 INFO  (SUITE-MissingSegmentRecoveryTest-seed#[6CED3D453FA9FFBA]-worker) [    ] o.a.s.SolrTestCaseJ4 Using TrieFields
   [junit4]   2> 677166 INFO  (SUITE-MissingSegmentRecoveryTest-seed#[6CED3D453FA9FFBA]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason="", value=0.0/0.0, ssl=0.0/0.0, clientAuth=0.0/0.0)
   [junit4]   2> 677166 INFO  (SUITE-MissingSegmentRecoveryTest-seed#[6CED3D453FA9FFBA]-worker) [    ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 2 servers in /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001
   [junit4]   2> 677166 INFO  (SUITE-MissingSegmentRecoveryTest-seed#[6CED3D453FA9FFBA]-worker) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 677166 INFO  (Thread-2965) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 677166 INFO  (Thread-2965) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 677168 ERROR (Thread-2965) [    ] o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper server won't take any action on ERROR or SHUTDOWN server state changes
   [junit4]   2> 677267 INFO  (SUITE-MissingSegmentRecoveryTest-seed#[6CED3D453FA9FFBA]-worker) [    ] o.a.s.c.ZkTestServer start zk server on port:41903
   [junit4]   2> 677270 INFO  (jetty-launcher-1914-thread-1) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 677270 INFO  (jetty-launcher-1914-thread-2) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 677271 INFO  (jetty-launcher-1914-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7ef2f4bc{/solr,null,AVAILABLE}
   [junit4]   2> 677271 INFO  (jetty-launcher-1914-thread-2) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@2eea180{/solr,null,AVAILABLE}
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-2) [    ] o.e.j.s.AbstractConnector Started ServerConnector@2c4e9398{HTTP/1.1,[http/1.1]}{127.0.0.1:38355}
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@34f9e6c4{HTTP/1.1,[http/1.1]}{127.0.0.1:40337}
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-2) [    ] o.e.j.s.Server Started @678923ms
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-1) [    ] o.e.j.s.Server Started @678923ms
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=38355}
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=40337}
   [junit4]   2> 677272 ERROR (jetty-launcher-1914-thread-1) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 677272 ERROR (jetty-launcher-1914-thread-2) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 6.6.2
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 6.6.2
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-18T07:38:26.730888Z
   [junit4]   2> 677272 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-18T07:38:26.730903Z
   [junit4]   2> 677273 INFO  (jetty-launcher-1914-thread-1) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 677273 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 677276 WARN  (NIOServerCxn.Factory:0.0.0.0/0.0.0.0:0) [    ] o.a.z.s.NIOServerCnxn caught end of stream exception
   [junit4]   2> EndOfStreamException: Unable to read additional data from client sessionid 0x15f2e6b53c10002, likely client has closed socket
   [junit4]   2> 	at org.apache.zookeeper.server.NIOServerCnxn.doIO(NIOServerCnxn.java:239)
   [junit4]   2> 	at org.apache.zookeeper.server.NIOServerCnxnFactory.run(NIOServerCnxnFactory.java:203)
   [junit4]   2> 	at java.base/java.lang.Thread.run(Thread.java:844)
   [junit4]   2> 677277 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 677277 INFO  (jetty-launcher-1914-thread-1) [    ] o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 677277 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:41903/solr
   [junit4]   2> 677277 INFO  (jetty-launcher-1914-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:41903/solr
   [junit4]   2> 677278 WARN  (NIOServerCxn.Factory:0.0.0.0/0.0.0.0:0) [    ] o.a.z.s.NIOServerCnxn caught end of stream exception
   [junit4]   2> EndOfStreamException: Unable to read additional data from client sessionid 0x15f2e6b53c10004, likely client has closed socket
   [junit4]   2> 	at org.apache.zookeeper.server.NIOServerCnxn.doIO(NIOServerCnxn.java:239)
   [junit4]   2> 	at org.apache.zookeeper.server.NIOServerCnxnFactory.run(NIOServerCnxnFactory.java:203)
   [junit4]   2> 	at java.base/java.lang.Thread.run(Thread.java:844)
   [junit4]   2> 677285 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 677285 INFO  (jetty-launcher-1914-thread-1) [    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 677285 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:38355_solr
   [junit4]   2> 677285 INFO  (jetty-launcher-1914-thread-1) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:40337_solr
   [junit4]   2> 677285 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.c.Overseer Overseer (id=98848755326976006-127.0.0.1:38355_solr-n_0000000000) starting
   [junit4]   2> 677286 INFO  (zkCallback-1925-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 677286 INFO  (zkCallback-1926-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 677297 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:38355_solr
   [junit4]   2> 677298 INFO  (zkCallback-1926-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 677298 INFO  (zkCallback-1925-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 677316 INFO  (jetty-launcher-1914-thread-1) [    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node1/.
   [junit4]   2> 677327 INFO  (jetty-launcher-1914-thread-2) [    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node2/.
   [junit4]   2> 677332 INFO  (SUITE-MissingSegmentRecoveryTest-seed#[6CED3D453FA9FFBA]-worker) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 677332 INFO  (SUITE-MissingSegmentRecoveryTest-seed#[6CED3D453FA9FFBA]-worker) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:41903/solr ready
   [junit4]   2> 677339 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testLeaderRecovery
   [junit4]   2> 677340 INFO  (qtp150833946-8961) [    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params replicationFactor=2&collection.configName=conf&maxShardsPerNode=1&name=MissingSegmentRecoveryTest&action=CREATE&numShards=1&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 677342 INFO  (OverseerThreadFactory-3743-thread-1) [    ] o.a.s.c.CreateCollectionCmd Create collection MissingSegmentRecoveryTest
   [junit4]   2> 677444 INFO  (qtp150833946-8965) [    ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf&newCollection=true&name=MissingSegmentRecoveryTest_shard1_replica1&action=CREATE&numShards=1&collection=MissingSegmentRecoveryTest&shard=shard1&wt=javabin&version=2
   [junit4]   2> 677444 INFO  (qtp150833946-8965) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 677445 INFO  (qtp668426116-8960) [    ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf&newCollection=true&name=MissingSegmentRecoveryTest_shard1_replica2&action=CREATE&numShards=1&collection=MissingSegmentRecoveryTest&shard=shard1&wt=javabin&version=2
   [junit4]   2> 677445 INFO  (qtp668426116-8960) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 677548 INFO  (zkCallback-1926-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/MissingSegmentRecoveryTest/state.json] for collection [MissingSegmentRecoveryTest] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 677548 INFO  (zkCallback-1925-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/MissingSegmentRecoveryTest/state.json] for collection [MissingSegmentRecoveryTest] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 678454 INFO  (qtp668426116-8960) [    ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 6.6.2
   [junit4]   2> 678454 INFO  (qtp150833946-8965) [    ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 6.6.2
   [junit4]   2> 678464 INFO  (qtp150833946-8965) [    ] o.a.s.s.IndexSchema [MissingSegmentRecoveryTest_shard1_replica1] Schema name=minimal
   [junit4]   2> 678464 INFO  (qtp668426116-8960) [    ] o.a.s.s.IndexSchema [MissingSegmentRecoveryTest_shard1_replica2] Schema name=minimal
   [junit4]   2> 678466 INFO  (qtp150833946-8965) [    ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 678466 INFO  (qtp668426116-8960) [    ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 678466 INFO  (qtp668426116-8960) [    ] o.a.s.c.CoreContainer Creating SolrCore 'MissingSegmentRecoveryTest_shard1_replica2' using configuration from collection MissingSegmentRecoveryTest, trusted=true
   [junit4]   2> 678466 INFO  (qtp150833946-8965) [    ] o.a.s.c.CoreContainer Creating SolrCore 'MissingSegmentRecoveryTest_shard1_replica1' using configuration from collection MissingSegmentRecoveryTest, trusted=true
   [junit4]   2> 678466 INFO  (qtp668426116-8960) [    ] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 678466 INFO  (qtp150833946-8965) [    ] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 678467 INFO  (qtp668426116-8960) [    ] o.a.s.c.SolrCore [[MissingSegmentRecoveryTest_shard1_replica2] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node1/MissingSegmentRecoveryTest_shard1_replica2], dataDir=[/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node1/./MissingSegmentRecoveryTest_shard1_replica2/data/]
   [junit4]   2> 678467 INFO  (qtp150833946-8965) [    ] o.a.s.c.SolrCore [[MissingSegmentRecoveryTest_shard1_replica1] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node2/MissingSegmentRecoveryTest_shard1_replica1], dataDir=[/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node2/./MissingSegmentRecoveryTest_shard1_replica1/data/]
   [junit4]   2> 678511 INFO  (qtp668426116-8960) [    ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 678511 INFO  (qtp668426116-8960) [    ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 678511 INFO  (qtp150833946-8965) [    ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 678511 INFO  (qtp150833946-8965) [    ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 678511 INFO  (qtp668426116-8960) [    ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 678512 INFO  (qtp668426116-8960) [    ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 678512 INFO  (qtp150833946-8965) [    ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 678512 INFO  (qtp150833946-8965) [    ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 678512 INFO  (qtp150833946-8965) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@2a7779fc[MissingSegmentRecoveryTest_shard1_replica1] main]
   [junit4]   2> 678512 INFO  (qtp668426116-8960) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@398f332b[MissingSegmentRecoveryTest_shard1_replica2] main]
   [junit4]   2> 678513 INFO  (qtp150833946-8965) [    ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 678513 INFO  (qtp668426116-8960) [    ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 678513 INFO  (qtp150833946-8965) [    ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 678513 INFO  (qtp150833946-8965) [    ] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 678513 INFO  (qtp668426116-8960) [    ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 678513 INFO  (qtp668426116-8960) [    ] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 678513 INFO  (searcherExecutor-3749-thread-1) [    ] o.a.s.c.SolrCore [MissingSegmentRecoveryTest_shard1_replica1] Registered new searcher Searcher@2a7779fc[MissingSegmentRecoveryTest_shard1_replica1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 678513 INFO  (searcherExecutor-3748-thread-1) [    ] o.a.s.c.SolrCore [MissingSegmentRecoveryTest_shard1_replica2] Registered new searcher Searcher@398f332b[MissingSegmentRecoveryTest_shard1_replica2] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 678513 INFO  (qtp150833946-8965) [    ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1581580086642999296
   [junit4]   2> 678513 INFO  (qtp668426116-8960) [    ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1581580086642999296
   [junit4]   2> 678516 INFO  (qtp150833946-8965) [    ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 678516 INFO  (qtp150833946-8965) [    ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 678516 INFO  (qtp150833946-8965) [    ] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:38355/solr/MissingSegmentRecoveryTest_shard1_replica1/
   [junit4]   2> 678516 INFO  (qtp150833946-8965) [    ] o.a.s.u.PeerSync PeerSync: core=MissingSegmentRecoveryTest_shard1_replica1 url=http://127.0.0.1:38355/solr START replicas=[http://127.0.0.1:40337/solr/MissingSegmentRecoveryTest_shard1_replica2/] nUpdates=100
   [junit4]   2> 678516 INFO  (qtp150833946-8965) [    ] o.a.s.u.PeerSync PeerSync: core=MissingSegmentRecoveryTest_shard1_replica1 url=http://127.0.0.1:38355/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 678517 INFO  (qtp668426116-8964) [    ] o.a.s.c.S.Request [MissingSegmentRecoveryTest_shard1_replica2]  webapp=/solr path=/get params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2} status=0 QTime=0
   [junit4]   2> 678517 INFO  (qtp150833946-8965) [    ] o.a.s.c.SyncStrategy Leader's attempt to sync with shard failed, moving to the next candidate
   [junit4]   2> 678517 INFO  (qtp150833946-8965) [    ] o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 678517 INFO  (qtp150833946-8965) [    ] o.a.s.c.ShardLeaderElectionContext Found all replicas participating in election, clear LIR
   [junit4]   2> 678518 INFO  (qtp150833946-8965) [    ] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:38355/solr/MissingSegmentRecoveryTest_shard1_replica1/ shard1
   [junit4]   2> 678620 INFO  (zkCallback-1925-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/MissingSegmentRecoveryTest/state.json] for collection [MissingSegmentRecoveryTest] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 678620 INFO  (zkCallback-1926-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/MissingSegmentRecoveryTest/state.json] for collection [MissingSegmentRecoveryTest] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 678669 INFO  (qtp150833946-8965) [    ] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 678671 INFO  (qtp150833946-8965) [    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf&newCollection=true&name=MissingSegmentRecoveryTest_shard1_replica1&action=CREATE&numShards=1&collection=MissingSegmentRecoveryTest&shard=shard1&wt=javabin&version=2} status=0 QTime=1226
   [junit4]   2> 678773 INFO  (zkCallback-1926-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/MissingSegmentRecoveryTest/state.json] for collection [MissingSegmentRecoveryTest] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 678773 INFO  (zkCallback-1925-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/MissingSegmentRecoveryTest/state.json] for collection [MissingSegmentRecoveryTest] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 679517 INFO  (qtp668426116-8960) [    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf&newCollection=true&name=MissingSegmentRecoveryTest_shard1_replica2&action=CREATE&numShards=1&collection=MissingSegmentRecoveryTest&shard=shard1&wt=javabin&version=2} status=0 QTime=2071
   [junit4]   2> 679518 INFO  (qtp150833946-8961) [    ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 30 seconds. Check all shard replicas
   [junit4]   2> 679619 INFO  (zkCallback-1926-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/MissingSegmentRecoveryTest/state.json] for collection [MissingSegmentRecoveryTest] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 679619 INFO  (zkCallback-1925-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/MissingSegmentRecoveryTest/state.json] for collection [MissingSegmentRecoveryTest] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 680519 INFO  (qtp150833946-8961) [    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={replicationFactor=2&collection.configName=conf&maxShardsPerNode=1&name=MissingSegmentRecoveryTest&action=CREATE&numShards=1&wt=javabin&version=2} status=0 QTime=3178
   [junit4]   2> 680526 INFO  (qtp668426116-8950) [    ] o.a.s.u.p.LogUpdateProcessorFactory [MissingSegmentRecoveryTest_shard1_replica2]  webapp=/solr path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:38355/solr/MissingSegmentRecoveryTest_shard1_replica1/&wt=javabin&version=2}{add=[0 (1581580088749588480), 1 (1581580088750637056), 2 (1581580088750637057), 3 (1581580088750637058), 4 (1581580088750637059), 5 (1581580088750637060), 6 (1581580088751685632), 7 (1581580088751685633), 8 (1581580088751685634), 9 (1581580088751685635)]} 0 0
   [junit4]   2> 680526 INFO  (qtp150833946-8963) [    ] o.a.s.u.p.LogUpdateProcessorFactory [MissingSegmentRecoveryTest_shard1_replica1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[0 (1581580088749588480), 1 (1581580088750637056), 2 (1581580088750637057), 3 (1581580088750637058), 4 (1581580088750637059), 5 (1581580088750637060), 6 (1581580088751685632), 7 (1581580088751685633), 8 (1581580088751685634), 9 (1581580088751685635)]} 0 4
   [junit4]   2> 680529 INFO  (qtp150833946-8961) [    ] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 680529 INFO  (qtp150833946-8961) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@4815f572
   [junit4]   2> 680530 INFO  (qtp668426116-8952) [    ] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 680530 INFO  (qtp668426116-8952) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@15a39f45
   [junit4]   2> 680628 INFO  (qtp668426116-8952) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@34c030ed[MissingSegmentRecoveryTest_shard1_replica2] main]
   [junit4]   2> 680628 INFO  (qtp150833946-8961) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@5be537a4[MissingSegmentRecoveryTest_shard1_replica1] main]
   [junit4]   2> 680629 INFO  (qtp668426116-8952) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 680629 INFO  (qtp150833946-8961) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 680629 INFO  (searcherExecutor-3748-thread-1) [    ] o.a.s.c.SolrCore [MissingSegmentRecoveryTest_shard1_replica2] Registered new searcher Searcher@34c030ed[MissingSegmentRecoveryTest_shard1_replica2] main{ExitableDirectoryReader(UninvertingDirectoryReader(Uninverting(_0(6.6.2):C10)))}
   [junit4]   2> 680629 INFO  (searcherExecutor-3749-thread-1) [    ] o.a.s.c.SolrCore [MissingSegmentRecoveryTest_shard1_replica1] Registered new searcher Searcher@5be537a4[MissingSegmentRecoveryTest_shard1_replica1] main{ExitableDirectoryReader(UninvertingDirectoryReader(Uninverting(_0(6.6.2):C10)))}
   [junit4]   2> 680629 INFO  (qtp668426116-8952) [    ] o.a.s.u.p.LogUpdateProcessorFactory [MissingSegmentRecoveryTest_shard1_replica2]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:38355/solr/MissingSegmentRecoveryTest_shard1_replica1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 98
   [junit4]   2> 680629 INFO  (qtp150833946-8961) [    ] o.a.s.u.p.LogUpdateProcessorFactory [MissingSegmentRecoveryTest_shard1_replica1]  webapp=/solr path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:38355/solr/MissingSegmentRecoveryTest_shard1_replica1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 99
   [junit4]   2> 680629 INFO  (qtp150833946-8953) [    ] o.a.s.u.p.LogUpdateProcessorFactory [MissingSegmentRecoveryTest_shard1_replica1]  webapp=/solr path=/update params={_stateVer_=MissingSegmentRecoveryTest:4&waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 101
   [junit4]   2> 680632 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.e.j.s.AbstractConnector Stopped ServerConnector@34f9e6c4{HTTP/1.1,[http/1.1]}{127.0.0.1:0}
   [junit4]   2> 680632 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=1392276750
   [junit4]   2> 680632 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 680633 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.jvm
   [junit4]   2> 680633 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.jetty
   [junit4]   2> 680634 INFO  (coreCloseExecutor-3758-thread-1) [    ] o.a.s.c.SolrCore [MissingSegmentRecoveryTest_shard1_replica2]  CLOSING SolrCore org.apache.solr.core.SolrCore@2d80c2ad
   [junit4]   2> 680636 INFO  (coreCloseExecutor-3758-thread-1) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.MissingSegmentRecoveryTest.shard1.replica2
   [junit4]   2> 680637 WARN  (zkCallback-1926-thread-1) [    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 680637 INFO  (zkCallback-1925-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (1)
   [junit4]   2> 680637 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@7ef2f4bc{/solr,null,UNAVAILABLE}
   [junit4]   2> 680638 INFO  (zkCallback-1932-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (1)
   [junit4]   2> 680638 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 680639 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@79fa7b85{/solr,null,AVAILABLE}
   [junit4]   2> 680640 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@7392c790{HTTP/1.1,[http/1.1]}{127.0.0.1:40337}
   [junit4]   2> 680640 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.e.j.s.Server Started @682292ms
   [junit4]   2> 680641 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=40337}
   [junit4]   2> 680641 ERROR (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 680641 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 6.6.2
   [junit4]   2> 680642 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 680642 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 680642 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-18T07:38:30.100189Z
   [junit4]   2> 680644 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 680657 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 680658 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:41903/solr
   [junit4]   2> 680667 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 680668 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 680670 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:40337_solr
   [junit4]   2> 680671 INFO  (zkCallback-1925-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 680671 INFO  (zkCallback-1937-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 680672 INFO  (zkCallback-1932-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 680704 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node1/.
   [junit4]   2> 680704 INFO  (TEST-MissingSegmentRecoveryTest.testLeaderRecovery-seed#[6CED3D453FA9FFBA]) [    ] o.a.s.c.CorePropertiesLocator Cores are: [MissingSegmentRecoveryTest_shard1_replica2]
   [junit4]   2> 680705 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 680709 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 6.6.2
   [junit4]   2> 680712 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.s.IndexSchema [MissingSegmentRecoveryTest_shard1_replica2] Schema name=minimal
   [junit4]   2> 680713 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 680713 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.c.CoreContainer Creating SolrCore 'MissingSegmentRecoveryTest_shard1_replica2' using configuration from collection MissingSegmentRecoveryTest, trusted=true
   [junit4]   2> 680713 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 680713 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.c.SolrCore [[MissingSegmentRecoveryTest_shard1_replica2] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node1/MissingSegmentRecoveryTest_shard1_replica2], dataDir=[/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node1/./MissingSegmentRecoveryTest_shard1_replica2/data/]
   [junit4]   2> 680730 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 680730 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 680730 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 680730 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 680731 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.c.SolrCore [MissingSegmentRecoveryTest_shard1_replica2]  CLOSING SolrCore org.apache.solr.core.SolrCore@17fbec51
   [junit4]   2> 680732 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.MissingSegmentRecoveryTest.shard1.replica2
   [junit4]   2> 680732 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.c.CoreContainer Found active leader, will attempt to create fresh core and recover.
   [junit4]   2> 680732 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.c.SolrCore Updating index properties... index=index.20171018023830190
   [junit4]   2> 680738 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 680738 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.c.SolrCore [[MissingSegmentRecoveryTest_shard1_replica2] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node1/MissingSegmentRecoveryTest_shard1_replica2], dataDir=[/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node1/./MissingSegmentRecoveryTest_shard1_replica2/data/]
   [junit4]   2> 680750 INFO  (OldIndexDirectoryCleanupThreadForCore-MissingSegmentRecoveryTest_shard1_replica2) [    ] o.a.s.c.DirectoryFactory Found 1 old index directories to clean-up under /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node1/./MissingSegmentRecoveryTest_shard1_replica2/data/ afterReload=false
   [junit4]   2> 680751 INFO  (OldIndexDirectoryCleanupThreadForCore-MissingSegmentRecoveryTest_shard1_replica2) [    ] o.a.s.c.DirectoryFactory Deleted old index directory: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MissingSegmentRecoveryTest_6CED3D453FA9FFBA-001/tempDir-001/node1/./MissingSegmentRecoveryTest_shard1_replica2/data/index
   [junit4]   2> 680770 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 680770 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 680771 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 680771 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 680771 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@11ea776e[MissingSegmentRecoveryTest_shard1_replica2] main]
   [junit4]   2> 680772 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 680772 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 680772 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 680772 INFO  (coreLoadExecutor-3765-thread-1) [    ] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 680772 INFO  (searcherExecutor-3771-thread-1) [    ] o.a.s.c.SolrCore [MissingSegmentRecoveryTest_shard1_replica2] Registered new searcher Searcher@11ea776e[MissingSegmentRecoveryTest_shard1_replica2] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 680774 INFO  (coreZkRegister-3760-thread-1) [    ] o.a.s.c.ZkController Core needs to recover:MissingSegmentRecoveryTest_shard1_replica2
   [junit4]   2> 680775 INFO  (updateExecutor-1934-thread-1) [    ] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 680775 INFO  (recoveryExecutor-1935-thread-1) [    ] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 680775 INFO  (recoveryExecutor-1935-thread-1) [    ] o.a.s.c.RecoveryStrategy ###### startupVersions=[[1581580088751685635, 1581580088751685634, 1581580088751685633, 1581580088751685632, 1581580088750637060, 1581580088750637059, 1581580088750637058, 1581580088750637057, 1581580088750637056, 1581580088749588480]]
   [junit4]   2> 680775 INFO  (recoveryExecutor-1935-thread-1) [    ] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[MissingSegmentRecoveryTest_shard1_replica2]
   [junit4]   2> 680775 INFO  (qtp263261312-9030) [    ] o.a.s.c.S.Request [MissingSegmentRecoveryTest_shard1_replica2]  webapp=/solr path=/select params={q=*:*&_stateVer_=MissingSegmentRecoveryTest:4&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 680775 INFO  (recoveryExecutor-1935-thread-1) [    ] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 680775 INFO  (recoveryExecutor-1935-thread-1) [    ] o.a.s.c.RecoveryStrategy Publishing state of core [MissingSegmentRecoveryTest_shard1_replica2] as recovering, leader is [http://127.0.0.1:38355/solr/MissingSegmentRecoveryTest_shard1_replica1/] and I am [http://127.0.0.1:40337/solr/MissingSegmentRecoveryTest_shard1_replica2/]
   [junit4]   2> 680776 INFO  (recoveryExecutor-1935-thread-1) [    ] o.a.s.c.RecoveryStrategy Sending prep recovery command to [http://127.0.0.1:38355/solr]; [WaitForState: action=PREPRECOVERY&core=MissingSegmentRecoveryTest_shard1_replica1&nodeName=127.0.0.1:40337_solr&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 680777 INFO  (qtp150833946-8965) [    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :delete with params name=MissingSegmentRecoveryTest&action=DELETE&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 680777 INFO  (qtp150833946-8961) [    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 680777 INFO  (qtp150833946-8961) [    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see MissingSegmentRecoveryTest_shard1_replica1 (shard1 of MissingSegmentRecoveryTest) have state: recovering
   [junit4]   2> 680777 INFO  (qtp150833946-8961) [    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=MissingSegmentRecoveryTest, shard=shard1, thisCore=MissingSegmentRecoveryTest_shard1_replica1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=active, localState=active, nodeName=127.0.0.1:40337_solr, coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: core_node2:{"core":"MissingSegmentRecoveryTest_shard1_replica2","base_url":"http://127.0.0.1:40337/solr","node_name":"127.0.0.1:40337_solr","state":"active"}
   [junit4]   2> 680781 INFO  (OverseerCollectionConfigSetProcessor-98848755326976006-127.0.0.1:38355_solr-n_0000000000) [    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 680781 INFO  (OverseerThreadFactory-3743-thread-2) [    ] o.a.s.c.OverseerCollectionMessageHandler Executing Collection Cmd : action=UNLOAD&deleteInstanceDir=true&deleteDataDir=true
   [junit4]   2> 680782 INFO  (qtp263261312-9032) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.MissingSegmentRecoveryTest.shard1.replica2
   [junit4]   2> 680782 WARN  (qtp263261312-9032) [    ] o.a.s.c.RecoveryStrategy Stopping recovery for core=[MissingSegmentRecoveryTest_shard1_replica2] coreNodeName=[core_node2]
   [junit4]   2> 680782 INFO  (qtp263261312-9032) [    ] o.a.s.c.SolrCore Core MissingSegmentRecoveryTest_shard1_replica2 is not yet closed, waiting 100 ms before checking again.
   [junit4]   2> 680783 ERROR (recoveryExecutor-1935-thread-1) [    ] o.a.s.c.RecoveryStrategy Error while trying to recover. core=MissingSegmentRecoveryTest_shard1_replica2:java.util.concurrent.ExecutionException: org.apache.solr.client.solrj.SolrServerException: IOException occured when talking to server at: http://127.0.0.1:38355/solr
   [junit4]   2> 	at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122)
   [junit4]   2> 	at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191)
   [junit4]   2> 	at org.apache.solr.cloud.RecoveryStrategy.sendPrepRecoveryCmd(RecoveryStrategy.java:678)
   [junit4]   2> 	at org.apache.solr.cloud.RecoveryStrategy.sendPrepRecoveryCmd(RecoveryStrategy.java:653)
   [junit4]   2> 	at org.apache.solr.cloud.RecoveryStrategy.doRecovery(RecoveryStrategy.java:413)
   [junit4]   2> 	at org.apache.solr.cloud.RecoveryStrategy.run(RecoveryStrategy.java:284)
   [junit4]   2> 	at com.codahale.metrics.InstrumentedExecutorService$InstrumentedRunnable.run(InstrumentedExecutorService.java:176)
   [junit4]   2> 	at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:514)
   [junit4]   2> 	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
   [junit4]   2> 	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:229)
   [junit4]   2> 	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
   [junit4]   2> 	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
   [junit4]   2> 	at java.base/java.lang.Thread.run(Thread.java:844)
   [junit4]   2> Caused by: org.apache.solr.client.solrj.SolrServerException: IOException occured when talking to server at: http://127.0.0.1:38355/solr
   [junit4]   2> 	at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:626)
   [junit4]   2> 	at org.apache.solr.client.solrj.impl.HttpSolrClient.lambda$httpUriRequest$0(HttpSolrClient.java:319)
   [junit4]   2> 	... 5 more
   [junit4]   2> Caused by: java.net.SocketException: Socket closed
   [junit4]   2> 	at java.base/java.net.SocketInputStream.read(SocketInputStream.java:204)
   [junit4]   2> 	at java.base/java.net.SocketInputStream.read(SocketInputStream.java:141)
   [junit4]   2> 	at org.apache.http.impl.io.AbstractSessionInputBuffer.fillBuffer(AbstractSessionInputBuffer.java:160)
   [junit4]   2> 	at org.apache.http.impl.io.SocketInputBuffer.fillBuffer(SocketInputBuffer.java:84)
   [junit4]   2> 	at org.apache.http.impl.io.AbstractSessionInputBuffer.readLine(AbstractSessionInputBuffer.java:273)
   [junit4]   2> 	at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:140)
   [junit4]   2> 	at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:57)
   [junit4]   2> 	at org.apache.http.impl.io.AbstractMessageParser.parse(AbstractMessageParser.java:261)
   [junit4]   2> 	at org.apache.http.impl.AbstractHttpClientConnection.receiveResponseHeader(AbstractHttpClientConnection.java:283)
   [junit4]   2> 	at org.apache.http.impl.conn.DefaultClientConnection.receiveResponseHeader(DefaultClientConnection.java:251)
   [junit4]   2> 	at org.apache.http.impl

[...truncated too long message...]

   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-analytics/test/temp/junit4-J2-20171018_080521_38711990571620451913737.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-analytics/test/temp/junit4-J1-20171018_080521_3873803892573565822685.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 499 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-clustering/test/temp/junit4-J2-20171018_080534_573944500167165110724.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-clustering/test/temp/junit4-J1-20171018_080534_5732700278932559680104.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-clustering/test/temp/junit4-J0-20171018_080534_5739500720804652239508.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 1136 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-dataimporthandler-extras/test/temp/junit4-J1-20171018_080549_34516560449839668841747.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-dataimporthandler-extras/test/temp/junit4-J0-20171018_080549_34512270041796366246752.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 590 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-dataimporthandler/test/temp/junit4-J1-20171018_080556_26814210922827583675419.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-dataimporthandler/test/temp/junit4-J0-20171018_080556_2613745705231588620524.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-dataimporthandler/test/temp/junit4-J2-20171018_080556_26910960479619155052703.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 503 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-cell/test/temp/junit4-J0-20171018_080617_05017438766150482823875.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-cell/test/temp/junit4-J2-20171018_080617_0506973971435925487151.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-cell/test/temp/junit4-J1-20171018_080617_0501717957173577908792.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 507 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-langid/test/temp/junit4-J1-20171018_080626_53912013520202040540811.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-langid/test/temp/junit4-J0-20171018_080626_53911692753993584110198.syserr
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 590 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-ltr/test/temp/junit4-J0-20171018_080632_03213875329064125151474.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-ltr/test/temp/junit4-J1-20171018_080632_0327569770844029355032.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-ltr/test/temp/junit4-J2-20171018_080632_0324692841702463683802.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 540 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-uima/test/temp/junit4-J1-20171018_080658_13617913864155802152007.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-uima/test/temp/junit4-J0-20171018_080658_13513308857530418553372.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 494 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/contrib/solr-velocity/test/temp/junit4-J0-20171018_080706_77312787351466327981683.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 28898 lines...]

[JENKINS] Lucene-Solr-6.6-Linux (32bit/jdk1.8.0_144) - Build # 176 - Still Unstable!

Posted by Policeman Jenkins Server <je...@thetaphi.de>.
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-6.6-Linux/176/
Java: 32bit/jdk1.8.0_144 -server -XX:+UseG1GC

1 tests failed.
FAILED:  org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.test

Error Message:
Expected 2 of 3 replicas to be active but only found 1; [core_node3:{"core":"c8n_1x3_lf_shard1_replica2","base_url":"http://127.0.0.1:36713","node_name":"127.0.0.1:36713_","state":"active","leader":"true"}]; clusterState: DocCollection(c8n_1x3_lf//clusterstate.json/27)={   "replicationFactor":"3",   "shards":{"shard1":{       "range":"80000000-7fffffff",       "state":"active",       "replicas":{         "core_node1":{           "state":"down",           "base_url":"http://127.0.0.1:44057",           "core":"c8n_1x3_lf_shard1_replica1",           "node_name":"127.0.0.1:44057_"},         "core_node2":{           "core":"c8n_1x3_lf_shard1_replica3",           "base_url":"http://127.0.0.1:44247",           "node_name":"127.0.0.1:44247_",           "state":"down"},         "core_node3":{           "core":"c8n_1x3_lf_shard1_replica2",           "base_url":"http://127.0.0.1:36713",           "node_name":"127.0.0.1:36713_",           "state":"active",           "leader":"true"}}}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"1",   "autoAddReplicas":"false"}

Stack Trace:
java.lang.AssertionError: Expected 2 of 3 replicas to be active but only found 1; [core_node3:{"core":"c8n_1x3_lf_shard1_replica2","base_url":"http://127.0.0.1:36713","node_name":"127.0.0.1:36713_","state":"active","leader":"true"}]; clusterState: DocCollection(c8n_1x3_lf//clusterstate.json/27)={
  "replicationFactor":"3",
  "shards":{"shard1":{
      "range":"80000000-7fffffff",
      "state":"active",
      "replicas":{
        "core_node1":{
          "state":"down",
          "base_url":"http://127.0.0.1:44057",
          "core":"c8n_1x3_lf_shard1_replica1",
          "node_name":"127.0.0.1:44057_"},
        "core_node2":{
          "core":"c8n_1x3_lf_shard1_replica3",
          "base_url":"http://127.0.0.1:44247",
          "node_name":"127.0.0.1:44247_",
          "state":"down"},
        "core_node3":{
          "core":"c8n_1x3_lf_shard1_replica2",
          "base_url":"http://127.0.0.1:36713",
          "node_name":"127.0.0.1:36713_",
          "state":"active",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"1",
  "autoAddReplicas":"false"}
	at __randomizedtesting.SeedInfo.seed([52A3B40E92477BDA:DAF78BD43CBB1622]:0)
	at org.junit.Assert.fail(Assert.java:93)
	at org.junit.Assert.assertTrue(Assert.java:43)
	at org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.testRf3WithLeaderFailover(LeaderFailoverAfterPartitionTest.java:168)
	at org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.test(LeaderFailoverAfterPartitionTest.java:55)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:992)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:967)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)




Build Log:
[...truncated 11795 lines...]
   [junit4] Suite: org.apache.solr.cloud.LeaderFailoverAfterPartitionTest
   [junit4]   2> Creating dataDir: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/init-core-data-001
   [junit4]   2> 693893 WARN  (SUITE-LeaderFailoverAfterPartitionTest-seed#[52A3B40E92477BDA]-worker) [    ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=2 numCloses=2
   [junit4]   2> 693894 INFO  (SUITE-LeaderFailoverAfterPartitionTest-seed#[52A3B40E92477BDA]-worker) [    ] o.a.s.SolrTestCaseJ4 Using PointFields
   [junit4]   2> 693895 INFO  (SUITE-LeaderFailoverAfterPartitionTest-seed#[52A3B40E92477BDA]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl=https://issues.apache.org/jira/browse/SOLR-5776)
   [junit4]   2> 693895 INFO  (SUITE-LeaderFailoverAfterPartitionTest-seed#[52A3B40E92477BDA]-worker) [    ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   2> 693896 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 693896 INFO  (Thread-1424) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 693896 INFO  (Thread-1424) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 693900 ERROR (Thread-1424) [    ] o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper server won't take any action on ERROR or SHUTDOWN server state changes
   [junit4]   2> 693996 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.ZkTestServer start zk server on port:38605
   [junit4]   2> 694001 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractZkTestCase put /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 694001 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractZkTestCase put /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/core/src/test-files/solr/collection1/conf/schema.xml to /configs/conf1/schema.xml
   [junit4]   2> 694002 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractZkTestCase put /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 694002 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractZkTestCase put /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 694003 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractZkTestCase put /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 694003 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractZkTestCase put /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 694004 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractZkTestCase put /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 694004 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractZkTestCase put /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 694005 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractZkTestCase put /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 694005 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractZkTestCase put /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 694006 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractZkTestCase put /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 694066 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/control-001/cores/collection1
   [junit4]   2> 694067 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 694068 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@9f5300{/,null,AVAILABLE}
   [junit4]   2> 694068 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@e9af4a{HTTP/1.1,[http/1.1]}{127.0.0.1:๓๔๗๘๕}
   [junit4]   2> 694068 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.Server Started @๖๙๕๘๙๖ms
   [junit4]   2> 694068 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/tempDir-001/control/data, hostContext=/, hostPort=39243, coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/control-001/cores}
   [junit4]   2> 694068 ERROR (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 694068 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 6.6.2
   [junit4]   2> 694069 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 694069 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 694069 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-18T15:41:15.831Z
   [junit4]   2> 694071 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 694071 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/control-001/solr.xml
   [junit4]   2> 694094 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 694094 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38605/solr
   [junit4]   2> 694103 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:39243_    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 694104 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:39243_    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:39243_
   [junit4]   2> 694104 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:39243_    ] o.a.s.c.Overseer Overseer (id=98850653841588228-127.0.0.1:39243_-n_0000000000) starting
   [junit4]   2> 694106 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:39243_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:39243_
   [junit4]   2> 694107 INFO  (zkCallback-846-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 694197 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:39243_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/control-001/cores
   [junit4]   2> 694197 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:39243_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 694197 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 694198 INFO  (OverseerStateUpdate-98850653841588228-127.0.0.1:39243_-n_0000000000) [n:127.0.0.1:39243_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 695211 WARN  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 695212 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 6.6.2
   [junit4]   2> 695227 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 695298 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 695304 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection control_collection, trusted=true
   [junit4]   2> 695304 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 695304 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/control-001/cores/collection1], dataDir=[/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/control-001/cores/collection1/data/]
   [junit4]   2> 695305 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@77dc45
   [junit4]   2> 695305 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=45, maxMergeAtOnceExplicit=48, maxMergedSegmentMB=68.57421875, floorSegmentMB=1.0830078125, forceMergeDeletesPctAllowed=21.822731206422706, segmentsPerTier=25.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.30505255139331733
   [junit4]   2> 695308 WARN  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 695330 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 695330 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 695331 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 695331 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 695331 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=44, maxMergeAtOnceExplicit=36, maxMergedSegmentMB=64.6962890625, floorSegmentMB=0.98828125, forceMergeDeletesPctAllowed=11.982498655858757, segmentsPerTier=30.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.23429108886169844
   [junit4]   2> 695331 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@1690763[collection1] main]
   [junit4]   2> 695332 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 695332 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 695332 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 695333 INFO  (searcherExecutor-2495-thread-1-processing-n:127.0.0.1:39243_ x:collection1 c:control_collection) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@1690763[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 695333 INFO  (coreLoadExecutor-2494-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_ c:control_collection   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1581610462971166720
   [junit4]   2> 695336 INFO  (coreZkRegister-2487-thread-1-processing-n:127.0.0.1:39243_ x:collection1 c:control_collection) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 695336 INFO  (coreZkRegister-2487-thread-1-processing-n:127.0.0.1:39243_ x:collection1 c:control_collection) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 695336 INFO  (coreZkRegister-2487-thread-1-processing-n:127.0.0.1:39243_ x:collection1 c:control_collection) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:39243/collection1/
   [junit4]   2> 695336 INFO  (coreZkRegister-2487-thread-1-processing-n:127.0.0.1:39243_ x:collection1 c:control_collection) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 695336 INFO  (coreZkRegister-2487-thread-1-processing-n:127.0.0.1:39243_ x:collection1 c:control_collection) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:39243/collection1/ has no replicas
   [junit4]   2> 695336 INFO  (coreZkRegister-2487-thread-1-processing-n:127.0.0.1:39243_ x:collection1 c:control_collection) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Found all replicas participating in election, clear LIR
   [junit4]   2> 695337 INFO  (coreZkRegister-2487-thread-1-processing-n:127.0.0.1:39243_ x:collection1 c:control_collection) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:39243/collection1/ shard1
   [junit4]   2> 695343 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 695343 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:38605/solr ready
   [junit4]   2> 695343 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
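
At this point the control node is up and the ChaosMonkey has been initialised against ZK at 127.0.0.1:38605/solr. A minimal sketch (again, not the test's own code) of connecting a SolrJ client to that same ZK-backed cluster state:

    import org.apache.solr.client.solrj.impl.CloudSolrClient;

    public class ConnectToCluster {
      public static void main(String[] args) throws Exception {
        try (CloudSolrClient client =
                new CloudSolrClient.Builder().withZkHost("127.0.0.1:38605/solr").build()) {
          client.connect();  // blocks until the cluster state is readable from ZooKeeper
          System.out.println("live nodes: "
              + client.getZkStateReader().getClusterState().getLiveNodes());
        }
      }
    }
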
   [junit4]   2> 695400 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-1-001/cores/collection1
   [junit4]   2> 695400 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-1-001
   [junit4]   2> 695401 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 695401 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@13e930a{/,null,AVAILABLE}
   [junit4]   2> 695402 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@bcf6a5{HTTP/1.1,[http/1.1]}{127.0.0.1:๓๗๓๙๗}
   [junit4]   2> 695402 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.Server Started @๖๙๗๒๒๙ms
   [junit4]   2> 695402 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/tempDir-001/jetty1, solrconfig=solrconfig.xml, hostContext=/, hostPort=44057, coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-1-001/cores}
   [junit4]   2> 695402 ERROR (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 695402 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 6.6.2
   [junit4]   2> 695402 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 695402 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 695402 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-18T15:41:17.164Z
   [junit4]   2> 695404 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 695404 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-1-001/solr.xml
   [junit4]   2> 695408 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 695409 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38605/solr
   [junit4]   2> 695415 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:44057_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 695415 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:44057_    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 695416 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:44057_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:44057_
   [junit4]   2> 695416 INFO  (zkCallback-846-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 695416 INFO  (zkCallback-855-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 695417 INFO  (zkCallback-850-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 695488 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:44057_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-1-001/cores
   [junit4]   2> 695488 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:44057_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 695488 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 695489 INFO  (OverseerStateUpdate-98850653841588228-127.0.0.1:39243_-n_0000000000) [n:127.0.0.1:39243_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard2
   [junit4]   2> 695638 INFO  (coreZkRegister-2487-thread-1-processing-n:127.0.0.1:39243_ x:collection1 c:control_collection) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 696504 WARN  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 696505 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 6.6.2
   [junit4]   2> 696521 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 696590 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 696596 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1, trusted=true
   [junit4]   2> 696596 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 696596 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-1-001/cores/collection1], dataDir=[/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-1-001/cores/collection1/data/]
   [junit4]   2> 696596 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@77dc45
   [junit4]   2> 696597 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=45, maxMergeAtOnceExplicit=48, maxMergedSegmentMB=68.57421875, floorSegmentMB=1.0830078125, forceMergeDeletesPctAllowed=21.822731206422706, segmentsPerTier=25.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.30505255139331733
   [junit4]   2> 696600 WARN  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 696624 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 696624 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 696624 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 696624 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 696625 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=44, maxMergeAtOnceExplicit=36, maxMergedSegmentMB=64.6962890625, floorSegmentMB=0.98828125, forceMergeDeletesPctAllowed=11.982498655858757, segmentsPerTier=30.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.23429108886169844
   [junit4]   2> 696625 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@f34151[collection1] main]
   [junit4]   2> 696626 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 696626 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 696626 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 696627 INFO  (searcherExecutor-2506-thread-1-processing-n:127.0.0.1:44057_ x:collection1 c:collection1) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@f34151[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 696627 INFO  (coreLoadExecutor-2505-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1581610464328024064
   [junit4]   2> 696631 INFO  (coreZkRegister-2500-thread-1-processing-n:127.0.0.1:44057_ x:collection1 c:collection1) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 696631 INFO  (coreZkRegister-2500-thread-1-processing-n:127.0.0.1:44057_ x:collection1 c:collection1) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 696631 INFO  (coreZkRegister-2500-thread-1-processing-n:127.0.0.1:44057_ x:collection1 c:collection1) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:44057/collection1/
   [junit4]   2> 696631 INFO  (coreZkRegister-2500-thread-1-processing-n:127.0.0.1:44057_ x:collection1 c:collection1) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 696631 INFO  (coreZkRegister-2500-thread-1-processing-n:127.0.0.1:44057_ x:collection1 c:collection1) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:44057/collection1/ has no replicas
   [junit4]   2> 696631 INFO  (coreZkRegister-2500-thread-1-processing-n:127.0.0.1:44057_ x:collection1 c:collection1) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Found all replicas participating in election, clear LIR
   [junit4]   2> 696632 INFO  (coreZkRegister-2500-thread-1-processing-n:127.0.0.1:44057_ x:collection1 c:collection1) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:44057/collection1/ shard2
   [junit4]   2> 696697 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-2-001/cores/collection1
   [junit4]   2> 696697 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-2-001
   [junit4]   2> 696698 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 696699 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@1f13456{/,null,AVAILABLE}
   [junit4]   2> 696699 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@965b3{HTTP/1.1,[http/1.1]}{127.0.0.1:๓๙๗๔๕}
   [junit4]   2> 696699 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.Server Started @๖๙๘๕๒๗ms
   [junit4]   2> 696699 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/tempDir-001/jetty2, solrconfig=solrconfig.xml, hostContext=/, hostPort=44247, coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-2-001/cores}
   [junit4]   2> 696699 ERROR (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 696700 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 6.6.2
   [junit4]   2> 696700 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 696700 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 696700 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-18T15:41:18.462Z
   [junit4]   2> 696701 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 696701 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-2-001/solr.xml
   [junit4]   2> 696705 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 696706 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38605/solr
   [junit4]   2> 696710 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:44247_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 696711 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:44247_    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 696711 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:44247_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:44247_
   [junit4]   2> 696712 INFO  (zkCallback-850-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 696712 INFO  (zkCallback-861-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 696712 INFO  (zkCallback-846-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 696712 INFO  (zkCallback-855-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 696768 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:44247_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-2-001/cores
   [junit4]   2> 696768 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:44247_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 696769 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 696770 INFO  (OverseerStateUpdate-98850653841588228-127.0.0.1:39243_-n_0000000000) [n:127.0.0.1:39243_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 696883 INFO  (coreZkRegister-2500-thread-1-processing-n:127.0.0.1:44057_ x:collection1 c:collection1) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 697777 WARN  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 697777 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 6.6.2
   [junit4]   2> 697786 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 697881 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 697892 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1, trusted=true
   [junit4]   2> 697893 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 697893 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-2-001/cores/collection1], dataDir=[/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-2-001/cores/collection1/data/]
   [junit4]   2> 697893 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@77dc45
   [junit4]   2> 697894 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=45, maxMergeAtOnceExplicit=48, maxMergedSegmentMB=68.57421875, floorSegmentMB=1.0830078125, forceMergeDeletesPctAllowed=21.822731206422706, segmentsPerTier=25.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.30505255139331733
   [junit4]   2> 697897 WARN  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 697926 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 697926 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 697927 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 697927 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 697928 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=44, maxMergeAtOnceExplicit=36, maxMergedSegmentMB=64.6962890625, floorSegmentMB=0.98828125, forceMergeDeletesPctAllowed=11.982498655858757, segmentsPerTier=30.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.23429108886169844
   [junit4]   2> 697928 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@63405b[collection1] main]
   [junit4]   2> 697940 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 697940 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 697940 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 697941 INFO  (coreLoadExecutor-2516-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1581610465705852928
   [junit4]   2> 697944 INFO  (searcherExecutor-2517-thread-1-processing-n:127.0.0.1:44247_ x:collection1 c:collection1) [n:127.0.0.1:44247_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@63405b[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 697950 INFO  (coreZkRegister-2511-thread-1-processing-n:127.0.0.1:44247_ x:collection1 c:collection1) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 697950 INFO  (coreZkRegister-2511-thread-1-processing-n:127.0.0.1:44247_ x:collection1 c:collection1) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 697950 INFO  (coreZkRegister-2511-thread-1-processing-n:127.0.0.1:44247_ x:collection1 c:collection1) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:44247/collection1/
   [junit4]   2> 697950 INFO  (coreZkRegister-2511-thread-1-processing-n:127.0.0.1:44247_ x:collection1 c:collection1) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 697950 INFO  (coreZkRegister-2511-thread-1-processing-n:127.0.0.1:44247_ x:collection1 c:collection1) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:44247/collection1/ has no replicas
   [junit4]   2> 697950 INFO  (coreZkRegister-2511-thread-1-processing-n:127.0.0.1:44247_ x:collection1 c:collection1) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.ShardLeaderElectionContext Found all replicas participating in election, clear LIR
   [junit4]   2> 697952 INFO  (coreZkRegister-2511-thread-1-processing-n:127.0.0.1:44247_ x:collection1 c:collection1) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:44247/collection1/ shard1
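
Each of the elections logged above ends with one replica publishing itself as shard leader in ZooKeeper. A hedged sketch of reading those leaders back through ZkStateReader, reusing the collection and shard names from this log:

    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.common.cloud.Replica;
    import org.apache.solr.common.cloud.ZkStateReader;

    public class ShowLeaders {
      public static void main(String[] args) throws Exception {
        try (CloudSolrClient client =
                new CloudSolrClient.Builder().withZkHost("127.0.0.1:38605/solr").build()) {
          client.connect();
          ZkStateReader reader = client.getZkStateReader();
          for (String shard : new String[] {"shard1", "shard2"}) {
            // Retries until a leader has been published for the shard.
            Replica leader = reader.getLeaderRetry("collection1", shard);
            System.out.println(shard + " -> " + leader.getStr(ZkStateReader.BASE_URL_PROP));
          }
        }
      }
    }
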
   [junit4]   2> 698030 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-3-001/cores/collection1
   [junit4]   2> 698030 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-3-001
   [junit4]   2> 698031 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 698032 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@b66d73{/,null,AVAILABLE}
   [junit4]   2> 698032 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@1f8c40a{HTTP/1.1,[http/1.1]}{127.0.0.1:๔๓๖๐๙}
   [junit4]   2> 698032 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.Server Started @๖๙๙๘๖๐ms
   [junit4]   2> 698033 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/tempDir-001/jetty3, solrconfig=solrconfig.xml, hostContext=/, hostPort=36713, coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-3-001/cores}
   [junit4]   2> 698033 ERROR (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 698033 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 6.6.2
   [junit4]   2> 698033 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 698033 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 698033 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-10-18T15:41:19.795Z
   [junit4]   2> 698035 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 698035 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-3-001/solr.xml
   [junit4]   2> 698042 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.u.UpdateShardHandler Creating UpdateShardHandler HTTP client with params: socketTimeout=340000&connTimeout=45000&retry=true
   [junit4]   2> 698044 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38605/solr
   [junit4]   2> 698053 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:36713_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 698054 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:36713_    ] o.a.s.c.Overseer Overseer (id=null) closing
   [junit4]   2> 698056 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:36713_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:36713_
   [junit4]   2> 698056 INFO  (zkCallback-867-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 698057 INFO  (zkCallback-850-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 698056 INFO  (zkCallback-861-thread-1-processing-n:127.0.0.1:44247_) [n:127.0.0.1:44247_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 698056 INFO  (zkCallback-846-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 698056 INFO  (zkCallback-855-thread-1-processing-n:127.0.0.1:44057_) [n:127.0.0.1:44057_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 698102 INFO  (coreZkRegister-2511-thread-1-processing-n:127.0.0.1:44247_ x:collection1 c:collection1) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 698180 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:36713_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-3-001/cores
   [junit4]   2> 698180 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [n:127.0.0.1:36713_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 698180 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 698184 INFO  (OverseerStateUpdate-98850653841588228-127.0.0.1:39243_-n_0000000000) [n:127.0.0.1:39243_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard2
   [junit4]   2> 699197 WARN  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 699198 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 6.6.2
   [junit4]   2> 699210 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 699297 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 699304 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1, trusted=true
   [junit4]   2> 699305 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 699305 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-3-001/cores/collection1], dataDir=[/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-3-001/cores/collection1/data/]
   [junit4]   2> 699305 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@77dc45
   [junit4]   2> 699307 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=45, maxMergeAtOnceExplicit=48, maxMergedSegmentMB=68.57421875, floorSegmentMB=1.0830078125, forceMergeDeletesPctAllowed=21.822731206422706, segmentsPerTier=25.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.30505255139331733
   [junit4]   2> 699379 WARN  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 699415 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 699415 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 699416 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 699416 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 699417 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=44, maxMergeAtOnceExplicit=36, maxMergedSegmentMB=64.6962890625, floorSegmentMB=0.98828125, forceMergeDeletesPctAllowed=11.982498655858757, segmentsPerTier=30.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.23429108886169844
   [junit4]   2> 699417 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@136e20c[collection1] main]
   [junit4]   2> 699418 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 699419 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 699419 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 699421 INFO  (searcherExecutor-2528-thread-1-processing-n:127.0.0.1:36713_ x:collection1 c:collection1) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@136e20c[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 699421 INFO  (coreLoadExecutor-2527-thread-1-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1581610467257745408
   [junit4]   2> 699423 INFO  (coreZkRegister-2522-thread-1-processing-n:127.0.0.1:36713_ x:collection1 c:collection1) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 699424 INFO  (updateExecutor-864-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 699424 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 699424 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 699424 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1]
   [junit4]   2> 699424 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 699424 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] as recovering, leader is [http://127.0.0.1:44057/collection1/] and I am [http://127.0.0.1:36713/collection1/]
   [junit4]   2> 699432 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to [http://127.0.0.1:44057]; [WaitForState: action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:36713_&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 699435 INFO  (SocketProxy-Acceptor-44057) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=50768,localport=44057], receiveBufferSize:531000
   [junit4]   2> 699437 INFO  (SocketProxy-Acceptor-44057) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=37397,localport=51210], receiveBufferSize=530904
   [junit4]   2> 699440 INFO  (qtp1358857-5463) [n:127.0.0.1:44057_    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 699440 INFO  (qtp1358857-5463) [n:127.0.0.1:44057_    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 (shard2 of collection1) have state: recovering
   [junit4]   2> 699440 INFO  (qtp1358857-5463) [n:127.0.0.1:44057_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard2, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=down, localState=active, nodeName=127.0.0.1:36713_, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"http://127.0.0.1:36713","node_name":"127.0.0.1:36713_","state":"down"}
   [junit4]   2> 699924 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 699924 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30000 for each attempt
   [junit4]   2> 699924 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: collection1 failOnTimeout:true timeout (sec):30000
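
The test now blocks until every replica of collection1 reports itself active. Assuming the public helper on the 6.x test framework base class (its exact overload is an assumption here), that wait reduces to roughly:

    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.cloud.AbstractDistribZkTestBase;

    public class WaitForRecoveries {
      public static void main(String[] args) throws Exception {
        try (CloudSolrClient client =
                new CloudSolrClient.Builder().withZkHost("127.0.0.1:38605/solr").build()) {
          client.connect();
          // Polls collection1's cluster state until no replica is down or recovering.
          AbstractDistribZkTestBase.waitForRecoveriesToFinish(
              "collection1", client.getZkStateReader(), false /* verbose */,
              true /* failOnTimeout */, 30 /* timeout in seconds, illustrative value */);
        }
      }
    }
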
   [junit4]   2> 700440 INFO  (qtp1358857-5463) [n:127.0.0.1:44057_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard2, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:36713_, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"http://127.0.0.1:36713","node_name":"127.0.0.1:36713_","state":"recovering"}
   [junit4]   2> 700440 INFO  (qtp1358857-5463) [n:127.0.0.1:44057_    ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 700440 INFO  (qtp1358857-5463) [n:127.0.0.1:44057_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:36713_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=1000
   [junit4]   2> 700941 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from [http://127.0.0.1:44057/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 700941 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 url=http://127.0.0.1:36713 START replicas=[http://127.0.0.1:44057/collection1/] nUpdates=100
   [junit4]   2> 700942 INFO  (SocketProxy-Acceptor-44057) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=50772,localport=44057], receiveBufferSize:531000
   [junit4]   2> 700942 INFO  (SocketProxy-Acceptor-44057) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=37397,localport=51214], receiveBufferSize=530904
   [junit4]   2> 700944 INFO  (qtp1358857-5463) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 700944 INFO  (qtp1358857-5463) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/get params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2} status=0 QTime=0
   [junit4]   2> 700945 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 700945 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a PeerSync 
   [junit4]   2> 700945 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 700945 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 700945 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 700945 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was successful.
   [junit4]   2> 700945 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during PeerSync.
   [junit4]   2> 700945 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 700945 INFO  (recoveryExecutor-865-thread-1-processing-n:127.0.0.1:36713_ x:collection1 s:shard2 c:collection1 r:core_node3) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
   [junit4]   2> 701924 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 701925 INFO  (SocketProxy-Acceptor-39243) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=43072,localport=39243], receiveBufferSize:531000
   [junit4]   2> 701925 INFO  (SocketProxy-Acceptor-39243) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=34785,localport=56646], receiveBufferSize=530904
   [junit4]   2> 701926 INFO  (qtp32596230-5428) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 701926 INFO  (qtp32596230-5428) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 701926 INFO  (qtp32596230-5428) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 701926 INFO  (qtp32596230-5428) [n:127.0.0.1:39243_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 0
   [junit4]   2> 701927 INFO  (SocketProxy-Acceptor-44247) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=59420,localport=44247], receiveBufferSize:531000
   [junit4]   2> 701927 INFO  (SocketProxy-Acceptor-44247) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=39745,localport=54636], receiveBufferSize=530904
   [junit4]   2> 701930 INFO  (SocketProxy-Acceptor-44057) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=50784,localport=44057], receiveBufferSize:531000
   [junit4]   2> 701931 INFO  (SocketProxy-Acceptor-44247) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=59424,localport=44247], receiveBufferSize:531000
   [junit4]   2> 701931 INFO  (SocketProxy-Acceptor-36713) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=45886,localport=36713], receiveBufferSize:531000
   [junit4]   2> 701931 INFO  (SocketProxy-Acceptor-44057) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=37397,localport=51230], receiveBufferSize=530904
   [junit4]   2> 701931 INFO  (SocketProxy-Acceptor-44247) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=39745,localport=54646], receiveBufferSize=530904
   [junit4]   2> 701932 INFO  (qtp1358857-5465) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 701932 INFO  (qtp1358857-5465) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 701932 INFO  (SocketProxy-Acceptor-36713) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=43609,localport=47942], receiveBufferSize=530904
   [junit4]   2> 701932 INFO  (qtp1358857-5465) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 701932 INFO  (qtp1358857-5465) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:44247/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 0
   [junit4]   2> 701933 INFO  (qtp15588423-5488) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 701933 INFO  (qtp15588423-5488) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 701933 INFO  (qtp15588423-5488) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 701934 INFO  (qtp15588423-5488) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:44247/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 1
   [junit4]   2> 701934 INFO  (qtp7786401-5523) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 701934 INFO  (qtp7786401-5523) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 701934 INFO  (qtp7786401-5523) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 701934 INFO  (qtp7786401-5523) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:44247/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 0
   [junit4]   2> 701936 INFO  (qtp15588423-5494) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 7
   [junit4]   2> 701937 INFO  (SocketProxy-Acceptor-44057) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=50796,localport=44057], receiveBufferSize:531000
   [junit4]   2> 701937 INFO  (SocketProxy-Acceptor-44057) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=37397,localport=51238], receiveBufferSize=530904
   [junit4]   2> 701938 INFO  (qtp1358857-5463) [n:127.0.0.1:44057_ c:collection1 s:shard2 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 701938 INFO  (SocketProxy-Acceptor-36713) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=45902,localport=36713], receiveBufferSize:531000
   [junit4]   2> 701939 INFO  (SocketProxy-Acceptor-36713) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=43609,localport=47950], receiveBufferSize=530904
   [junit4]   2> 701940 INFO  (qtp7786401-5518) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 701940 INFO  (SocketProxy-Acceptor-44247) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=59444,localport=44247], receiveBufferSize:531000
   [junit4]   2> 701941 INFO  (SocketProxy-Acceptor-44247) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=39745,localport=54660], receiveBufferSize=530904
   [junit4]   2> 701942 INFO  (qtp15588423-5493) [n:127.0.0.1:44247_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 703942 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Creating collection with stateFormat=1: c8n_1x3_lf
   [junit4]   2> 703943 INFO  (SocketProxy-Acceptor-44247) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=59500,localport=44247], receiveBufferSize:531000
   [junit4]   2> 703943 INFO  (SocketProxy-Acceptor-44247) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=39745,localport=54716], receiveBufferSize=530904
   [junit4]   2> 703944 INFO  (qtp15588423-5494) [n:127.0.0.1:44247_    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params replicationFactor=3&maxShardsPerNode=1&name=c8n_1x3_lf&action=CREATE&numShards=1&stateFormat=1&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 703945 INFO  (OverseerThreadFactory-2492-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_    ] o.a.s.c.CreateCollectionCmd Create collection c8n_1x3_lf
   [junit4]   2> 703945 INFO  (OverseerThreadFactory-2492-thread-1-processing-n:127.0.0.1:39243_) [n:127.0.0.1:39243_    ] o.a.s.c.CreateCollectionCmd Only one config set found in zk - using it:conf1
   [junit4]   2> 704048 INFO  (SocketProxy-Acceptor-44057) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=50864,localport=44057], receiveBufferSize:531000
   [junit4]   2> 704048 INFO  (SocketProxy-Acceptor-44247) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=59508,localport=44247], receiveBufferSize:531000
   [junit4]   2> 704048 INFO  (SocketProxy-Acceptor-36713) [    ] o.a.s.c.SocketProxy accepted Socket[addr=/127.0.0.1,port=45966,localport=36713], receiveBufferSize:531000
   [junit4]   2> 704049 INFO  (SocketProxy-Acceptor-44057) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=37397,localport=51310], receiveBufferSize=530904
   [junit4]   2> 704051 INFO  (SocketProxy-Acceptor-36713) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=43609,localport=48022], receiveBufferSize=530904
   [junit4]   2> 704051 INFO  (SocketProxy-Acceptor-44247) [    ] o.a.s.c.SocketProxy proxy connection Socket[addr=/127.0.0.1,port=39745,localport=54726], receiveBufferSize=530904
   [junit4]   2> 704051 INFO  (qtp1358857-5465) [n:127.0.0.1:44057_    ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=c8n_1x3_lf_shard1_replica1&action=CREATE&numShards=1&collection=c8n_1x3_lf&shard=shard1&wt=javabin&version=2
   [junit4]   2> 704052 INFO  (qtp7786401-5523) [n:127.0.0.1:36713_    ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=c8n_1x3_lf_shard1_replica2&action=CREATE&numShards=1&collection=c8n_1x3_lf&shard=shard1&wt=javabin&version=2
   [junit4]   2> 704052 INFO  (qtp15588423-5493) [n:127.0.0.1:44247_    ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=c8n_1x3_lf_shard1_replica3&action=CREATE&numShards=1&collection=c8n_1x3_lf&shard=shard1&wt=javabin&version=2
   [junit4]   2> 705062 WARN  (qtp15588423-5493) [n:127.0.0.1:44247_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 705063 INFO  (qtp15588423-5493) [n:127.0.0.1:44247_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.c.SolrConfig Using Lucene MatchVersion: 6.6.2
   [junit4]   2> 705062 WARN  (qtp7786401-5523) [n:127.0.0.1:36713_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica2] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 705064 INFO  (qtp7786401-5523) [n:127.0.0.1:36713_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica2] o.a.s.c.SolrConfig Using Lucene MatchVersion: 6.6.2
   [junit4]   2> 705062 WARN  (qtp1358857-5465) [n:127.0.0.1:44057_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 705069 INFO  (qtp1358857-5465) [n:127.0.0.1:44057_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 6.6.2
   [junit4]   2> 705073 INFO  (qtp15588423-5493) [n:127.0.0.1:44247_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.s.IndexSchema [c8n_1x3_lf_shard1_replica3] Schema name=test
   [junit4]   2> 705075 INFO  (qtp7786401-5523) [n:127.0.0.1:36713_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica2] o.a.s.s.IndexSchema [c8n_1x3_lf_shard1_replica2] Schema name=test
   [junit4]   2> 705080 INFO  (qtp1358857-5465) [n:127.0.0.1:44057_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica1] o.a.s.s.IndexSchema [c8n_1x3_lf_shard1_replica1] Schema name=test
   [junit4]   2> 705161 INFO  (qtp15588423-5493) [n:127.0.0.1:44247_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 705166 INFO  (qtp15588423-5493) [n:127.0.0.1:44247_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.c.CoreContainer Creating SolrCore 'c8n_1x3_lf_shard1_replica3' using configuration from collection c8n_1x3_lf, trusted=true
   [junit4]   2> 705166 INFO  (qtp15588423-5493) [n:127.0.0.1:44247_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 705166 INFO  (qtp15588423-5493) [n:127.0.0.1:44247_ c:c8n_1x3_lf s:shard1  x:c8n_1x3_lf_shard1_replica3] o.a.s.c.SolrCore [[c8n_1x3_lf_shard1_replica3] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-2-001/cores/c8n_1x3_lf_shard1_replica3], dataDir=[/home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001/shard-2-001/cores/c8n_1x3_lf_shard1_replica3/data/]
   [junit4]   2> 70

[...truncated too long message...]

lang.Thread.run(Thread.java:748)
   [junit4]   2> 
   [junit4]   2> 821207 INFO  (qtp7786401-5702) [n:127.0.0.1:36713_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:44057_&onlyIfLeaderActive=true&core=c8n_1x3_lf_shard1_replica2&coreNodeName=core_node1&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=400 QTime=6004
   [junit4]   2> 821847 WARN  (zkCallback-867-thread-3-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 822815 WARN  (zkCallback-867-thread-5-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.SyncStrategy Closed, skipping sync up.
   [junit4]   2> 822815 INFO  (zkCallback-867-thread-5-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 822815 INFO  (zkCallback-867-thread-5-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore org.apache.solr.core.SolrCore@1c7f62a
   [junit4]   2> 822839 INFO  (zkCallback-867-thread-5-processing-n:127.0.0.1:36713_) [n:127.0.0.1:36713_ c:collection1 s:shard2 r:core_node3 x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.collection1
   [junit4]   2> 822844 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.AbstractConnector Stopped ServerConnector@1f8c40a{HTTP/1.1,[http/1.1]}{127.0.0.1:๐}
   [junit4]   2> 822845 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@b66d73{/,null,UNAVAILABLE}
   [junit4]   2> 822846 ERROR (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper server won't take any action on ERROR or SHUTDOWN server state changes
   [junit4]   2> 822847 INFO  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:38605 38605
   [junit4]   2> 827906 INFO  (Thread-1424) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:38605 38605
   [junit4]   2> 827906 WARN  (Thread-1424) [    ] o.a.s.c.ZkTestServer Watch limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2> 	5	/solr/aliases.json
   [junit4]   2> 	5	/solr/clusterprops.json
   [junit4]   2> 	4	/solr/security.json
   [junit4]   2> 	4	/solr/configs/conf1
   [junit4]   2> 	3	/solr/collections/c8n_1x3_lf/state.json
   [junit4]   2> 	3	/solr/collections/collection1/state.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2> 	5	/solr/clusterstate.json
   [junit4]   2> 	2	/solr/overseer_elect/election/98850653841588232-127.0.0.1:44057_-n_0000000001
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2> 	5	/solr/live_nodes
   [junit4]   2> 	5	/solr/collections
   [junit4]   2> 	3	/solr/overseer/queue
   [junit4]   2> 	3	/solr/overseer/collection-queue-work
   [junit4]   2> 	3	/solr/overseer/queue-work
   [junit4]   2> 
   [junit4]   2> 827907 WARN  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.SocketProxy Closing 1 connections to: http://127.0.0.1:44057/, target: http://127.0.0.1:37397/
   [junit4]   2> 827907 WARN  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.SocketProxy Closing 16 connections to: http://127.0.0.1:36713/, target: http://127.0.0.1:43609/
   [junit4]   2> 827907 WARN  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.SocketProxy Closing 0 connections to: http://127.0.0.1:44247/, target: http://127.0.0.1:39745/
   [junit4]   2> 827907 WARN  (TEST-LeaderFailoverAfterPartitionTest.test-seed#[52A3B40E92477BDA]) [    ] o.a.s.c.SocketProxy Closing 1 connections to: http://127.0.0.1:39243/, target: http://127.0.0.1:34785/
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=LeaderFailoverAfterPartitionTest -Dtests.method=test -Dtests.seed=52A3B40E92477BDA -Dtests.multiplier=3 -Dtests.slow=true -Dtests.locale=th-TH-u-nu-thai-x-lvariant-TH -Dtests.timezone=Africa/Dakar -Dtests.asserts=true -Dtests.file.encoding=UTF-8
   [junit4] FAILURE  134s J1 | LeaderFailoverAfterPartitionTest.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: Expected 2 of 3 replicas to be active but only found 1; [core_node3:{"core":"c8n_1x3_lf_shard1_replica2","base_url":"http://127.0.0.1:36713","node_name":"127.0.0.1:36713_","state":"active","leader":"true"}]; clusterState: DocCollection(c8n_1x3_lf//clusterstate.json/27)={
   [junit4]    >   "replicationFactor":"3",
   [junit4]    >   "shards":{"shard1":{
   [junit4]    >       "range":"80000000-7fffffff",
   [junit4]    >       "state":"active",
   [junit4]    >       "replicas":{
   [junit4]    >         "core_node1":{
   [junit4]    >           "state":"down",
   [junit4]    >           "base_url":"http://127.0.0.1:44057",
   [junit4]    >           "core":"c8n_1x3_lf_shard1_replica1",
   [junit4]    >           "node_name":"127.0.0.1:44057_"},
   [junit4]    >         "core_node2":{
   [junit4]    >           "core":"c8n_1x3_lf_shard1_replica3",
   [junit4]    >           "base_url":"http://127.0.0.1:44247",
   [junit4]    >           "node_name":"127.0.0.1:44247_",
   [junit4]    >           "state":"down"},
   [junit4]    >         "core_node3":{
   [junit4]    >           "core":"c8n_1x3_lf_shard1_replica2",
   [junit4]    >           "base_url":"http://127.0.0.1:36713",
   [junit4]    >           "node_name":"127.0.0.1:36713_",
   [junit4]    >           "state":"active",
   [junit4]    >           "leader":"true"}}}},
   [junit4]    >   "router":{"name":"compositeId"},
   [junit4]    >   "maxShardsPerNode":"1",
   [junit4]    >   "autoAddReplicas":"false"}
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([52A3B40E92477BDA:DAF78BD43CBB1622]:0)
   [junit4]    > 	at org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.testRf3WithLeaderFailover(LeaderFailoverAfterPartitionTest.java:168)
   [junit4]    > 	at org.apache.solr.cloud.LeaderFailoverAfterPartitionTest.test(LeaderFailoverAfterPartitionTest.java:55)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:992)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:967)
   [junit4]    > 	at java.lang.Thread.run(Thread.java:748)
   [junit4]   2> 827911 INFO  (SUITE-LeaderFailoverAfterPartitionTest-seed#[52A3B40E92477BDA]-worker) [    ] o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> NOTE: leaving temporary files on disk at: /home/jenkins/workspace/Lucene-Solr-6.6-Linux/solr/build/solr-core/test/J1/temp/solr.cloud.LeaderFailoverAfterPartitionTest_52A3B40E92477BDA-001
   [junit4]   2> NOTE: test params are: codec=HighCompressionCompressingStoredFields(storedFieldsFormat=CompressingStoredFieldsFormat(compressionMode=HIGH_COMPRESSION, chunkSize=4, maxDocsPerChunk=9, blockSize=8), termVectorsFormat=CompressingTermVectorsFormat(compressionMode=HIGH_COMPRESSION, chunkSize=4, blockSize=8)), sim=RandomSimilarity(queryNorm=true,coord=yes): {}, locale=th-TH-u-nu-thai-x-lvariant-TH, timezone=Africa/Dakar
   [junit4]   2> NOTE: Linux 4.10.0-33-generic i386/Oracle Corporation 1.8.0_144 (32-bit)/cpus=8,threads=1,free=176400952,total=536870912
   [junit4]   2> NOTE: All tests run in this JVM: [TestStressVersions, TestDistributedStatsComponentCardinality, DistributedFacetPivotSmallTest, BasicFunctionalityTest, TestSQLHandlerNonCloud, TestCollectionAPIs, CollectionsAPIDistributedZkTest, SpatialFilterTest, RankQueryTest, TestWordDelimiterFilterFactory, ParsingFieldUpdateProcessorsTest, TestTolerantUpdateProcessorCloud, CSVRequestHandlerTest, ZkControllerTest, SchemaVersionSpecificBehaviorTest, TestLRUCache, TestUpdate, RemoteQueryErrorTest, TestClusterProperties, TestTrieFacet, ChaosMonkeySafeLeaderTest, BlockCacheTest, HdfsBasicDistributedZkTest, TestUnifiedSolrHighlighter, DistributedDebugComponentTest, TermsComponentTest, TestBM25SimilarityFactory, TestScoreJoinQPNoScore, SharedFSAutoReplicaFailoverUtilsTest, ShardRoutingTest, TestCharFilters, DeleteReplicaTest, TestConfigReload, ConnectionReuseTest, TestFileDictionaryLookup, SparseHLLTest, PreAnalyzedFieldManagedSchemaCloudTest, DistribCursorPagingTest, SolrCmdDistributorTest, DocValuesTest, JSONWriterTest, OpenCloseCoreStressTest, TestFuzzyAnalyzedSuggestions, SegmentsInfoRequestHandlerTest, ShardRoutingCustomTest, TestFieldTypeResource, TestSubQueryTransformerDistrib, DistributedFacetPivotLargeTest, WordBreakSolrSpellCheckerTest, TestCollapseQParserPlugin, RestartWhileUpdatingTest, TestOrdValues, TestSegmentSorting, URLClassifyProcessorTest, TestReloadDeadlock, TestLRUStatsCache, LoggingHandlerTest, TriLevelCompositeIdRoutingTest, TestQuerySenderListener, FullHLLTest, TestElisionMultitermQuery, SoftAutoCommitTest, TestRuleBasedAuthorizationPlugin, TestCollationFieldDocValues, TestEmbeddedSolrServerConstructors, TestDeleteCollectionOnDownNodes, MoveReplicaTest, TestGraphTermsQParserPlugin, TestFastOutputStream, TestJsonFacetRefinement, CollectionsAPISolrJTest, TestCSVResponseWriter, LeaderFailoverAfterPartitionTest]
   [junit4] Completed [253/713 (1!)] on J1 in 134.03s, 1 test, 1 failure <<< FAILURES!

[...truncated 40995 lines...]