Posted to dev@lucene.apache.org by Policeman Jenkins Server <je...@thetaphi.de> on 2017/01/15 02:20:59 UTC

[JENKINS] Lucene-Solr-master-MacOSX (64bit/jdk1.8.0) - Build # 3779 - Still Unstable!

Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-MacOSX/3779/
Java: 64bit/jdk1.8.0 -XX:+UseCompressedOops -XX:+UseParallelGC

1 tests failed.
FAILED:  org.apache.solr.cloud.PeerSyncReplicationTest.test

Error Message:
PeerSynced node did not become leader expected:<CloudJettyRunner [url=http://127.0.0.1:52661/_j/pu/collection1]> but was:<CloudJettyRunner [url=http://127.0.0.1:52655/_j/pu/collection1]>

Stack Trace:
java.lang.AssertionError: PeerSynced node did not become leader expected:<CloudJettyRunner [url=http://127.0.0.1:52661/_j/pu/collection1]> but was:<CloudJettyRunner [url=http://127.0.0.1:52655/_j/pu/collection1]>
	at __randomizedtesting.SeedInfo.seed([D41EEA27C046A961:5C4AD5FD6EBAC499]:0)
	at org.junit.Assert.fail(Assert.java:93)
	at org.junit.Assert.failNotEquals(Assert.java:647)
	at org.junit.Assert.assertEquals(Assert.java:128)
	at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:162)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:811)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:462)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
	at java.lang.Thread.run(Thread.java:745)
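
The assertion at PeerSyncReplicationTest.java:162 compares the CloudJettyRunner that is expected to win the shard1 leader election against the leader the cluster actually reports. Below is a hedged, self-contained sketch of that check, reconstructed only from the failure message above; the class and variable names (LeaderCheckSketch, nodePeerSynced, currentLeader) and the simplified CloudJettyRunner stand-in are illustrative assumptions, not the real test code.

// Hedged sketch of the failing check: after the old leader node is brought
// back via PeerSync, the test expects that node to be the shard1 leader.
// Running main() with JUnit 4 on the classpath throws the same AssertionError
// quoted in this report.
import static org.junit.Assert.assertEquals;

public class LeaderCheckSketch {

    // Minimal stand-in for org.apache.solr.cloud.CloudJettyRunner; only the
    // url matters for the equality check in this sketch.
    static final class CloudJettyRunner {
        final String url;
        CloudJettyRunner(String url) { this.url = url; }
        @Override public boolean equals(Object o) {
            return o instanceof CloudJettyRunner && ((CloudJettyRunner) o).url.equals(url);
        }
        @Override public int hashCode() { return url.hashCode(); }
        @Override public String toString() { return "CloudJettyRunner [url=" + url + "]"; }
    }

    public static void main(String[] args) {
        // The peer-synced node that the test expects to become leader...
        CloudJettyRunner nodePeerSynced =
                new CloudJettyRunner("http://127.0.0.1:52661/_j/pu/collection1");
        // ...and the leader actually reported for shard1 in this run.
        CloudJettyRunner currentLeader =
                new CloudJettyRunner("http://127.0.0.1:52655/_j/pu/collection1");

        // Shape of the assertion at PeerSyncReplicationTest.java:162; with
        // these values it reproduces the "did not become leader" failure.
        assertEquals("PeerSynced node did not become leader", nodePeerSynced, currentLeader);
    }
}

For orientation in the build log below: 127.0.0.1:52655 is the jetty 1 node that first wins the shard1 leader election, and 127.0.0.1:52661 is the jetty 2 node that registers and starts recovery against it; the failed assertion expected 52661 to hold the leadership by the time the check ran.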




Build Log:
[...truncated 10769 lines...]
   [junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
   [junit4]   2> Creating dataDir: /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/init-core-data-001
   [junit4]   2> 140574 INFO  (SUITE-PeerSyncReplicationTest-seed#[D41EEA27C046A961]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN) w/ MAC_OS_X supressed clientAuth
   [junit4]   2> 140574 INFO  (SUITE-PeerSyncReplicationTest-seed#[D41EEA27C046A961]-worker) [    ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /_j/pu
   [junit4]   2> 140576 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 140577 INFO  (Thread-277) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 140577 INFO  (Thread-277) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 140686 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ZkTestServer start zk server on port:52632
   [junit4]   2> 140735 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 140741 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/schema.xml to /configs/conf1/schema.xml
   [junit4]   2> 140746 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 140750 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 140755 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 140759 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 140764 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 140768 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 140772 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 140776 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 140780 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 143412 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/control-001/cores/collection1
   [junit4]   2> 143414 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 143417 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@5deb5d62{/_j/pu,null,AVAILABLE}
   [junit4]   2> 143422 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@1fb1413b{HTTP/1.1,[http/1.1]}{127.0.0.1:52638}
   [junit4]   2> 143422 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.Server Started @148876ms
   [junit4]   2> 143422 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/tempDir-001/control/data, hostContext=/_j/pu, hostPort=52638, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/control-001/cores}
   [junit4]   2> 143423 ERROR (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 143423 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 143423 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 143423 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 143423 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T00:56:08.874Z
   [junit4]   2> 143431 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 143431 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/control-001/solr.xml
   [junit4]   2> 143451 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:52632/solr
   [junit4]   2> 143526 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:52638__j%2Fpu
   [junit4]   2> 143530 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.Overseer Overseer (id=97284375578673156-127.0.0.1:52638__j%2Fpu-n_0000000000) starting
   [junit4]   2> 143562 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:52638__j%2Fpu
   [junit4]   2> 143570 INFO  (zkCallback-237-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 143750 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/control-001/cores
   [junit4]   2> 143750 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 143757 INFO  (OverseerStateUpdate-97284375578673156-127.0.0.1:52638__j%2Fpu-n_0000000000) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 144793 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 144815 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 144919 WARN  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 144923 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 144946 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection control_collection
   [junit4]   2> 144964 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/control-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/control-001/cores/collection1/data/]
   [junit4]   2> 144964 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@6117ad09
   [junit4]   2> 144968 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=29, maxMergeAtOnceExplicit=25, maxMergedSegmentMB=55.498046875, floorSegmentMB=0.869140625, forceMergeDeletesPctAllowed=2.6372752441783156, segmentsPerTier=22.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.8621167692779999
   [junit4]   2> 144990 WARN  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 145014 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 145014 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 145015 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 145015 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 145017 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=511249285, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.1]
   [junit4]   2> 145019 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@791956bd[collection1] main]
   [junit4]   2> 145023 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 145024 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 145024 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 145026 INFO  (searcherExecutor-657-thread-1-processing-n:127.0.0.1:52638__j%2Fpu x:collection1 c:control_collection) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@791956bd[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 145027 INFO  (coreLoadExecutor-656-thread-1-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu c:control_collection   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556550013920739328
   [junit4]   2> 145041 INFO  (coreZkRegister-649-thread-1-processing-n:127.0.0.1:52638__j%2Fpu x:collection1 c:control_collection) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 145041 INFO  (coreZkRegister-649-thread-1-processing-n:127.0.0.1:52638__j%2Fpu x:collection1 c:control_collection) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 145041 INFO  (coreZkRegister-649-thread-1-processing-n:127.0.0.1:52638__j%2Fpu x:collection1 c:control_collection) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:52638/_j/pu/collection1/
   [junit4]   2> 145041 INFO  (coreZkRegister-649-thread-1-processing-n:127.0.0.1:52638__j%2Fpu x:collection1 c:control_collection) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 145041 INFO  (coreZkRegister-649-thread-1-processing-n:127.0.0.1:52638__j%2Fpu x:collection1 c:control_collection) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:52638/_j/pu/collection1/ has no replicas
   [junit4]   2> 145051 INFO  (coreZkRegister-649-thread-1-processing-n:127.0.0.1:52638__j%2Fpu x:collection1 c:control_collection) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:52638/_j/pu/collection1/ shard1
   [junit4]   2> 145212 INFO  (coreZkRegister-649-thread-1-processing-n:127.0.0.1:52638__j%2Fpu x:collection1 c:control_collection) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 145354 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 145357 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:52632/solr ready
   [junit4]   2> 145357 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 145776 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-1-001/cores/collection1
   [junit4]   2> 145781 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-1-001
   [junit4]   2> 145782 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 145789 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4593e0cd{/_j/pu,null,AVAILABLE}
   [junit4]   2> 145789 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@1c04f5fe{HTTP/1.1,[http/1.1]}{127.0.0.1:52655}
   [junit4]   2> 145790 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.Server Started @151244ms
   [junit4]   2> 145790 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/tempDir-001/jetty1, solrconfig=solrconfig.xml, hostContext=/_j/pu, hostPort=52655, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-1-001/cores}
   [junit4]   2> 145791 ERROR (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 145794 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 145794 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 145794 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 145794 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T00:56:11.245Z
   [junit4]   2> 145811 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 145811 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-1-001/solr.xml
   [junit4]   2> 145829 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:52632/solr
   [junit4]   2> 145860 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 145871 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:52655__j%2Fpu
   [junit4]   2> 145875 INFO  (zkCallback-237-thread-2-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 145877 INFO  (zkCallback-246-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 145877 INFO  (zkCallback-241-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 146011 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-1-001/cores
   [junit4]   2> 146012 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 146018 INFO  (OverseerStateUpdate-97284375578673156-127.0.0.1:52638__j%2Fpu-n_0000000000) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 147072 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 147090 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 147196 WARN  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 147203 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 147235 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 147235 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-1-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-1-001/cores/collection1/data/]
   [junit4]   2> 147235 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@6117ad09
   [junit4]   2> 147242 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=29, maxMergeAtOnceExplicit=25, maxMergedSegmentMB=55.498046875, floorSegmentMB=0.869140625, forceMergeDeletesPctAllowed=2.6372752441783156, segmentsPerTier=22.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.8621167692779999
   [junit4]   2> 147259 WARN  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 147286 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 147287 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 147288 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 147288 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 147290 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=511249285, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.1]
   [junit4]   2> 147291 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@1d10ad27[collection1] main]
   [junit4]   2> 147293 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 147294 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 147294 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 147296 INFO  (searcherExecutor-668-thread-1-processing-n:127.0.0.1:52655__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@1d10ad27[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 147297 INFO  (coreLoadExecutor-667-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556550016301006848
   [junit4]   2> 147313 INFO  (coreZkRegister-662-thread-1-processing-n:127.0.0.1:52655__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 147314 INFO  (coreZkRegister-662-thread-1-processing-n:127.0.0.1:52655__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 147314 INFO  (coreZkRegister-662-thread-1-processing-n:127.0.0.1:52655__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:52655/_j/pu/collection1/
   [junit4]   2> 147314 INFO  (coreZkRegister-662-thread-1-processing-n:127.0.0.1:52655__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 147314 INFO  (coreZkRegister-662-thread-1-processing-n:127.0.0.1:52655__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:52655/_j/pu/collection1/ has no replicas
   [junit4]   2> 147324 INFO  (coreZkRegister-662-thread-1-processing-n:127.0.0.1:52655__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:52655/_j/pu/collection1/ shard1
   [junit4]   2> 147501 INFO  (coreZkRegister-662-thread-1-processing-n:127.0.0.1:52655__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 148092 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-2-001/cores/collection1
   [junit4]   2> 148094 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-2-001
   [junit4]   2> 148095 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 148098 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@41952eb9{/_j/pu,null,AVAILABLE}
   [junit4]   2> 148099 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@24f995fc{HTTP/1.1,[http/1.1]}{127.0.0.1:52661}
   [junit4]   2> 148100 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.Server Started @153553ms
   [junit4]   2> 148100 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/tempDir-001/jetty2, solrconfig=solrconfig.xml, hostContext=/_j/pu, hostPort=52661, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-2-001/cores}
   [junit4]   2> 148101 ERROR (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 148101 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 148102 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 148102 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 148102 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T00:56:13.553Z
   [junit4]   2> 148110 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 148110 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-2-001/solr.xml
   [junit4]   2> 148137 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:52632/solr
   [junit4]   2> 148170 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52661__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 148190 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52661__j%2Fpu    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:52661__j%2Fpu
   [junit4]   2> 148195 INFO  (zkCallback-237-thread-2-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 148196 INFO  (zkCallback-241-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 148196 INFO  (zkCallback-246-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 148199 INFO  (zkCallback-252-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 148278 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52661__j%2Fpu    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-2-001/cores
   [junit4]   2> 148279 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52661__j%2Fpu    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 148285 INFO  (OverseerStateUpdate-97284375578673156-127.0.0.1:52638__j%2Fpu-n_0000000000) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 149310 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 149330 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 149488 WARN  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 149496 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 149530 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 149530 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-2-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-2-001/cores/collection1/data/]
   [junit4]   2> 149530 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@6117ad09
   [junit4]   2> 149536 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=29, maxMergeAtOnceExplicit=25, maxMergedSegmentMB=55.498046875, floorSegmentMB=0.869140625, forceMergeDeletesPctAllowed=2.6372752441783156, segmentsPerTier=22.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.8621167692779999
   [junit4]   2> 149582 WARN  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 149649 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 149649 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 149650 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 149650 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 149666 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=511249285, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.1]
   [junit4]   2> 149668 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@4c4f5d27[collection1] main]
   [junit4]   2> 149670 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 149671 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 149671 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 149674 INFO  (coreLoadExecutor-678-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556550018793472000
   [junit4]   2> 149680 INFO  (searcherExecutor-679-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52661__j%2Fpu c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@4c4f5d27[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 149686 INFO  (coreZkRegister-673-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 149693 INFO  (updateExecutor-249-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 149694 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 149698 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 149698 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1]
   [junit4]   2> 149698 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 149698 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] as recovering, leader is [http://127.0.0.1:52655/_j/pu/collection1/] and I am [http://127.0.0.1:52661/_j/pu/collection1/]
   [junit4]   2> 149710 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to [http://127.0.0.1:52655/_j/pu]; [WaitForState: action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:52661__j%252Fpu&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 149726 INFO  (qtp999240093-1309) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 149728 INFO  (qtp999240093-1309) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 (shard1 of collection1) have state: recovering
   [junit4]   2> 149728 INFO  (qtp999240093-1309) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=down, localState=active, nodeName=127.0.0.1:52661__j%2Fpu, coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: core_node2:{"core":"collection1","base_url":"http://127.0.0.1:52661/_j/pu","node_name":"127.0.0.1:52661__j%2Fpu","state":"down"}
   [junit4]   2> 150359 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-3-001/cores/collection1
   [junit4]   2> 150361 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-3-001
   [junit4]   2> 150362 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 150364 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@357ba09d{/_j/pu,null,AVAILABLE}
   [junit4]   2> 150365 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@31bed5c5{HTTP/1.1,[http/1.1]}{127.0.0.1:52666}
   [junit4]   2> 150365 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.Server Started @155818ms
   [junit4]   2> 150365 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/tempDir-001/jetty3, solrconfig=solrconfig.xml, hostContext=/_j/pu, hostPort=52666, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-3-001/cores}
   [junit4]   2> 150365 ERROR (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 150369 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 150369 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 150369 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 150369 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T00:56:15.820Z
   [junit4]   2> 150378 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 150378 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-3-001/solr.xml
   [junit4]   2> 150449 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:52632/solr
   [junit4]   2> 150476 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52666__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 150488 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52666__j%2Fpu    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:52666__j%2Fpu
   [junit4]   2> 150493 INFO  (zkCallback-246-thread-1-processing-n:127.0.0.1:52655__j%2Fpu) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 150494 INFO  (zkCallback-252-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 150495 INFO  (zkCallback-259-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 150495 INFO  (zkCallback-237-thread-3-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 150495 INFO  (zkCallback-241-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 150678 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52666__j%2Fpu    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-3-001/cores
   [junit4]   2> 150678 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [n:127.0.0.1:52666__j%2Fpu    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 150684 INFO  (OverseerStateUpdate-97284375578673156-127.0.0.1:52638__j%2Fpu-n_0000000000) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 150732 INFO  (qtp999240093-1309) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:52661__j%2Fpu, coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: core_node2:{"core":"collection1","base_url":"http://127.0.0.1:52661/_j/pu","node_name":"127.0.0.1:52661__j%2Fpu","state":"recovering"}
   [junit4]   2> 150732 INFO  (qtp999240093-1309) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 150732 INFO  (qtp999240093-1309) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:52661__j%252Fpu&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=1006
   [junit4]   2> 151706 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 151730 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 151866 WARN  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 151870 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 151912 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 151913 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-3-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001/shard-3-001/cores/collection1/data/]
   [junit4]   2> 151913 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@6117ad09
   [junit4]   2> 151918 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=29, maxMergeAtOnceExplicit=25, maxMergedSegmentMB=55.498046875, floorSegmentMB=0.869140625, forceMergeDeletesPctAllowed=2.6372752441783156, segmentsPerTier=22.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.8621167692779999
   [junit4]   2> 151966 WARN  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 151992 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 151992 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 151993 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 151993 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 151995 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=511249285, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.1]
   [junit4]   2> 151997 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@a5734a0[collection1] main]
   [junit4]   2> 151999 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 152005 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 152006 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 152009 INFO  (searcherExecutor-690-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@a5734a0[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 152009 INFO  (coreLoadExecutor-689-thread-1-processing-n:127.0.0.1:52666__j%2Fpu) [n:127.0.0.1:52666__j%2Fpu c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556550021241896960
   [junit4]   2> 152017 INFO  (coreZkRegister-684-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 c:collection1) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 152018 INFO  (updateExecutor-256-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 152019 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 152019 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 152019 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1]
   [junit4]   2> 152020 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 152020 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] as recovering, leader is [http://127.0.0.1:52655/_j/pu/collection1/] and I am [http://127.0.0.1:52666/_j/pu/collection1/]
   [junit4]   2> 152024 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to [http://127.0.0.1:52655/_j/pu]; [WaitForState: action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:52666__j%252Fpu&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 152027 INFO  (qtp999240093-1312) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 152028 INFO  (qtp999240093-1312) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 (shard1 of collection1) have state: recovering
   [junit4]   2> 152028 INFO  (qtp999240093-1312) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=down, localState=active, nodeName=127.0.0.1:52666__j%2Fpu, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"http://127.0.0.1:52666/_j/pu","node_name":"127.0.0.1:52666__j%2Fpu","state":"down"}
   [junit4]   2> 152259 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 152259 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30 for each attempt
   [junit4]   2> 152259 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: collection1 failOnTimeout:true timeout (sec):30
   [junit4]   2> 153030 INFO  (qtp999240093-1312) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:52666__j%2Fpu, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"http://127.0.0.1:52666/_j/pu","node_name":"127.0.0.1:52666__j%2Fpu","state":"recovering"}
   [junit4]   2> 153030 INFO  (qtp999240093-1312) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 153030 INFO  (qtp999240093-1312) [n:127.0.0.1:52655__j%2Fpu    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:52666__j%252Fpu&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=1003
   [junit4]   2> 157738 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from [http://127.0.0.1:52655/_j/pu/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 157738 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 url=http://127.0.0.1:52661/_j/pu START replicas=[http://127.0.0.1:52655/_j/pu/collection1/] nUpdates=1000
   [junit4]   2> 157743 INFO  (qtp999240093-1309) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:1.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 157743 INFO  (qtp999240093-1309) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp=/_j/pu path=/get params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2} status=0 QTime=2
   [junit4]   2> 157746 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:1.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 157746 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a PeerSync 
   [junit4]   2> 157746 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 157746 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 157747 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 157747 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was successful.
   [junit4]   2> 157747 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during PeerSync.
   [junit4]   2> 157747 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 157747 INFO  (recoveryExecutor-250-thread-1-processing-n:127.0.0.1:52661__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
   [junit4]   2> 160039 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from [http://127.0.0.1:52655/_j/pu/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 160040 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 url=http://127.0.0.1:52666/_j/pu START replicas=[http://127.0.0.1:52655/_j/pu/collection1/] nUpdates=1000
   [junit4]   2> 160044 INFO  (qtp999240093-1310) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 160044 INFO  (qtp999240093-1310) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp=/_j/pu path=/get params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2} status=0 QTime=0
   [junit4]   2> 160047 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:1.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 160047 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a PeerSync 
   [junit4]   2> 160047 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 160047 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 160048 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 160048 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was successful.
   [junit4]   2> 160048 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during PeerSync.
   [junit4]   2> 160048 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 160048 INFO  (recoveryExecutor-257-thread-1-processing-n:127.0.0.1:52666__j%2Fpu x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
   [junit4]   2> 160293 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 160297 INFO  (qtp1907210922-1272) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 160298 INFO  (qtp1907210922-1272) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 160299 INFO  (qtp1907210922-1272) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 160299 INFO  (qtp1907210922-1272) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 2
   [junit4]   2> 160307 INFO  (qtp999240093-1313) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 160307 INFO  (qtp999240093-1313) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 160308 INFO  (qtp999240093-1313) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 160309 INFO  (qtp999240093-1313) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 2
   [junit4]   2> 160311 INFO  (qtp1556452628-1337) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 160311 INFO  (qtp1299569275-1372) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 160312 INFO  (qtp1556452628-1337) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 160313 INFO  (qtp1556452628-1337) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 160313 INFO  (qtp1299569275-1372) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 160313 INFO  (qtp1299569275-1372) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 160313 INFO  (qtp1556452628-1337) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 2
   [junit4]   2> 160314 INFO  (qtp1299569275-1372) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 3
   [junit4]   2> 160317 INFO  (qtp999240093-1312) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 15
   [junit4]   2> 160322 INFO  (qtp999240093-1314) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp=/_j/pu path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 160325 INFO  (qtp1556452628-1338) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request [collection1]  webapp=/_j/pu path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 160329 INFO  (qtp1299569275-1373) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request [collection1]  webapp=/_j/pu path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 162345 INFO  (qtp1907210922-1273) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={wt=javabin&version=2}{deleteByQuery=*:* (-1556550032076832768)} 0 3
   [junit4]   2> 162352 INFO  (qtp1556452628-1338) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&_version_=-1556550032082075648&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{deleteByQuery=*:* (-1556550032082075648)} 0 2
   [junit4]   2> 162353 INFO  (qtp1299569275-1373) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&_version_=-1556550032082075648&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{deleteByQuery=*:* (-1556550032082075648)} 0 2
   [junit4]   2> 162354 INFO  (qtp999240093-1307) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={wt=javabin&version=2}{deleteByQuery=*:* (-1556550032082075648)} 0 7
   [junit4]   2> 162371 INFO  (qtp1299569275-1375) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[0 (1556550032097804288)]} 0 4
   [junit4]   2> 162374 INFO  (qtp1556452628-1340) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[0 (1556550032097804288)]} 0 6
   [junit4]   2> 162374 INFO  (qtp999240093-1309) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={wt=javabin&version=2}{add=[0 (1556550032097804288)]} 0 13
   [junit4]   2> 162382 INFO  (qtp1299569275-1376) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[1 (1556550032112484352)]} 0 3
   [junit4]   2> 162383 INFO  (qtp1556452628-1341) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[1 (1556550032112484352)]} 0 3
   [junit4]   2> 162384 INFO  (qtp999240093-1311) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={wt=javabin&version=2}{add=[1 (1556550032112484352)]} 0 7
   [junit4]   2> 162390 INFO  (qtp1556452628-1342) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[2 (1556550032122970112)]} 0 1
   [junit4]   2> 162391 INFO  (qtp1299569275-1377) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[2 (1556550032122970112)]} 0 2
   [junit4]   2> 162391 INFO  (qtp999240093-1310) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={wt=javabin&version=2}{add=[2 (1556550032122970112)]} 0 5
   [junit4]   2> 162397 INFO  (qtp1556452628-1335) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[3 (1556550032131358720)]} 0 1
   [junit4]   2> 162398 INFO  (qtp1299569275-1370) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[3 (1556550032131358720)]} 0 1
   [junit4]   2> 162398 INFO  (qtp999240093-1313) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={wt=javabin&version=2}{add=[3 (1556550032131358720)]} 0 4
   [junit4]   2> 162405 INFO  (qtp1556452628-1337) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[4 (1556550032137650176)]} 0 1
   [junit4]   2> 162406 INFO  (qtp1299569275-1372) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[4 (1556550032137650176)]} 0 0
   [junit4]   2> 162407 INFO  (qtp999240093-1312) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={wt=javabin&version=2}{add=[4 (1556550032137650176)]} 0 6
   [junit4]   2> 162414 INFO  (qtp1556452628-1337) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[5 (1556550032146038784)]} 0 1
   [junit4]   2> 162415 INFO  (qtp1299569275-1372) [n:127.0.0.1:52666__j%2Fpu c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/_j/pu path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:52655/_j/pu/collection1/&wt=javabin&version=2}{add=[5 (1556550032146038784)]} 0 1
   [junit4]   2> 162416 INFO  (qtp999240093-1314) [n:127.0.0.1:52655__j%2Fpu c:collection1 s:shard1 r:

[...truncated too long message...]


   [junit4]   1>               "replicas":{
   [junit4]   1>                 "core_node1":{
   [junit4]   1>                   "core":"collection1",
   [junit4]   1>                   "base_url":"http://127.0.0.1:52655/_j/pu",
   [junit4]   1>                   "node_name":"127.0.0.1:52655__j%2Fpu",
   [junit4]   1>                   "state":"down",
   [junit4]   1>                   "leader":"true"},
   [junit4]   1>                 "core_node2":{
   [junit4]   1>                   "core":"collection1",
   [junit4]   1>                   "base_url":"http://127.0.0.1:52661/_j/pu",
   [junit4]   1>                   "node_name":"127.0.0.1:52661__j%2Fpu",
   [junit4]   1>                   "state":"active"},
   [junit4]   1>                 "core_node3":{
   [junit4]   1>                   "core":"collection1",
   [junit4]   1>                   "base_url":"http://127.0.0.1:52666/_j/pu",
   [junit4]   1>                   "node_name":"127.0.0.1:52666__j%2Fpu",
   [junit4]   1>                   "state":"down"}}}}}}
   [junit4]   1> 
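The cluster-state excerpt above shows shard1's published leader (core_node1 on http://127.0.0.1:52655/_j/pu) in the "down" state while core_node2 (52661) is "active" and core_node3 (52666) is "down". That matches the "PeerSynced node did not become leader" assertion reported further down for this run: the test expected the peer-synced replica on 52661 to take over leadership, but 52655 was still registered as leader. As an illustrative sketch only (not part of the test), the snippet below prints the same shard1 leader/replica state via SolrJ; the ZooKeeper address and collection name are copied from this log, the class name is made up, and the code assumes the SolrJ 6.x/7.x CloudSolrClient.Builder and ZkStateReader APIs.

import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkStateReader;

public class PrintShard1State {
  public static void main(String[] args) throws Exception {
    // ZK address as started by ZkTestServer earlier in this log; adjust for a real cluster.
    String zkHost = "127.0.0.1:52632/solr";
    try (CloudSolrClient client = new CloudSolrClient.Builder().withZkHost(zkHost).build()) {
      client.connect();
      ZkStateReader reader = client.getZkStateReader();
      DocCollection coll = reader.getClusterState().getCollection("collection1");
      Slice shard1 = coll.getSlice("shard1");
      // The leader may be null while an election is still in flight.
      Replica leader = shard1.getLeader();
      System.out.println("leader: " + (leader == null ? "none" : leader.getName() + " @ " + leader.getNodeName()));
      for (Replica r : shard1.getReplicas()) {
        System.out.println(r.getName() + " node=" + r.getNodeName() + " state=" + r.getState());
      }
    }
  }
}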
   [junit4]   2> 180041 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ChaosMonkey monkey: stop shard! 52638
   [junit4]   2> 180041 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=954710265
   [junit4]   2> 180044 INFO  (coreCloseExecutor-712-thread-1) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore org.apache.solr.core.SolrCore@c5ec063
   [junit4]   2> 180089 INFO  (coreCloseExecutor-712-thread-1) [n:127.0.0.1:52638__j%2Fpu c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.collection1
   [junit4]   2> 180090 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.Overseer Overseer (id=97284375578673156-127.0.0.1:52638__j%2Fpu-n_0000000000) closing
   [junit4]   2> 180091 INFO  (OverseerStateUpdate-97284375578673156-127.0.0.1:52638__j%2Fpu-n_0000000000) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.Overseer Overseer Loop exiting : 127.0.0.1:52638__j%2Fpu
   [junit4]   2> 180095 WARN  (zkCallback-237-thread-3-processing-n:127.0.0.1:52638__j%2Fpu) [n:127.0.0.1:52638__j%2Fpu    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 180096 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 180101 INFO  (zkCallback-266-thread-2-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (1)
   [junit4]   2> 180101 INFO  (zkCallback-266-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:52661__j%2Fpu
   [junit4]   2> 180102 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.AbstractConnector Stopped ServerConnector@1fb1413b{HTTP/1.1,[http/1.1]}{127.0.0.1:0}
   [junit4]   2> 180103 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@5deb5d62{/_j/pu,null,UNAVAILABLE}
   [junit4]   2> 180107 INFO  (zkCallback-266-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu    ] o.a.s.c.Overseer Overseer (id=97284375578673169-127.0.0.1:52661__j%2Fpu-n_0000000004) starting
   [junit4]   2> 180115 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ChaosMonkey monkey: stop shard! 52655
   [junit4]   2> 180116 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ChaosMonkey monkey: stop shard! 52661
   [junit4]   2> 180116 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=2136652161
   [junit4]   2> 180130 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.Overseer Overseer (id=97284375578673169-127.0.0.1:52661__j%2Fpu-n_0000000004) closing
   [junit4]   2> 180131 INFO  (OverseerStateUpdate-97284375578673169-127.0.0.1:52661__j%2Fpu-n_0000000004) [n:127.0.0.1:52661__j%2Fpu    ] o.a.s.c.Overseer Overseer Loop exiting : 127.0.0.1:52661__j%2Fpu
   [junit4]   2> 180139 WARN  (zkCallback-266-thread-1-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 182450 WARN  (zkCallback-266-thread-3-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.SyncStrategy Closed, skipping sync up.
   [junit4]   2> 182451 INFO  (zkCallback-266-thread-3-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore org.apache.solr.core.SolrCore@728ee179
   [junit4]   2> 182494 INFO  (zkCallback-266-thread-3-processing-n:127.0.0.1:52661__j%2Fpu) [n:127.0.0.1:52661__j%2Fpu c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.collection1
   [junit4]   2> 182495 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 182496 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.AbstractConnector Stopped ServerConnector@3580a500{HTTP/1.1,[http/1.1]}{127.0.0.1:52661}
   [junit4]   2> 182496 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@4df13601{/_j/pu,null,UNAVAILABLE}
   [junit4]   2> 182497 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ChaosMonkey monkey: stop shard! 52666
   [junit4]   2> 182498 INFO  (TEST-PeerSyncReplicationTest.test-seed#[D41EEA27C046A961]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:52632 52632
   [junit4]   2> 183611 INFO  (Thread-277) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:52632 52632
   [junit4]   2> 185411 WARN  (Thread-277) [    ] o.a.s.c.ZkTestServer Watch limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2> 	6	/solr/aliases.json
   [junit4]   2> 	6	/solr/clusterprops.json
   [junit4]   2> 	5	/solr/security.json
   [junit4]   2> 	5	/solr/configs/conf1
   [junit4]   2> 	4	/solr/collections/collection1/state.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2> 	6	/solr/clusterstate.json
   [junit4]   2> 	2	/solr/overseer_elect/election/97284375578673160-127.0.0.1:52655__j%2Fpu-n_0000000001
   [junit4]   2> 	2	/solr/collections/collection1/leader_elect/shard1/election/97284375578673160-core_node1-n_0000000000
   [junit4]   2> 	2	/solr/overseer_elect/election/97284375578673156-127.0.0.1:52638__j%2Fpu-n_0000000000
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2> 	41	/solr/overseer/queue
   [junit4]   2> 	37	/solr/overseer/collection-queue-work
   [junit4]   2> 	6	/solr/collections
   [junit4]   2> 	5	/solr/live_nodes
   [junit4]   2> 	5	/solr/overseer/queue-work
   [junit4]   2> 
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=PeerSyncReplicationTest -Dtests.method=test -Dtests.seed=D41EEA27C046A961 -Dtests.slow=true -Dtests.locale=pl-PL -Dtests.timezone=America/Shiprock -Dtests.asserts=true -Dtests.file.encoding=UTF-8
   [junit4] FAILURE 44.8s J1 | PeerSyncReplicationTest.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: PeerSynced node did not become leader expected:<CloudJettyRunner [url=http://127.0.0.1:52661/_j/pu/collection1]> but was:<CloudJettyRunner [url=http://127.0.0.1:52655/_j/pu/collection1]>
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([D41EEA27C046A961:5C4AD5FD6EBAC499]:0)
   [junit4]    > 	at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:162)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
   [junit4]    > 	at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 185415 INFO  (SUITE-PeerSyncReplicationTest-seed#[D41EEA27C046A961]-worker) [    ] o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> NOTE: leaving temporary files on disk at: /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_D41EEA27C046A961-001
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene70), sim=RandomSimilarity(queryNorm=false): {}, locale=pl-PL, timezone=America/Shiprock
   [junit4]   2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_102 (64-bit)/cpus=3,threads=1,free=93140968,total=307232768
   [junit4]   2> NOTE: All tests run in this JVM: [AsyncCallRequestStatusResponseTest, DebugComponentTest, TestLuceneMatchVersion, SliceStateTest, ScriptEngineTest, TestFieldTypeResource, CdcrVersionReplicationTest, EchoParamsTest, TestSubQueryTransformerCrossCore, TestSchemaResource, TestBulkSchemaConcurrent, SolrIndexSplitterTest, CircularListTest, DeleteNodeTest, NoCacheHeaderTest, TestReload, UpdateParamsTest, OverseerStatusTest, TestClusterProperties, PeerSyncReplicationTest]
   [junit4] Completed [44/678 (1!)] on J1 in 44.86s, 1 test, 1 failure <<< FAILURES!

[...truncated 64527 lines...]


[JENKINS] Lucene-Solr-master-MacOSX (64bit/jdk1.8.0) - Build # 3783 - Still Unstable!

Posted by Policeman Jenkins Server <je...@thetaphi.de>.
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-MacOSX/3783/
Java: 64bit/jdk1.8.0 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC

1 tests failed.
FAILED:  org.apache.solr.cloud.PeerSyncReplicationTest.test

Error Message:
timeout waiting to see all nodes active

Stack Trace:
java.lang.AssertionError: timeout waiting to see all nodes active
	at __randomizedtesting.SeedInfo.seed([7150ECE2BD11873D:F904D33813EDEAC5]:0)
	at org.junit.Assert.fail(Assert.java:93)
	at org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
	at org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
	at org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
	at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:811)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:462)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
	at java.lang.Thread.run(Thread.java:745)
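The timeout above is raised from waitTillNodesActive() at PeerSyncReplicationTest.java:326, i.e. after the dead node was brought back the test gave up waiting for every replica of the shard to be published as active on a live node. The test's own implementation is not reproduced here; the sketch below only illustrates the kind of polling such a wait implies, assuming the SolrJ 6.x/7.x ZkStateReader/ClusterState API, with the class name, poll interval and error message chosen for illustration.

import java.util.concurrent.TimeUnit;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkStateReader;

public class WaitForActiveReplicas {
  // Poll the published cluster state until every replica of the shard is ACTIVE
  // and hosted on a live node, or the timeout elapses.
  public static void await(ZkStateReader reader, String collection, String shard,
                           long timeoutSec) throws InterruptedException {
    long deadline = System.nanoTime() + TimeUnit.SECONDS.toNanos(timeoutSec);
    while (System.nanoTime() < deadline) {
      ClusterState state = reader.getClusterState();
      Slice slice = state.getCollection(collection).getSlice(shard);
      boolean allActive = true;
      for (Replica r : slice.getReplicas()) {
        boolean live = state.getLiveNodes().contains(r.getNodeName());
        if (!live || r.getState() != Replica.State.ACTIVE) {
          allActive = false;
          break;
        }
      }
      if (allActive) {
        return;
      }
      Thread.sleep(500); // give the overseer/state publication time to catch up
    }
    throw new AssertionError("timeout waiting to see all nodes active");
  }
}

One might call WaitForActiveReplicas.await(reader, "collection1", "shard1", 30) with a ZkStateReader obtained as in the earlier sketch. When a loop of this shape times out, the restarted node apparently never published an active state within the window, which is what the failed run logged below records.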




Build Log:
[...truncated 11299 lines...]
   [junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
   [junit4]   2> Creating dataDir: /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/init-core-data-001
   [junit4]   2> 1125579 INFO  (SUITE-PeerSyncReplicationTest-seed#[7150ECE2BD11873D]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN) w/ MAC_OS_X supressed clientAuth
   [junit4]   2> 1125579 INFO  (SUITE-PeerSyncReplicationTest-seed#[7150ECE2BD11873D]-worker) [    ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   2> 1125582 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 1125582 INFO  (Thread-1430) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 1125582 INFO  (Thread-1430) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 1125684 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.ZkTestServer start zk server on port:62557
   [junit4]   2> 1125720 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 1125724 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/schema.xml to /configs/conf1/schema.xml
   [junit4]   2> 1125727 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 1125731 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 1125734 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 1125736 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 1125740 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 1125743 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 1125746 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 1125749 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 1125752 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 1126112 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/control-001/cores/collection1
   [junit4]   2> 1126132 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1126135 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@3cddd3e5{/,null,AVAILABLE}
   [junit4]   2> 1126143 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@79c37fa6{SSL,[ssl, http/1.1]}{127.0.0.1:62562}
   [junit4]   2> 1126143 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.Server Started @1131746ms
   [junit4]   2> 1126143 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/tempDir-001/control/data, hostContext=/, hostPort=62562, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/control-001/cores}
   [junit4]   2> 1126143 ERROR (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1126144 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 1126144 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1126144 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1126144 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-16T18:02:09.722Z
   [junit4]   2> 1126149 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1126149 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/control-001/solr.xml
   [junit4]   2> 1126160 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:62557/solr
   [junit4]   2> 1126213 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62562_    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:62562_
   [junit4]   2> 1126216 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62562_    ] o.a.s.c.Overseer Overseer (id=97294072490426373-127.0.0.1:62562_-n_0000000000) starting
   [junit4]   2> 1126234 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62562_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:62562_
   [junit4]   2> 1126238 INFO  (zkCallback-1962-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1126285 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62562_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/control-001/cores
   [junit4]   2> 1126285 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62562_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 1126292 INFO  (OverseerStateUpdate-97294072490426373-127.0.0.1:62562_-n_0000000000) [n:127.0.0.1:62562_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 1127318 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 1127335 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 1127451 WARN  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 1127455 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1127478 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection control_collection
   [junit4]   2> 1127479 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/control-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/control-001/cores/collection1/data/]
   [junit4]   2> 1127479 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@4e07a194
   [junit4]   2> 1127483 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=392587938, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.1]
   [junit4]   2> 1127492 WARN  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 1127512 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 1127512 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1127513 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1127513 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1127513 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=21, maxMergeAtOnceExplicit=41, maxMergedSegmentMB=97.142578125, floorSegmentMB=0.5673828125, forceMergeDeletesPctAllowed=13.06025776580354, segmentsPerTier=49.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.8684908353858564
   [junit4]   2> 1127515 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@572e4e44[collection1] main]
   [junit4]   2> 1127517 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 1127518 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1127518 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 1127520 INFO  (searcherExecutor-3363-thread-1-processing-n:127.0.0.1:62562_ x:collection1 c:control_collection) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@572e4e44[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1127520 INFO  (coreLoadExecutor-3362-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_ c:control_collection   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556705161875816448
   [junit4]   2> 1127532 INFO  (coreZkRegister-3355-thread-1-processing-n:127.0.0.1:62562_ x:collection1 c:control_collection) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 1127532 INFO  (coreZkRegister-3355-thread-1-processing-n:127.0.0.1:62562_ x:collection1 c:control_collection) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 1127532 INFO  (coreZkRegister-3355-thread-1-processing-n:127.0.0.1:62562_ x:collection1 c:control_collection) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to https://127.0.0.1:62562/collection1/
   [junit4]   2> 1127532 INFO  (coreZkRegister-3355-thread-1-processing-n:127.0.0.1:62562_ x:collection1 c:control_collection) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 1127533 INFO  (coreZkRegister-3355-thread-1-processing-n:127.0.0.1:62562_ x:collection1 c:control_collection) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy https://127.0.0.1:62562/collection1/ has no replicas
   [junit4]   2> 1127541 INFO  (coreZkRegister-3355-thread-1-processing-n:127.0.0.1:62562_ x:collection1 c:control_collection) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: https://127.0.0.1:62562/collection1/ shard1
   [junit4]   2> 1127625 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1127627 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:62557/solr ready
   [junit4]   2> 1127627 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 1127627 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Creating collection1 with stateFormat=2
   [junit4]   2> 1127648 INFO  (coreZkRegister-3355-thread-1-processing-n:127.0.0.1:62562_ x:collection1 c:control_collection) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
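
A note on the lines above: once ZkClientClusterStateProvider reports "Cluster at 127.0.0.1:62557/solr ready" and the control core has won the shard1 election, any SolrJ client can reach the cluster through that ZooKeeper address. A minimal sketch, assuming the Solr 7.x CloudSolrClient.Builder API (the ZooKeeper address and collection name are simply the ones from the log; the class name is illustrative):

    // Sketch only, not part of the test: connect a SolrJ client to the
    // ZooKeeper ensemble shown above and run a match-all query.
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class ClusterSmokeCheck {
      public static void main(String[] args) throws Exception {
        try (CloudSolrClient client = new CloudSolrClient.Builder()
            .withZkHost("127.0.0.1:62557/solr")   // ZK address taken from the log above
            .build()) {
          client.setDefaultCollection("collection1");
          QueryResponse rsp = client.query(new SolrQuery("*:*"));
          System.out.println("numFound=" + rsp.getResults().getNumFound());
        }
      }
    }
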
   [junit4]   2> 1127993 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-1-001/cores/collection1
   [junit4]   2> 1127996 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-1-001
   [junit4]   2> 1128013 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1128016 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@431cd114{/,null,AVAILABLE}
   [junit4]   2> 1128019 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@7e87d7a4{SSL,[ssl, http/1.1]}{127.0.0.1:62569}
   [junit4]   2> 1128019 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.Server Started @1133623ms
   [junit4]   2> 1128019 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/tempDir-001/jetty1, solrconfig=solrconfig.xml, hostContext=/, hostPort=62569, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-1-001/cores}
   [junit4]   2> 1128020 ERROR (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1128021 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 1128021 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1128021 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1128021 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-16T18:02:11.599Z
   [junit4]   2> 1128039 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1128039 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-1-001/solr.xml
   [junit4]   2> 1128057 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:62557/solr
   [junit4]   2> 1128078 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1128089 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62569_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:62569_
   [junit4]   2> 1128092 INFO  (zkCallback-1962-thread-2-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1128092 INFO  (zkCallback-1966-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1128094 INFO  (zkCallback-1972-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1128242 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62569_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-1-001/cores
   [junit4]   2> 1128242 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62569_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 1128246 INFO  (OverseerStateUpdate-97294072490426373-127.0.0.1:62562_-n_0000000000) [n:127.0.0.1:62562_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 1128356 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 1129273 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 1129292 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 1129454 WARN  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 1129456 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1129493 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 1129493 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-1-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-1-001/cores/collection1/data/]
   [junit4]   2> 1129494 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@4e07a194
   [junit4]   2> 1129500 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=392587938, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.1]
   [junit4]   2> 1129531 WARN  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 1129584 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 1129584 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1129586 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1129586 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1129587 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=21, maxMergeAtOnceExplicit=41, maxMergedSegmentMB=97.142578125, floorSegmentMB=0.5673828125, forceMergeDeletesPctAllowed=13.06025776580354, segmentsPerTier=49.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.8684908353858564
   [junit4]   2> 1129622 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@4bdae585[collection1] main]
   [junit4]   2> 1129624 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 1129624 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1129624 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 1129626 INFO  (searcherExecutor-3374-thread-1-processing-n:127.0.0.1:62569_ x:collection1 c:collection1) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@4bdae585[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1129627 INFO  (coreLoadExecutor-3373-thread-1-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556705164085166080
   [junit4]   2> 1129644 INFO  (coreZkRegister-3368-thread-1-processing-n:127.0.0.1:62569_ x:collection1 c:collection1) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 1129645 INFO  (coreZkRegister-3368-thread-1-processing-n:127.0.0.1:62569_ x:collection1 c:collection1) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 1129645 INFO  (coreZkRegister-3368-thread-1-processing-n:127.0.0.1:62569_ x:collection1 c:collection1) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to https://127.0.0.1:62569/collection1/
   [junit4]   2> 1129645 INFO  (coreZkRegister-3368-thread-1-processing-n:127.0.0.1:62569_ x:collection1 c:collection1) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 1129645 INFO  (coreZkRegister-3368-thread-1-processing-n:127.0.0.1:62569_ x:collection1 c:collection1) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy https://127.0.0.1:62569/collection1/ has no replicas
   [junit4]   2> 1129655 INFO  (coreZkRegister-3368-thread-1-processing-n:127.0.0.1:62569_ x:collection1 c:collection1) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: https://127.0.0.1:62569/collection1/ shard1
   [junit4]   2> 1129762 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 1129767 INFO  (coreZkRegister-3368-thread-1-processing-n:127.0.0.1:62569_ x:collection1 c:collection1) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 1129875 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 1130056 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-2-001/cores/collection1
   [junit4]   2> 1130059 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-2-001
   [junit4]   2> 1130073 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1130080 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@2d8a43ab{/,null,AVAILABLE}
   [junit4]   2> 1130104 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@7c3fc48f{SSL,[ssl, http/1.1]}{127.0.0.1:62588}
   [junit4]   2> 1130104 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.Server Started @1135707ms
   [junit4]   2> 1130104 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/tempDir-001/jetty2, solrconfig=solrconfig.xml, hostContext=/, hostPort=62588, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-2-001/cores}
   [junit4]   2> 1130104 ERROR (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1130111 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 1130113 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1130113 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1130116 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-16T18:02:13.694Z
   [junit4]   2> 1130149 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1130149 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-2-001/solr.xml
   [junit4]   2> 1130178 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:62557/solr
   [junit4]   2> 1130202 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62588_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1130223 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62588_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:62588_
   [junit4]   2> 1130247 INFO  (zkCallback-1966-thread-2) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1130247 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1130248 INFO  (zkCallback-1978-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1130247 INFO  (zkCallback-1962-thread-2-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1130348 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1130386 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62588_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-2-001/cores
   [junit4]   2> 1130386 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62588_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 1130393 INFO  (OverseerStateUpdate-97294072490426373-127.0.0.1:62562_-n_0000000000) [n:127.0.0.1:62562_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 1130504 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1130504 INFO  (zkCallback-1978-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1131412 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 1131428 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 1131588 WARN  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 1131590 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1131613 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 1131613 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-2-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-2-001/cores/collection1/data/]
   [junit4]   2> 1131613 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@4e07a194
   [junit4]   2> 1131617 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=392587938, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.1]
   [junit4]   2> 1131634 WARN  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 1131674 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 1131674 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1131675 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1131675 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1131676 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=21, maxMergeAtOnceExplicit=41, maxMergedSegmentMB=97.142578125, floorSegmentMB=0.5673828125, forceMergeDeletesPctAllowed=13.06025776580354, segmentsPerTier=49.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.8684908353858564
   [junit4]   2> 1131678 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@2ffd195e[collection1] main]
   [junit4]   2> 1131685 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 1131688 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1131688 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 1131691 INFO  (searcherExecutor-3385-thread-1-processing-n:127.0.0.1:62588_ x:collection1 c:collection1) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@2ffd195e[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1131692 INFO  (coreLoadExecutor-3384-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556705166250475520
   [junit4]   2> 1131698 INFO  (coreZkRegister-3379-thread-1-processing-n:127.0.0.1:62588_ x:collection1 c:collection1) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 1131699 INFO  (updateExecutor-1975-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 1131699 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 1131699 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 1131699 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1]
   [junit4]   2> 1131699 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 1131699 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] as recovering, leader is [https://127.0.0.1:62569/collection1/] and I am [https://127.0.0.1:62588/collection1/]
   [junit4]   2> 1131704 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to [https://127.0.0.1:62569]; [WaitForState: action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:62588_&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 1131760 INFO  (qtp224525690-9502) [n:127.0.0.1:62569_    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 1131760 INFO  (qtp224525690-9502) [n:127.0.0.1:62569_    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 (shard1 of collection1) have state: recovering
   [junit4]   2> 1131761 INFO  (qtp224525690-9502) [n:127.0.0.1:62569_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=down, localState=active, nodeName=127.0.0.1:62588_, coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: core_node2:{"core":"collection1","base_url":"https://127.0.0.1:62588","node_name":"127.0.0.1:62588_","state":"down"}
   [junit4]   2> 1131826 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1131827 INFO  (zkCallback-1978-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1132127 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-3-001/cores/collection1
   [junit4]   2> 1132129 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-3-001
   [junit4]   2> 1132149 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1132151 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7f06bc24{/,null,AVAILABLE}
   [junit4]   2> 1132157 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@6cbe1359{SSL,[ssl, http/1.1]}{127.0.0.1:62604}
   [junit4]   2> 1132157 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.Server Started @1137760ms
   [junit4]   2> 1132157 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/tempDir-001/jetty3, solrconfig=solrconfig.xml, hostContext=/, hostPort=62604, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-3-001/cores}
   [junit4]   2> 1132157 ERROR (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1132160 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 1132160 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1132160 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1132160 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-16T18:02:15.738Z
   [junit4]   2> 1132189 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1132189 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-3-001/solr.xml
   [junit4]   2> 1132202 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:62557/solr
   [junit4]   2> 1132221 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62604_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 1132233 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62604_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:62604_
   [junit4]   2> 1132237 INFO  (zkCallback-1966-thread-2) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1132237 INFO  (zkCallback-1962-thread-1-processing-n:127.0.0.1:62562_) [n:127.0.0.1:62562_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1132238 INFO  (zkCallback-1978-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1132238 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1132239 INFO  (zkCallback-1985-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1132293 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62604_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-3-001/cores
   [junit4]   2> 1132294 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [n:127.0.0.1:62604_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 1132307 INFO  (OverseerStateUpdate-97294072490426373-127.0.0.1:62562_-n_0000000000) [n:127.0.0.1:62562_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 1132418 INFO  (zkCallback-1985-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1132418 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1132418 INFO  (zkCallback-1978-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1132771 INFO  (qtp224525690-9502) [n:127.0.0.1:62569_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:62588_, coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: core_node2:{"core":"collection1","base_url":"https://127.0.0.1:62588","node_name":"127.0.0.1:62588_","state":"recovering"}
   [junit4]   2> 1132771 INFO  (qtp224525690-9502) [n:127.0.0.1:62569_    ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 1132771 INFO  (qtp224525690-9502) [n:127.0.0.1:62569_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:62588_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=1012
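
The PREPRECOVERY call that just completed (QTime=1012) is the leader on port 62569 waiting for core_node2 to publish state "recovering" in /collections/collection1/state.json before the replica continues with recovery. A rough client-side equivalent of that wait, sketched with the Solr 7.x SolrJ/ZkStateReader classes (the class and method below are illustrative helpers, not Solr internals):

    // Illustrative helper, not Solr code: poll the replica state published in
    // ZooKeeper, much like the WaitForState loop logged above on the leader.
    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.common.cloud.DocCollection;
    import org.apache.solr.common.cloud.Replica;
    import org.apache.solr.common.cloud.Slice;

    public class WaitForReplicaState {
      public static boolean waitFor(CloudSolrClient client, String collection,
                                    String coreNodeName, Replica.State expected,
                                    long timeoutMs) throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (System.currentTimeMillis() < deadline) {
          client.getZkStateReader().forceUpdateCollection(collection);   // refresh cached state.json
          DocCollection coll =
              client.getZkStateReader().getClusterState().getCollection(collection);
          for (Slice slice : coll.getSlices()) {
            Replica r = slice.getReplica(coreNodeName);                  // e.g. "core_node2"
            if (r != null && r.getState() == expected) {
              return true;                                               // state published, stop waiting
            }
          }
          Thread.sleep(250);
        }
        return false;
      }
    }
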
   [junit4]   2> 1133331 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 1133354 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 1133462 WARN  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 1133464 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1133490 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 1133490 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-3-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001/shard-3-001/cores/collection1/data/]
   [junit4]   2> 1133490 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@4e07a194
   [junit4]   2> 1133494 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=392587938, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.1]
   [junit4]   2> 1133507 WARN  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 1133532 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 1133532 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1133533 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1133533 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1133534 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=21, maxMergeAtOnceExplicit=41, maxMergedSegmentMB=97.142578125, floorSegmentMB=0.5673828125, forceMergeDeletesPctAllowed=13.06025776580354, segmentsPerTier=49.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.8684908353858564
   [junit4]   2> 1133535 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@c4c4607[collection1] main]
   [junit4]   2> 1133537 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 1133538 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1133538 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 1133539 INFO  (searcherExecutor-3396-thread-1-processing-n:127.0.0.1:62604_ x:collection1 c:collection1) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@c4c4607[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1133540 INFO  (coreLoadExecutor-3395-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556705168188243968
   [junit4]   2> 1133546 INFO  (coreZkRegister-3390-thread-1-processing-n:127.0.0.1:62604_ x:collection1 c:collection1) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 1133547 INFO  (updateExecutor-1982-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 1133547 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 1133547 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 1133548 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1]
   [junit4]   2> 1133548 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 1133548 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] as recovering, leader is [https://127.0.0.1:62569/collection1/] and I am [https://127.0.0.1:62604/collection1/]
   [junit4]   2> 1133551 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to [https://127.0.0.1:62569]; [WaitForState: action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:62604_&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 1133565 INFO  (qtp224525690-9500) [n:127.0.0.1:62569_    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 1133566 INFO  (qtp224525690-9500) [n:127.0.0.1:62569_    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 (shard1 of collection1) have state: recovering
   [junit4]   2> 1133566 INFO  (qtp224525690-9500) [n:127.0.0.1:62569_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=down, localState=active, nodeName=127.0.0.1:62604_, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"https://127.0.0.1:62604","node_name":"127.0.0.1:62604_","state":"down"}
   [junit4]   2> 1133628 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 1133628 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30 for each attempt
   [junit4]   2> 1133628 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: collection1 failOnTimeout:true timeout (sec):30
   [junit4]   2> 1133667 INFO  (zkCallback-1985-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1133667 INFO  (zkCallback-1978-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1133667 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1134569 INFO  (qtp224525690-9500) [n:127.0.0.1:62569_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:62604_, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"https://127.0.0.1:62604","node_name":"127.0.0.1:62604_","state":"recovering"}
   [junit4]   2> 1134569 INFO  (qtp224525690-9500) [n:127.0.0.1:62569_    ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 1134569 INFO  (qtp224525690-9500) [n:127.0.0.1:62569_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:62604_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=1004
   [junit4]   2> 1139774 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from [https://127.0.0.1:62569/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 1139774 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 url=https://127.0.0.1:62588 START replicas=[https://127.0.0.1:62569/collection1/] nUpdates=1000
   [junit4]   2> 1139788 INFO  (qtp224525690-9504) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:1.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 1139789 INFO  (qtp224525690-9504) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/get params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 1139792 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:1.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 1139792 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a PeerSync 
   [junit4]   2> 1139792 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1139792 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1139793 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 1139793 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was successful.
   [junit4]   2> 1139793 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during PeerSync.
   [junit4]   2> 1139793 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 1139793 INFO  (recoveryExecutor-1976-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
   [junit4]   2> 1139800 INFO  (zkCallback-1978-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1139800 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1139800 INFO  (zkCallback-1985-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1141572 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from [https://127.0.0.1:62569/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 1141572 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 url=https://127.0.0.1:62604 START replicas=[https://127.0.0.1:62569/collection1/] nUpdates=1000
   [junit4]   2> 1141583 INFO  (qtp224525690-9500) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 1141584 INFO  (qtp224525690-9500) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/get params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2} status=0 QTime=0
   [junit4]   2> 1141586 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:1.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 1141586 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a PeerSync 
   [junit4]   2> 1141586 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1141586 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1141587 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 1141587 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was successful.
   [junit4]   2> 1141587 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during PeerSync.
   [junit4]   2> 1141587 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 1141587 INFO  (recoveryExecutor-1983-thread-1-processing-n:127.0.0.1:62604_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
   [junit4]   2> 1141593 INFO  (zkCallback-1985-thread-1-processing-n:127.0.0.1:62604_) [n:127.0.0.1:62604_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1141593 INFO  (zkCallback-1978-thread-1-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1141593 INFO  (zkCallback-1972-thread-2-processing-n:127.0.0.1:62569_) [n:127.0.0.1:62569_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 1141677 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 1141709 INFO  (qtp2012017196-9464) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1141710 INFO  (qtp2012017196-9464) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1141712 INFO  (qtp2012017196-9464) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 1141712 INFO  (qtp2012017196-9464) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 4
   [junit4]   2> 1141764 INFO  (qtp224525690-9498) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1141764 INFO  (qtp224525690-9498) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1141767 INFO  (qtp224525690-9498) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 1141767 INFO  (qtp224525690-9498) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:62569/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 3
   [junit4]   2> 1141778 INFO  (qtp1771294570-9568) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1141778 INFO  (qtp1771294570-9568) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1141779 INFO  (qtp1771294570-9568) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 1141779 INFO  (qtp1771294570-9568) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:62569/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 1
   [junit4]   2> 1141780 INFO  (qtp1998988379-9531) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1141780 INFO  (qtp1998988379-9531) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1141780 INFO  (qtp1998988379-9531) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 1141780 INFO  (qtp1998988379-9531) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:62569/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 0
   [junit4]   2> 1141781 INFO  (qtp224525690-9503) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={_stateVer_=collection1:11&waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 57
   [junit4]   2> 1141792 INFO  (qtp224525690-9504) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 1141802 INFO  (qtp1998988379-9534) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 1141817 INFO  (qtp1771294570-9564) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 1143827 INFO  (qtp2012017196-9465) [n:127.0.0.1:62562_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{deleteByQuery=*:* (-1556705178971799552)} 0 3
   [junit4]   2> 1143836 INFO  (qtp1771294570-9566) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&_version_=-1556705178977042432&distrib.from=https://127.0.0.1:62569/collection1/&wt=javabin&version=2}{deleteByQuery=*:* (-1556705178977042432)} 0 3
   [junit4]   2> 1143837 INFO  (qtp1998988379-9535) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&_version_=-1556705178977042432&distrib.from=https://127.0.0.1:62569/collection1/&wt=javabin&version=2}{deleteByQuery=*:* (-1556705178977042432)} 0 4
   [junit4]   2> 1143838 INFO  (qtp224525690-9505) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{deleteByQuery=*:* (-1556705178977042432)} 0 9
   [junit4]   2> 1143849 INFO  (qtp1771294570-9567) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:62569/collection1/&wt=javabin&version=2}{add=[0 (1556705178990673920)]} 0 3
   [junit4]   2> 1143849 INFO  (qtp1998988379-9536) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:62569/collection1/&wt=javabin&version=2}{add=[0 (1556705178990673920)]} 0 3
   [junit4]   2> 1143851 INFO  (qtp224525690-9498) [n:127.0.0.1:62569_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={_stateVer_=collection1:11&wt=javabin&version=2}{add=[0 (1556705178990673920)]} 0 8
   [junit4]   2> 1143860 INFO  (qtp1771294570-9568) [n:127.0.0.1:62604_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=https:/

[...truncated too long message...]

8_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy RecoveryStrategy has been closed
   [junit4]   2> 1333213 INFO  (recoveryExecutor-1990-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Finished recovery process, successful=[false]
   [junit4]   2> 1333213 INFO  (recoveryExecutor-1990-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore org.apache.solr.core.SolrCore@5d3431ab
   [junit4]   2> 1333213 INFO  (zkCallback-1992-thread-2-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 1333214 WARN  (recoveryExecutor-1990-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Stopping recovery for core=[collection1] coreNodeName=[core_node2]
   [junit4]   2> 1333242 INFO  (recoveryExecutor-1990-thread-1-processing-n:127.0.0.1:62588_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:62588_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.collection1
   [junit4]   2> 1333242 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.Overseer Overseer (id=97294072490426387-127.0.0.1:62588_-n_0000000004) closing
   [junit4]   2> 1333242 INFO  (OverseerStateUpdate-97294072490426387-127.0.0.1:62588_-n_0000000004) [n:127.0.0.1:62588_    ] o.a.s.c.Overseer Overseer Loop exiting : 127.0.0.1:62588_
   [junit4]   2> 1334755 WARN  (zkCallback-1992-thread-2-processing-n:127.0.0.1:62588_) [n:127.0.0.1:62588_    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 1334755 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 1334757 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.AbstractConnector Stopped ServerConnector@2a6ef246{SSL,[ssl, http/1.1]}{127.0.0.1:62588}
   [junit4]   2> 1334757 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@6a669cf{/,null,UNAVAILABLE}
   [junit4]   2> 1334758 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.ChaosMonkey monkey: stop shard! 62604
   [junit4]   2> 1334759 INFO  (TEST-PeerSyncReplicationTest.test-seed#[7150ECE2BD11873D]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:62557 62557
   [junit4]   2> 1335936 INFO  (Thread-1430) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:62557 62557
   [junit4]   2> 1338020 WARN  (Thread-1430) [    ] o.a.s.c.ZkTestServer Watch limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2> 	6	/solr/aliases.json
   [junit4]   2> 	5	/solr/security.json
   [junit4]   2> 	5	/solr/configs/conf1
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2> 	6	/solr/clusterstate.json
   [junit4]   2> 	6	/solr/clusterprops.json
   [junit4]   2> 	4	/solr/collections/collection1/state.json
   [junit4]   2> 	2	/solr/collections/collection1/leader_elect/shard1/election/97294072490426378-core_node1-n_0000000000
   [junit4]   2> 	2	/solr/overseer_elect/election/97294072490426378-127.0.0.1:62569_-n_0000000001
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2> 	207	/solr/overseer/collection-queue-work
   [junit4]   2> 	40	/solr/overseer/queue
   [junit4]   2> 	15	/solr/overseer/queue-work
   [junit4]   2> 	6	/solr/collections
   [junit4]   2> 	5	/solr/live_nodes
   [junit4]   2> 
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=PeerSyncReplicationTest -Dtests.method=test -Dtests.seed=7150ECE2BD11873D -Dtests.slow=true -Dtests.locale=it-IT -Dtests.timezone=Australia/Broken_Hill -Dtests.asserts=true -Dtests.file.encoding=ISO-8859-1
   [junit4] FAILURE  212s J1 | PeerSyncReplicationTest.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: timeout waiting to see all nodes active
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([7150ECE2BD11873D:F904D33813EDEAC5]:0)
   [junit4]    > 	at org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
   [junit4]    > 	at org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
   [junit4]    > 	at org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
   [junit4]    > 	at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
   [junit4]    > 	at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 1338023 INFO  (SUITE-PeerSyncReplicationTest-seed#[7150ECE2BD11873D]-worker) [    ] o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> NOTE: leaving temporary files on disk at: /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_7150ECE2BD11873D-001
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene70): {other_tl1=Lucene50(blocksize=128), range_facet_l_dv=PostingsFormat(name=Memory), rnd_s=Lucene50(blocksize=128), multiDefault=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), intDefault=Lucene50(blocksize=128), a_i1=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), range_facet_l=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), _version_=Lucene50(blocksize=128), a_t=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), id=PostingsFormat(name=Memory), range_facet_i_dv=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), text=PostingsFormat(name=LuceneFixedGap), timestamp=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128)))}, docValues:{range_facet_l_dv=DocValuesFormat(name=Lucene70), range_facet_i_dv=DocValuesFormat(name=Direct), timestamp=DocValuesFormat(name=Direct)}, maxPointsInLeafNode=734, maxMBSortInHeap=5.551656724994473, sim=RandomSimilarity(queryNorm=false): {}, locale=it-IT, timezone=Australia/Broken_Hill
   [junit4]   2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_102 (64-bit)/cpus=3,threads=1,free=91439784,total=269955072
   [junit4]   2> NOTE: All tests run in this JVM: [SliceStateTest, TestStressLiveNodes, BasicFunctionalityTest, BasicZkTest, QueryParsingTest, TestZkChroot, DeleteInactiveReplicaTest, RemoteQueryErrorTest, TestQueryUtils, TestUseDocValuesAsStored, MultiThreadedOCPTest, CdcrVersionReplicationTest, AtomicUpdatesTest, JSONWriterTest, TestJmxMonitoredMap, HLLUtilTest, SuggesterTSTTest, TestTrie, TestTrieFacet, TestAuthenticationFramework, TestUnifiedSolrHighlighter, TestDeleteCollectionOnDownNodes, ScriptEngineTest, LukeRequestHandlerTest, DistributedFacetPivotSmallAdvancedTest, TestFieldCache, BufferStoreTest, TestSolrQueryParserDefaultOperatorResource, SpellPossibilityIteratorTest, TestReversedWildcardFilterFactory, TestConfigReload, TestNonDefinedSimilarityFactory, HdfsChaosMonkeyNothingIsSafeTest, UUIDFieldTest, TestScoreJoinQPScore, TestPushWriter, PreAnalyzedFieldManagedSchemaCloudTest, TestBinaryResponseWriter, SolrCloudExampleTest, BasicDistributedZk2Test, TestFreeTextSuggestions, SpellingQueryConverterTest, ConjunctionSolrSpellCheckerTest, TestCollapseQParserPlugin, ExternalFileFieldSortTest, SolrMetricReporterTest, TestMiniSolrCloudCluster, TestDistribIDF, TestDistributedMissingSort, TestManagedSchemaAPI, RulesTest, TestUninvertingReader, TestSchemaResource, SpellCheckComponentTest, SimpleCollectionCreateDeleteTest, DateRangeFieldTest, DistributedTermsComponentTest, CurrencyFieldOpenExchangeTest, ClusterStateTest, TestSubQueryTransformerCrossCore, SolrIndexMetricsTest, TestSolrConfigHandlerConcurrent, FullHLLTest, TestSolrDynamicMBean, DistanceUnitsTest, DataDrivenBlockJoinTest, HdfsUnloadDistributedZkTest, TestDocSet, TestCodecSupport, HighlighterConfigTest, VersionInfoTest, TestMaxScoreQueryParser, TestAnalyzedSuggestions, BadComponentTest, DistanceFunctionTest, TestLMDirichletSimilarityFactory, SaslZkACLProviderTest, TestGraphMLResponseWriter, HdfsRestartWhileUpdatingTest, WrapperMergePolicyFactoryTest, OverseerModifyCollectionTest, CollectionsAPIDistributedZkTest, TestReplicationHandlerBackup, TestSearcherReuse, ZkStateReaderTest, TestRandomFaceting, TestOmitPositions, SuggestComponentContextFilterQueryTest, JavabinLoaderTest, CreateCollectionCleanupTest, TestGraphTermsQParserPlugin, TestSolrCloudSnapshots, BitVectorTest, AsyncCallRequestStatusResponseTest, PreAnalyzedUpdateProcessorTest, TestCorePropertiesReload, SolrGangliaReporterTest, RequestLoggingTest, SignatureUpdateProcessorFactoryTest, TestSolrQueryParser, PeerSyncReplicationTest]
   [junit4] Completed [199/678 (1!)] on J1 in 212.47s, 1 test, 1 failure <<< FAILURES!

[...truncated 64021 lines...]


[JENKINS] Lucene-Solr-master-MacOSX (64bit/jdk1.8.0) - Build # 3782 - Still Unstable!

Posted by Policeman Jenkins Server <je...@thetaphi.de>.
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-MacOSX/3782/
Java: 64bit/jdk1.8.0 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC

1 tests failed.
FAILED:  org.apache.solr.cloud.PeerSyncReplicationTest.test

Error Message:
timeout waiting to see all nodes active

Stack Trace:
java.lang.AssertionError: timeout waiting to see all nodes active
	at __randomizedtesting.SeedInfo.seed([3A83B26FB49563E2:B2D78DB51A690E1A]:0)
	at org.junit.Assert.fail(Assert.java:93)
	at org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
	at org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
	at org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
	at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:811)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:462)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
	at java.lang.Thread.run(Thread.java:745)




Build Log:
[...truncated 10976 lines...]
   [junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
   [junit4]   2> Creating dataDir: /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/init-core-data-001
   [junit4]   2> 416428 INFO  (SUITE-PeerSyncReplicationTest-seed#[3A83B26FB49563E2]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN) w/ MAC_OS_X suppressed clientAuth
   [junit4]   2> 416428 INFO  (SUITE-PeerSyncReplicationTest-seed#[3A83B26FB49563E2]-worker) [    ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   2> 416430 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 416430 INFO  (Thread-659) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 416430 INFO  (Thread-659) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 416532 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.ZkTestServer start zk server on port:54772
   [junit4]   2> 416571 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 416575 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/schema.xml to /configs/conf1/schema.xml
   [junit4]   2> 416578 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 416582 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 416585 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 416588 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 416591 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 416594 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 416597 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 416600 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 416603 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 416893 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/control-001/cores/collection1
   [junit4]   2> 416895 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 416897 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@47cdecdb{/,null,AVAILABLE}
   [junit4]   2> 416902 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@779c2433{HTTP/1.1,[http/1.1]}{127.0.0.1:54775}
   [junit4]   2> 416902 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.Server Started @422322ms
   [junit4]   2> 416902 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/tempDir-001/control/data, hostContext=/, hostPort=54775, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/control-001/cores}
   [junit4]   2> 416903 ERROR (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 416903 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 416903 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 416903 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 416903 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-16T09:02:49.629Z
   [junit4]   2> 416908 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 416908 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/control-001/solr.xml
   [junit4]   2> 416924 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:54772/solr
   [junit4]   2> 416971 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54775_    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:54775_
   [junit4]   2> 416973 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54775_    ] o.a.s.c.Overseer Overseer (id=97291951745138692-127.0.0.1:54775_-n_0000000000) starting
   [junit4]   2> 416987 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54775_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:54775_
   [junit4]   2> 416990 INFO  (zkCallback-498-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 417050 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54775_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/control-001/cores
   [junit4]   2> 417050 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54775_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 417058 INFO  (OverseerStateUpdate-97291951745138692-127.0.0.1:54775_-n_0000000000) [n:127.0.0.1:54775_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 418083 WARN  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 418084 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 418098 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 418176 WARN  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 418178 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 418198 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection control_collection
   [junit4]   2> 418198 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/control-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/control-001/cores/collection1/data/]
   [junit4]   2> 418199 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@38a7f3e
   [junit4]   2> 418202 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=43, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.18415998560661315]
   [junit4]   2> 418213 WARN  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 418231 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 418231 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 418232 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 418232 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 418232 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=46, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.155833533349231]
   [junit4]   2> 418233 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@1d05f8ad[collection1] main]
   [junit4]   2> 418235 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 418236 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 418236 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 418237 INFO  (searcherExecutor-1532-thread-1-processing-n:127.0.0.1:54775_ x:collection1 c:control_collection) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@1d05f8ad[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 418237 INFO  (coreLoadExecutor-1531-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_ c:control_collection   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556671229814898688
   [junit4]   2> 418250 INFO  (coreZkRegister-1524-thread-1-processing-n:127.0.0.1:54775_ x:collection1 c:control_collection) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 418250 INFO  (coreZkRegister-1524-thread-1-processing-n:127.0.0.1:54775_ x:collection1 c:control_collection) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 418250 INFO  (coreZkRegister-1524-thread-1-processing-n:127.0.0.1:54775_ x:collection1 c:control_collection) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:54775/collection1/
   [junit4]   2> 418250 INFO  (coreZkRegister-1524-thread-1-processing-n:127.0.0.1:54775_ x:collection1 c:control_collection) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 418250 INFO  (coreZkRegister-1524-thread-1-processing-n:127.0.0.1:54775_ x:collection1 c:control_collection) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:54775/collection1/ has no replicas
   [junit4]   2> 418258 INFO  (coreZkRegister-1524-thread-1-processing-n:127.0.0.1:54775_ x:collection1 c:control_collection) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:54775/collection1/ shard1
   [junit4]   2> 418433 INFO  (coreZkRegister-1524-thread-1-processing-n:127.0.0.1:54775_ x:collection1 c:control_collection) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 418712 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 418714 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:54772/solr ready
   [junit4]   2> 418714 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 419048 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-1-001/cores/collection1
   [junit4]   2> 419050 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-1-001
   [junit4]   2> 419051 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 419052 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7f62e700{/,null,AVAILABLE}
   [junit4]   2> 419054 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@7df6cb96{HTTP/1.1,[http/1.1]}{127.0.0.1:54784}
   [junit4]   2> 419054 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.Server Started @424474ms
   [junit4]   2> 419054 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/tempDir-001/jetty1, solrconfig=solrconfig.xml, hostContext=/, hostPort=54784, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-1-001/cores}
   [junit4]   2> 419054 ERROR (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 419055 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 419055 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 419055 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 419055 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-16T09:02:51.781Z
   [junit4]   2> 419060 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 419060 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-1-001/solr.xml
   [junit4]   2> 419078 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:54772/solr
   [junit4]   2> 419108 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54784_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 419125 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54784_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:54784_
   [junit4]   2> 419128 INFO  (zkCallback-498-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 419128 INFO  (zkCallback-502-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 419128 INFO  (zkCallback-507-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 419176 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54784_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-1-001/cores
   [junit4]   2> 419176 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54784_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 419180 INFO  (OverseerStateUpdate-97291951745138692-127.0.0.1:54775_-n_0000000000) [n:127.0.0.1:54775_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 420209 WARN  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 420209 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 420223 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 420313 WARN  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 420315 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 420336 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 420337 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-1-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-1-001/cores/collection1/data/]
   [junit4]   2> 420337 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@38a7f3e
   [junit4]   2> 420340 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=43, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.18415998560661315]
   [junit4]   2> 420352 WARN  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 420370 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 420370 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 420371 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 420371 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 420372 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=46, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.155833533349231]
   [junit4]   2> 420373 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@1067c920[collection1] main]
   [junit4]   2> 420375 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 420375 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 420375 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 420376 INFO  (searcherExecutor-1543-thread-1-processing-n:127.0.0.1:54784_ x:collection1 c:collection1) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@1067c920[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 420377 INFO  (coreLoadExecutor-1542-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556671232058851328
   [junit4]   2> 420388 INFO  (coreZkRegister-1537-thread-1-processing-n:127.0.0.1:54784_ x:collection1 c:collection1) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 420388 INFO  (coreZkRegister-1537-thread-1-processing-n:127.0.0.1:54784_ x:collection1 c:collection1) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 420388 INFO  (coreZkRegister-1537-thread-1-processing-n:127.0.0.1:54784_ x:collection1 c:collection1) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:54784/collection1/
   [junit4]   2> 420388 INFO  (coreZkRegister-1537-thread-1-processing-n:127.0.0.1:54784_ x:collection1 c:collection1) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 420388 INFO  (coreZkRegister-1537-thread-1-processing-n:127.0.0.1:54784_ x:collection1 c:collection1) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:54784/collection1/ has no replicas
   [junit4]   2> 420395 INFO  (coreZkRegister-1537-thread-1-processing-n:127.0.0.1:54784_ x:collection1 c:collection1) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:54784/collection1/ shard1
   [junit4]   2> 420559 INFO  (coreZkRegister-1537-thread-1-processing-n:127.0.0.1:54784_ x:collection1 c:collection1) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
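
(Editor's note: at this point the first replica has won the shard1 leader election with no peers to sync. As a rough, non-authoritative sketch only -- assuming the SolrJ CloudSolrClient API of this Solr generation and reusing the ZooKeeper address 127.0.0.1:54772/solr from the "Zookeeper client=" log lines above -- the current shard1 leader could be read out of the cluster state like this:)

    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.common.cloud.Replica;

    public class LeaderCheck {
      public static void main(String[] args) throws Exception {
        // ZooKeeper address taken from the "Zookeeper client=" log line above.
        try (CloudSolrClient cloud = new CloudSolrClient.Builder()
            .withZkHost("127.0.0.1:54772/solr").build()) {
          cloud.connect();
          // Look up the shard1 leader of collection1 in the live cluster state.
          Replica leader = cloud.getZkStateReader().getClusterState()
              .getCollection("collection1").getLeader("shard1");
          System.out.println("shard1 leader core URL: " + leader.getCoreUrl());
        }
      }
    }
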
   [junit4]   2> 421311 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-2-001/cores/collection1
   [junit4]   2> 421314 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-2-001
   [junit4]   2> 421314 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 421316 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@29ce0448{/,null,AVAILABLE}
   [junit4]   2> 421316 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@cba0b33{HTTP/1.1,[http/1.1]}{127.0.0.1:54793}
   [junit4]   2> 421316 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.Server Started @426736ms
   [junit4]   2> 421317 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/tempDir-001/jetty2, solrconfig=solrconfig.xml, hostContext=/, hostPort=54793, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-2-001/cores}
   [junit4]   2> 421317 ERROR (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 421318 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 421318 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 421318 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 421318 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-16T09:02:54.044Z
   [junit4]   2> 421322 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 421322 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-2-001/solr.xml
   [junit4]   2> 421370 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:54772/solr
   [junit4]   2> 421391 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54793_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 421401 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54793_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:54793_
   [junit4]   2> 421405 INFO  (zkCallback-498-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 421406 INFO  (zkCallback-513-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 421405 INFO  (zkCallback-507-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 421405 INFO  (zkCallback-502-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 421457 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54793_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-2-001/cores
   [junit4]   2> 421457 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54793_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 421460 INFO  (OverseerStateUpdate-97291951745138692-127.0.0.1:54775_-n_0000000000) [n:127.0.0.1:54775_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 422474 WARN  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 422474 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 422489 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 422569 WARN  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 422571 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 422591 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 422592 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-2-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-2-001/cores/collection1/data/]
   [junit4]   2> 422592 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@38a7f3e
   [junit4]   2> 422596 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=43, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.18415998560661315]
   [junit4]   2> 422611 WARN  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 422629 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 422629 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 422630 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 422630 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 422631 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=46, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.155833533349231]
   [junit4]   2> 422632 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@2c5dcf74[collection1] main]
   [junit4]   2> 422634 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 422635 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 422635 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 422636 INFO  (searcherExecutor-1554-thread-1-processing-n:127.0.0.1:54793_ x:collection1 c:collection1) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@2c5dcf74[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 422637 INFO  (coreLoadExecutor-1553-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556671234428633088
   [junit4]   2> 422642 INFO  (coreZkRegister-1548-thread-1-processing-n:127.0.0.1:54793_ x:collection1 c:collection1) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 422643 INFO  (updateExecutor-510-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 422643 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 422643 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 422643 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1]
   [junit4]   2> 422643 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 422643 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] as recovering, leader is [http://127.0.0.1:54784/collection1/] and I am [http://127.0.0.1:54793/collection1/]
   [junit4]   2> 422647 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to [http://127.0.0.1:54784]; [WaitForState: action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:54793_&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 422651 INFO  (qtp684236516-2957) [n:127.0.0.1:54784_    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 422652 INFO  (qtp684236516-2957) [n:127.0.0.1:54784_    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 (shard1 of collection1) have state: recovering
   [junit4]   2> 422652 INFO  (qtp684236516-2957) [n:127.0.0.1:54784_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=down, localState=active, nodeName=127.0.0.1:54793_, coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: core_node2:{"core":"collection1","base_url":"http://127.0.0.1:54793","node_name":"127.0.0.1:54793_","state":"down"}
   [junit4]   2> 423411 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-3-001/cores/collection1
   [junit4]   2> 423412 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-3-001
   [junit4]   2> 423413 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 423414 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@416c1f82{/,null,AVAILABLE}
   [junit4]   2> 423415 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@db9b4b3{HTTP/1.1,[http/1.1]}{127.0.0.1:54804}
   [junit4]   2> 423415 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.Server Started @428835ms
   [junit4]   2> 423415 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/tempDir-001/jetty3, solrconfig=solrconfig.xml, hostContext=/, hostPort=54804, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-3-001/cores}
   [junit4]   2> 423416 ERROR (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 423416 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 423416 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 423416 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 423416 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-16T09:02:56.142Z
   [junit4]   2> 423421 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 423421 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-3-001/solr.xml
   [junit4]   2> 423440 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:54772/solr
   [junit4]   2> 423458 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54804_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 423467 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54804_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:54804_
   [junit4]   2> 423470 INFO  (zkCallback-498-thread-1-processing-n:127.0.0.1:54775_) [n:127.0.0.1:54775_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 423470 INFO  (zkCallback-502-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 423470 INFO  (zkCallback-507-thread-1-processing-n:127.0.0.1:54784_) [n:127.0.0.1:54784_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 423471 INFO  (zkCallback-513-thread-1-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 423472 INFO  (zkCallback-520-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 423499 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54804_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-3-001/cores
   [junit4]   2> 423499 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [n:127.0.0.1:54804_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 423503 INFO  (OverseerStateUpdate-97291951745138692-127.0.0.1:54775_-n_0000000000) [n:127.0.0.1:54775_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 423652 INFO  (qtp684236516-2957) [n:127.0.0.1:54784_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:54793_, coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: core_node2:{"core":"collection1","base_url":"http://127.0.0.1:54793","node_name":"127.0.0.1:54793_","state":"recovering"}
   [junit4]   2> 423652 INFO  (qtp684236516-2957) [n:127.0.0.1:54784_    ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 423652 INFO  (qtp684236516-2957) [n:127.0.0.1:54784_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:54793_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=1001
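
(Editor's note: the HttpSolrCall line above records the exact PREPRECOVERY parameters served by the leader. Purely as an illustration -- host, port, core and coreNodeName are the ones in this log, and wt=json is substituted for wt=javabin so the reply is human-readable -- the same wait-for-state call could be reproduced with a plain HTTP GET from Java:)

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class PrepRecoveryCall {
      public static void main(String[] args) throws Exception {
        // Parameters copied from the /admin/cores PREPRECOVERY request logged above.
        String url = "http://127.0.0.1:54784/admin/cores?action=PREPRECOVERY"
            + "&core=collection1&nodeName=127.0.0.1:54793_&coreNodeName=core_node2"
            + "&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true&wt=json";
        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);  // the handler blocks until core_node2 is seen as "recovering"
          }
        }
      }
    }
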
   [junit4]   2> 424521 WARN  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 424522 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 424543 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 424646 WARN  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 424648 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 424681 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 424682 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-3-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001/shard-3-001/cores/collection1/data/]
   [junit4]   2> 424682 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@38a7f3e
   [junit4]   2> 424687 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=43, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.18415998560661315]
   [junit4]   2> 424702 WARN  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 424729 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 424729 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 424730 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 424730 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 424731 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=46, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.155833533349231]
   [junit4]   2> 424748 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@36ac8a16[collection1] main]
   [junit4]   2> 424750 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 424750 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 424751 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 424760 INFO  (searcherExecutor-1565-thread-1-processing-n:127.0.0.1:54804_ x:collection1 c:collection1) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@36ac8a16[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 424761 INFO  (coreLoadExecutor-1564-thread-1-processing-n:127.0.0.1:54804_) [n:127.0.0.1:54804_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556671236655808512
   [junit4]   2> 424769 INFO  (coreZkRegister-1559-thread-1-processing-n:127.0.0.1:54804_ x:collection1 c:collection1) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 424770 INFO  (updateExecutor-517-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 424770 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 424771 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 424771 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1]
   [junit4]   2> 424771 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 424771 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] as recovering, leader is [http://127.0.0.1:54784/collection1/] and I am [http://127.0.0.1:54804/collection1/]
   [junit4]   2> 424774 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to [http://127.0.0.1:54784]; [WaitForState: action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:54804_&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 424776 INFO  (qtp684236516-2961) [n:127.0.0.1:54784_    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 424776 INFO  (qtp684236516-2961) [n:127.0.0.1:54784_    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 (shard1 of collection1) have state: recovering
   [junit4]   2> 424776 INFO  (qtp684236516-2961) [n:127.0.0.1:54784_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=down, localState=active, nodeName=127.0.0.1:54804_, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"http://127.0.0.1:54804","node_name":"127.0.0.1:54804_","state":"down"}
   [junit4]   2> 425143 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 425143 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30 for each attempt
   [junit4]   2> 425143 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: collection1 failOnTimeout:true timeout (sec):30
   [junit4]   2> 425777 INFO  (qtp684236516-2961) [n:127.0.0.1:54784_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:54804_, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"http://127.0.0.1:54804","node_name":"127.0.0.1:54804_","state":"recovering"}
   [junit4]   2> 425777 INFO  (qtp684236516-2961) [n:127.0.0.1:54784_    ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 425777 INFO  (qtp684236516-2961) [n:127.0.0.1:54784_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:54804_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=1001
   [junit4]   2> 430653 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from [http://127.0.0.1:54784/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 430653 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 url=http://127.0.0.1:54793 START replicas=[http://127.0.0.1:54784/collection1/] nUpdates=1000
   [junit4]   2> 430657 INFO  (qtp684236516-2958) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 430657 INFO  (qtp684236516-2958) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/get params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 430658 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 430658 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a PeerSync 
   [junit4]   2> 430658 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 430658 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 430659 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 430659 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was successful.
   [junit4]   2> 430659 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during PeerSync.
   [junit4]   2> 430659 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 430659 INFO  (recoveryExecutor-511-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
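
(Editor's note: the PeerSync exchange above works by comparing index fingerprints -- the recovering replica asks the leader's /get handler for a fingerprint and compares it with its own; identical fingerprints mean no updates need to be fetched, hence "We are already in sync". A hedged SolrJ sketch of that fingerprint request, reusing the parameters from the logged /get call; the "fingerprint" response key is an assumption here:)

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class FingerprintProbe {
      public static void main(String[] args) throws Exception {
        // Leader core URL taken from the recovery log lines above.
        try (HttpSolrClient leader =
                 new HttpSolrClient.Builder("http://127.0.0.1:54784/collection1").build()) {
          SolrQuery q = new SolrQuery();
          q.setRequestHandler("/get");
          q.set("distrib", "false");
          // Long.MAX_VALUE = 9223372036854775807, matching the logged getFingerprint parameter.
          q.set("getFingerprint", Long.toString(Long.MAX_VALUE));
          QueryResponse rsp = leader.query(q);
          // Response key assumed to be "fingerprint"; compare with the IndexFingerprint output above.
          System.out.println(rsp.getResponse().get("fingerprint"));
        }
      }
    }
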
   [junit4]   2> 432778 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from [http://127.0.0.1:54784/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 432778 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 url=http://127.0.0.1:54804 START replicas=[http://127.0.0.1:54784/collection1/] nUpdates=1000
   [junit4]   2> 432781 INFO  (qtp684236516-2959) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 432781 INFO  (qtp684236516-2959) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/get params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2} status=0 QTime=0
   [junit4]   2> 432782 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 432782 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a PeerSync 
   [junit4]   2> 432782 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 432782 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 432783 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 432783 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was successful.
   [junit4]   2> 432783 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during PeerSync.
   [junit4]   2> 432783 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 432783 INFO  (recoveryExecutor-518-thread-1-processing-n:127.0.0.1:54804_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
   [junit4]   2> 433148 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 433152 INFO  (qtp1811798008-2920) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 433152 INFO  (qtp1811798008-2920) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 433153 INFO  (qtp1811798008-2920) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 433154 INFO  (qtp1811798008-2920) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 1
   [junit4]   2> 433164 INFO  (qtp684236516-2957) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 433164 INFO  (qtp684236516-2957) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 433165 INFO  (qtp3170611-2985) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 433165 INFO  (qtp3170611-2985) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 433165 INFO  (qtp365812254-3020) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 433165 INFO  (qtp365812254-3020) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 433165 INFO  (qtp3170611-2985) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 433165 INFO  (qtp365812254-3020) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 433167 INFO  (qtp3170611-2985) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:54784/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 2
   [junit4]   2> 433168 INFO  (qtp365812254-3020) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:54784/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 3
   [junit4]   2> 433169 INFO  (qtp684236516-2957) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 433170 INFO  (qtp684236516-2957) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:54784/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 8
   [junit4]   2> 433170 INFO  (qtp684236516-2962) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 15
   [junit4]   2> 433173 INFO  (qtp684236516-2955) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 433175 INFO  (qtp3170611-2986) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 433176 INFO  (qtp365812254-3022) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
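
(Editor's note: the three /select requests above are the test framework's shard-consistency check -- each replica is queried with distrib=false and rows=0 and the local numFound values are compared, all three reporting hits=0 here. A minimal sketch of the same comparison against the three replica URLs seen in this log:)

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    public class ShardConsistencyCheck {
      public static void main(String[] args) throws Exception {
        // Replica core URLs taken from the log above (leader first).
        String[] cores = {
            "http://127.0.0.1:54784/collection1",
            "http://127.0.0.1:54793/collection1",
            "http://127.0.0.1:54804/collection1"
        };
        SolrQuery q = new SolrQuery("*:*");
        q.set("distrib", "false");  // ask each core for its own local count only
        q.setRows(0);
        for (String core : cores) {
          try (HttpSolrClient client = new HttpSolrClient.Builder(core).build()) {
            long numFound = client.query(q).getResults().getNumFound();
            System.out.println(core + " numFound=" + numFound);
          }
        }
      }
    }
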
   [junit4]   2> 435182 INFO  (qtp1811798008-2921) [n:127.0.0.1:54775_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{deleteByQuery=*:* (-1556671247578824704)} 0 4
   [junit4]   2> 435192 INFO  (qtp3170611-2986) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&_version_=-1556671247584067584&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{deleteByQuery=*:* (-1556671247584067584)} 0 3
   [junit4]   2> 435193 INFO  (qtp365812254-3023) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&_version_=-1556671247584067584&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{deleteByQuery=*:* (-1556671247584067584)} 0 4
   [junit4]   2> 435193 INFO  (qtp684236516-2958) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{deleteByQuery=*:* (-1556671247584067584)} 0 10
   [junit4]   2> 435205 INFO  (qtp365812254-3024) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[0 (1556671247599796224)]} 0 2
   [junit4]   2> 435205 INFO  (qtp3170611-2989) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[0 (1556671247599796224)]} 0 3
   [junit4]   2> 435206 INFO  (qtp684236516-2960) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{add=[0 (1556671247599796224)]} 0 7
   [junit4]   2> 435209 INFO  (qtp3170611-2990) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[1 (1556671247609233408)]} 0 0
   [junit4]   2> 435209 INFO  (qtp365812254-3025) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[1 (1556671247609233408)]} 0 0
   [junit4]   2> 435210 INFO  (qtp684236516-2959) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{add=[1 (1556671247609233408)]} 0 2
   [junit4]   2> 435212 INFO  (qtp365812254-3018) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[2 (1556671247613427712)]} 0 0
   [junit4]   2> 435212 INFO  (qtp3170611-2983) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[2 (1556671247613427712)]} 0 0
   [junit4]   2> 435212 INFO  (qtp684236516-2961) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{add=[2 (1556671247613427712)]} 0 1
   [junit4]   2> 435214 INFO  (qtp3170611-2985) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[3 (1556671247615524864)]} 0 0
   [junit4]   2> 435214 INFO  (qtp365812254-3020) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[3 (1556671247615524864)]} 0 0
   [junit4]   2> 435215 INFO  (qtp684236516-2957) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{add=[3 (1556671247615524864)]} 0 1
   [junit4]   2> 435216 INFO  (qtp3170611-2985) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[4 (1556671247617622016)]} 0 0
   [junit4]   2> 435217 INFO  (qtp365812254-3021) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[4 (1556671247617622016)]} 0 0
   [junit4]   2> 435218 INFO  (qtp684236516-2962) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{add=[4 (1556671247617622016)]} 0 2
   [junit4]   2> 435220 INFO  (qtp365812254-3021) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[5 (1556671247620767744)]} 0 0
   [junit4]   2> 435220 INFO  (qtp3170611-2985) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[5 (1556671247620767744)]} 0 0
   [junit4]   2> 435221 INFO  (qtp684236516-2955) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{add=[5 (1556671247620767744)]} 0 2
   [junit4]   2> 435222 INFO  (qtp3170611-2985) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[6 (1556671247623913472)]} 0 0
   [junit4]   2> 435223 INFO  (qtp365812254-3021) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[6 (1556671247623913472)]} 0 0
   [junit4]   2> 435223 INFO  (qtp684236516-2958) [n:127.0.0.1:54784_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{add=[6 (1556671247623913472)]} 0 1
   [junit4]   2> 435225 INFO  (qtp3170611-2989) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[7 (1556671247627059200)]} 0 0
   [junit4]   2> 435226 INFO  (qtp365812254-3024) [n:127.0.0.1:54804_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:54784/collection1/&wt=javabin&version=2}{add=[7 (1556671247627059200)]} 0 0
   [junit4]   2> 435227 INFO  

[...truncated too long message...]

   [junit4]   2> 	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:134)
   [junit4]   2> 	at org.eclipse.jetty.server.Server.handle(Server.java:534)
   [junit4]   2> 	at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:320)
   [junit4]   2> 	at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:251)
   [junit4]   2> 	at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:273)
   [junit4]   2> 	at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:95)
   [junit4]   2> 	at org.eclipse.jetty.io.SelectChannelEndPoint$2.run(SelectChannelEndPoint.java:93)
   [junit4]   2> 	at org.eclipse.jetty.util.thread.strategy.ExecuteProduceConsume.executeProduceConsume(ExecuteProduceConsume.java:303)
   [junit4]   2> 	at org.eclipse.jetty.util.thread.strategy.ExecuteProduceConsume.produceConsume(ExecuteProduceConsume.java:148)
   [junit4]   2> 	at org.eclipse.jetty.util.thread.strategy.ExecuteProduceConsume.run(ExecuteProduceConsume.java:136)
   [junit4]   2> 	at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:671)
   [junit4]   2> 	at org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:589)
   [junit4]   2> 	at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 
   [junit4]   2> 621892 INFO  (qtp684236516-3114) [n:127.0.0.1:54784_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:54793_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=400 QTime=113621
   [junit4]   2> 621892 INFO  (qtp684236516-3117) [n:127.0.0.1:54784_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:54793_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=400 QTime=103614
   [junit4]   2> 621899 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.ChaosMonkey monkey: stop shard! 54793
   [junit4]   2> 621899 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=1567797234
   [junit4]   2> 621900 WARN  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.RecoveryStrategy Stopping recovery for core=[collection1] coreNodeName=[core_node2]
   [junit4]   2> 621904 WARN  (updateExecutor-524-thread-2-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Skipping recovery because Solr is shutdown
   [junit4]   2> 626203 INFO  (recoveryExecutor-525-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy RecoveryStrategy has been closed
   [junit4]   2> 626203 INFO  (recoveryExecutor-525-thread-1-processing-n:127.0.0.1:54793_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Finished recovery process, successful=[false]
   [junit4]   2> 626204 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.Overseer Overseer (id=97291951745138705-127.0.0.1:54793_-n_0000000004) closing
   [junit4]   2> 626204 INFO  (OverseerStateUpdate-97291951745138705-127.0.0.1:54793_-n_0000000004) [n:127.0.0.1:54793_    ] o.a.s.c.Overseer Overseer Loop exiting : 127.0.0.1:54793_
   [junit4]   2> 626205 INFO  (zkCallback-527-thread-4-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore org.apache.solr.core.SolrCore@424011e1
   [junit4]   2> 626206 WARN  (zkCallback-527-thread-4-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Stopping recovery for core=[collection1] coreNodeName=[core_node2]
   [junit4]   2> 626208 WARN  (zkCallback-527-thread-2-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 626408 INFO  (zkCallback-527-thread-4-processing-n:127.0.0.1:54793_) [n:127.0.0.1:54793_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.collection1
   [junit4]   2> 626409 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 626410 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.AbstractConnector Stopped ServerConnector@3ec2091c{HTTP/1.1,[http/1.1]}{127.0.0.1:54793}
   [junit4]   2> 626411 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@55c995e{/,null,UNAVAILABLE}
   [junit4]   2> 626412 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.ChaosMonkey monkey: stop shard! 54804
   [junit4]   2> 626413 INFO  (TEST-PeerSyncReplicationTest.test-seed#[3A83B26FB49563E2]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:54772 54772
   [junit4]   2> 626421 INFO  (Thread-659) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:54772 54772
   [junit4]   2> 634444 WARN  (Thread-659) [    ] o.a.s.c.ZkTestServer Watch limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2> 	6	/solr/aliases.json
   [junit4]   2> 	6	/solr/clusterprops.json
   [junit4]   2> 	5	/solr/security.json
   [junit4]   2> 	5	/solr/configs/conf1
   [junit4]   2> 	4	/solr/collections/collection1/state.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2> 	6	/solr/clusterstate.json
   [junit4]   2> 	2	/solr/overseer_elect/election/97291951745138696-127.0.0.1:54784_-n_0000000001
   [junit4]   2> 	2	/solr/collections/collection1/leader_elect/shard1/election/97291951745138696-core_node1-n_0000000000
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2> 	207	/solr/overseer/collection-queue-work
   [junit4]   2> 	45	/solr/overseer/queue
   [junit4]   2> 	7	/solr/overseer/queue-work
   [junit4]   2> 	6	/solr/collections
   [junit4]   2> 	5	/solr/live_nodes
   [junit4]   2> 
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=PeerSyncReplicationTest -Dtests.method=test -Dtests.seed=3A83B26FB49563E2 -Dtests.slow=true -Dtests.locale=it-IT -Dtests.timezone=America/Guatemala -Dtests.asserts=true -Dtests.file.encoding=ISO-8859-1
   [junit4] FAILURE  218s J1 | PeerSyncReplicationTest.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: timeout waiting to see all nodes active
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([3A83B26FB49563E2:B2D78DB51A690E1A]:0)
   [junit4]    > 	at org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
   [junit4]    > 	at org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
   [junit4]    > 	at org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
   [junit4]    > 	at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
   [junit4]    > 	at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 634448 INFO  (SUITE-PeerSyncReplicationTest-seed#[3A83B26FB49563E2]-worker) [    ] o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> NOTE: leaving temporary files on disk at: /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_3A83B26FB49563E2-001
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene70), sim=RandomSimilarity(queryNorm=true): {}, locale=it-IT, timezone=America/Guatemala
   [junit4]   2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_102 (64-bit)/cpus=3,threads=1,free=49485784,total=171249664
   [junit4]   2> NOTE: All tests run in this JVM: [ClassificationUpdateProcessorIntegrationTest, TestDistributedMissingSort, TestWriterPerf, NotRequiredUniqueKeyTest, SolrCloudExampleTest, TestFiltering, EchoParamsTest, UpdateParamsTest, OpenCloseCoreStressTest, ConjunctionSolrSpellCheckerTest, TestTrie, TestUniqueKeyFieldResource, PreAnalyzedFieldManagedSchemaCloudTest, TestShortCircuitedRequests, CoreAdminRequestStatusTest, SecurityConfHandlerTest, TestRealTimeGet, TestDownShardTolerantSearch, TestStandardQParsers, AnalyticsMergeStrategyTest, FieldAnalysisRequestHandlerTest, TestSolrDynamicMBean, CollectionReloadTest, TestIndexSearcher, DistribCursorPagingTest, HttpSolrCallGetCoreTest, DefaultValueUpdateProcessorTest, TestMiniSolrCloudClusterSSL, TestSolr4Spatial, TestCoreDiscovery, TestNRTOpen, BasicFunctionalityTest, FullHLLTest, CoreAdminHandlerTest, VersionInfoTest, HLLUtilTest, TestFieldCacheSanityChecker, SaslZkACLProviderTest, UUIDFieldTest, RegexBoostProcessorTest, DataDrivenBlockJoinTest, RulesTest, BJQParserTest, TestBinaryResponseWriter, SolrIndexMetricsTest, SuggesterTSTTest, TestDefaultSearchFieldResource, TestCloudInspectUtil, PeerSyncReplicationTest]
   [junit4] Completed [104/678 (1!)] on J1 in 218.04s, 1 test, 1 failure <<< FAILURES!

[...truncated 64328 lines...]
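
Both failures in this thread come out of the same wait-then-assert sequence: the test polls the cluster until every replica reports itself active (the "timeout waiting to see all nodes active" branch in the log above) and only then asserts which node won the shard1 leader election (the "PeerSynced node did not become leader" branch in the summaries). For orientation only, here is a minimal, self-contained sketch of that polling step using plain JDK classes; the class name WaitForActiveSketch, the waitFor helper, the 500 ms poll interval and the 10 second deadline are illustrative assumptions and are not taken from PeerSyncReplicationTest.

    import java.util.concurrent.TimeUnit;
    import java.util.function.BooleanSupplier;

    /**
     * Illustrative sketch only: mirrors the "wait until all nodes are active,
     * else fail with a timeout" pattern implied by the stack traces above.
     * It is not the actual PeerSyncReplicationTest code.
     */
    public class WaitForActiveSketch {

        /** Polls the condition until it returns true or the deadline passes. */
        static boolean waitFor(BooleanSupplier condition, long timeout, TimeUnit unit)
                throws InterruptedException {
            final long deadline = System.nanoTime() + unit.toNanos(timeout);
            while (System.nanoTime() < deadline) {
                if (condition.getAsBoolean()) {
                    return true;          // every replica reported active in time
                }
                Thread.sleep(500);        // hypothetical back-off before re-checking
            }
            return false;                 // corresponds to "timeout waiting to see all nodes active"
        }

        public static void main(String[] args) throws InterruptedException {
            final long start = System.currentTimeMillis();
            // Stand-in condition: pretend the cluster becomes healthy after ~2 seconds.
            boolean active = waitFor(() -> System.currentTimeMillis() - start > 2_000,
                                     10, TimeUnit.SECONDS);
            if (!active) {
                throw new AssertionError("timeout waiting to see all nodes active");
            }
            System.out.println("all nodes active");
        }
    }

In the real test the condition presumably reads replica state from ZooKeeper rather than a stub supplier, so a slow CI node can plausibly push it past the deadline, which would be consistent with the 218.04s runtime reported for the failing run.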


[JENKINS] Lucene-Solr-master-MacOSX (64bit/jdk1.8.0) - Build # 3781 - Still Unstable!

Posted by Policeman Jenkins Server <je...@thetaphi.de>.
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-MacOSX/3781/
Java: 64bit/jdk1.8.0 -XX:+UseCompressedOops -XX:+UseParallelGC

1 tests failed.
FAILED:  org.apache.solr.cloud.PeerSyncReplicationTest.test

Error Message:
PeerSynced node did not become leader expected:<CloudJettyRunner [url=http://127.0.0.1:64262/lq_fx/collection1]> but was:<CloudJettyRunner [url=http://127.0.0.1:64258/lq_fx/collection1]>

Stack Trace:
java.lang.AssertionError: PeerSynced node did not become leader expected:<CloudJettyRunner [url=http://127.0.0.1:64262/lq_fx/collection1]> but was:<CloudJettyRunner [url=http://127.0.0.1:64258/lq_fx/collection1]>
	at __randomizedtesting.SeedInfo.seed([66390243AC03ACF0:EE6D3D9902FFC108]:0)
	at org.junit.Assert.fail(Assert.java:93)
	at org.junit.Assert.failNotEquals(Assert.java:647)
	at org.junit.Assert.assertEquals(Assert.java:128)
	at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:162)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:811)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:462)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
	at java.lang.Thread.run(Thread.java:745)




Build Log:
[...truncated 10693 lines...]
   [junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
   [junit4]   2> Creating dataDir: /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/init-core-data-001
   [junit4]   2> 112366 INFO  (SUITE-PeerSyncReplicationTest-seed#[66390243AC03ACF0]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN) w/ MAC_OS_X supressed clientAuth
   [junit4]   2> 112366 INFO  (SUITE-PeerSyncReplicationTest-seed#[66390243AC03ACF0]-worker) [    ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /lq_fx/
   [junit4]   2> 112369 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 112369 INFO  (Thread-80) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 112369 INFO  (Thread-80) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 112474 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.ZkTestServer start zk server on port:64250
   [junit4]   2> 112508 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 112513 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/schema.xml to /configs/conf1/schema.xml
   [junit4]   2> 112517 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 112520 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 112523 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 112526 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 112529 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 112532 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 112535 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 112539 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 112542 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 112837 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/control-001/cores/collection1
   [junit4]   2> 112840 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 112843 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@253c16dd{/lq_fx,null,AVAILABLE}
   [junit4]   2> 112845 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@3a9ddca{HTTP/1.1,[http/1.1]}{127.0.0.1:64253}
   [junit4]   2> 112845 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.Server Started @118696ms
   [junit4]   2> 112845 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/tempDir-001/control/data, hostContext=/lq_fx, hostPort=64253, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/control-001/cores}
   [junit4]   2> 112845 ERROR (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 112845 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 112845 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 112845 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 112846 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T21:50:49.902Z
   [junit4]   2> 112852 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 112852 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/control-001/solr.xml
   [junit4]   2> 112866 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:64250/solr
   [junit4]   2> 112918 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:64253_lq_fx
   [junit4]   2> 112920 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.Overseer Overseer (id=97289309351247876-127.0.0.1:64253_lq_fx-n_0000000000) starting
   [junit4]   2> 112940 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:64253_lq_fx
   [junit4]   2> 112942 INFO  (zkCallback-74-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 113026 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/control-001/cores
   [junit4]   2> 113026 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 113032 INFO  (OverseerStateUpdate-97289309351247876-127.0.0.1:64253_lq_fx-n_0000000000) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 114051 WARN  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 114051 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 114064 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 114138 WARN  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 114140 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 114162 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection control_collection
   [junit4]   2> 114184 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/control-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/control-001/cores/collection1/data/]
   [junit4]   2> 114184 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@754dfb5e
   [junit4]   2> 114188 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=50, maxMergeAtOnceExplicit=18, maxMergedSegmentMB=67.0595703125, floorSegmentMB=0.7548828125, forceMergeDeletesPctAllowed=21.08628022637968, segmentsPerTier=32.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0
   [junit4]   2> 114201 WARN  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 114217 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 114217 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 114218 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 114218 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 114218 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=24, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.4692800368962414]
   [junit4]   2> 114220 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@59f78cb7[collection1] main]
   [junit4]   2> 114222 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 114223 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 114223 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 114224 INFO  (searcherExecutor-234-thread-1-processing-n:127.0.0.1:64253_lq_fx x:collection1 c:control_collection) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@59f78cb7[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 114225 INFO  (coreLoadExecutor-233-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx c:control_collection   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556628951564025856
   [junit4]   2> 114238 INFO  (coreZkRegister-226-thread-1-processing-n:127.0.0.1:64253_lq_fx x:collection1 c:control_collection) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 114238 INFO  (coreZkRegister-226-thread-1-processing-n:127.0.0.1:64253_lq_fx x:collection1 c:control_collection) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 114238 INFO  (coreZkRegister-226-thread-1-processing-n:127.0.0.1:64253_lq_fx x:collection1 c:control_collection) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:64253/lq_fx/collection1/
   [junit4]   2> 114238 INFO  (coreZkRegister-226-thread-1-processing-n:127.0.0.1:64253_lq_fx x:collection1 c:control_collection) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 114238 INFO  (coreZkRegister-226-thread-1-processing-n:127.0.0.1:64253_lq_fx x:collection1 c:control_collection) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:64253/lq_fx/collection1/ has no replicas
   [junit4]   2> 114246 INFO  (coreZkRegister-226-thread-1-processing-n:127.0.0.1:64253_lq_fx x:collection1 c:control_collection) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:64253/lq_fx/collection1/ shard1
   [junit4]   2> 114402 INFO  (coreZkRegister-226-thread-1-processing-n:127.0.0.1:64253_lq_fx x:collection1 c:control_collection) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 114576 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 114578 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:64250/solr ready
   [junit4]   2> 114578 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 114878 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-1-001/cores/collection1
   [junit4]   2> 114879 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-1-001
   [junit4]   2> 114880 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 114882 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@337241d7{/lq_fx,null,AVAILABLE}
   [junit4]   2> 114882 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@1c8d94c6{HTTP/1.1,[http/1.1]}{127.0.0.1:64258}
   [junit4]   2> 114883 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.Server Started @120735ms
   [junit4]   2> 114883 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/tempDir-001/jetty1, solrconfig=solrconfig.xml, hostContext=/lq_fx, hostPort=64258, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-1-001/cores}
   [junit4]   2> 114883 ERROR (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 114884 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 114884 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 114884 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 114884 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T21:50:51.940Z
   [junit4]   2> 114889 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 114889 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-1-001/solr.xml
   [junit4]   2> 114912 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:64250/solr
   [junit4]   2> 114934 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64258_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 114945 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64258_lq_fx    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:64258_lq_fx
   [junit4]   2> 114949 INFO  (zkCallback-74-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 114950 INFO  (zkCallback-83-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 114950 INFO  (zkCallback-78-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 114998 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64258_lq_fx    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-1-001/cores
   [junit4]   2> 114998 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64258_lq_fx    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 115002 INFO  (OverseerStateUpdate-97289309351247876-127.0.0.1:64253_lq_fx-n_0000000000) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 116028 WARN  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 116028 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 116041 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 116117 WARN  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 116121 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 116144 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 116144 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-1-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-1-001/cores/collection1/data/]
   [junit4]   2> 116144 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@754dfb5e
   [junit4]   2> 116148 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=50, maxMergeAtOnceExplicit=18, maxMergedSegmentMB=67.0595703125, floorSegmentMB=0.7548828125, forceMergeDeletesPctAllowed=21.08628022637968, segmentsPerTier=32.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0
   [junit4]   2> 116174 WARN  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 116193 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 116193 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 116194 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 116194 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 116195 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=24, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.4692800368962414]
   [junit4]   2> 116198 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@1fc6449f[collection1] main]
   [junit4]   2> 116200 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 116201 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 116201 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 116202 INFO  (searcherExecutor-245-thread-1-processing-n:127.0.0.1:64258_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@1fc6449f[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 116203 INFO  (coreLoadExecutor-244-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556628953638109184
   [junit4]   2> 116215 INFO  (coreZkRegister-239-thread-1-processing-n:127.0.0.1:64258_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 116215 INFO  (coreZkRegister-239-thread-1-processing-n:127.0.0.1:64258_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 116215 INFO  (coreZkRegister-239-thread-1-processing-n:127.0.0.1:64258_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:64258/lq_fx/collection1/
   [junit4]   2> 116215 INFO  (coreZkRegister-239-thread-1-processing-n:127.0.0.1:64258_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 116215 INFO  (coreZkRegister-239-thread-1-processing-n:127.0.0.1:64258_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:64258/lq_fx/collection1/ has no replicas
   [junit4]   2> 116222 INFO  (coreZkRegister-239-thread-1-processing-n:127.0.0.1:64258_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:64258/lq_fx/collection1/ shard1
   [junit4]   2> 116379 INFO  (coreZkRegister-239-thread-1-processing-n:127.0.0.1:64258_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 116849 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-2-001/cores/collection1
   [junit4]   2> 116851 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-2-001
   [junit4]   2> 116852 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 116854 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@5689c452{/lq_fx,null,AVAILABLE}
   [junit4]   2> 116854 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@109335b1{HTTP/1.1,[http/1.1]}{127.0.0.1:64262}
   [junit4]   2> 116854 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.Server Started @122706ms
   [junit4]   2> 116854 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/tempDir-001/jetty2, solrconfig=solrconfig.xml, hostContext=/lq_fx, hostPort=64262, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-2-001/cores}
   [junit4]   2> 116855 ERROR (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 116856 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 116856 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 116856 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 116856 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T21:50:53.912Z
   [junit4]   2> 116862 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 116862 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-2-001/solr.xml
   [junit4]   2> 116875 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:64250/solr
   [junit4]   2> 116897 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 116907 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:64262_lq_fx
   [junit4]   2> 116910 INFO  (zkCallback-78-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 116911 INFO  (zkCallback-89-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 116910 INFO  (zkCallback-74-thread-3-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 116910 INFO  (zkCallback-83-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 116953 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-2-001/cores
   [junit4]   2> 116953 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 116958 INFO  (OverseerStateUpdate-97289309351247876-127.0.0.1:64253_lq_fx-n_0000000000) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 117982 WARN  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 117983 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 117995 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 118060 WARN  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 118069 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 118091 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 118092 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-2-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-2-001/cores/collection1/data/]
   [junit4]   2> 118092 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@754dfb5e
   [junit4]   2> 118095 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=50, maxMergeAtOnceExplicit=18, maxMergedSegmentMB=67.0595703125, floorSegmentMB=0.7548828125, forceMergeDeletesPctAllowed=21.08628022637968, segmentsPerTier=32.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0
   [junit4]   2> 118108 WARN  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 118581 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 118581 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 118582 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 118582 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 118583 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=24, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.4692800368962414]
   [junit4]   2> 118585 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@5019506c[collection1] main]
   [junit4]   2> 118587 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 118588 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 118588 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 118589 INFO  (searcherExecutor-256-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@5019506c[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 118590 INFO  (coreLoadExecutor-255-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556628956141060096
   [junit4]   2> 118597 INFO  (coreZkRegister-250-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 118597 INFO  (updateExecutor-86-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 118598 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 118598 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 118598 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1]
   [junit4]   2> 118598 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 118598 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] as recovering, leader is [http://127.0.0.1:64258/lq_fx/collection1/] and I am [http://127.0.0.1:64262/lq_fx/collection1/]
   [junit4]   2> 118601 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to [http://127.0.0.1:64258/lq_fx]; [WaitForState: action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:64262_lq_fx&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 118606 INFO  (qtp1889428961-486) [n:127.0.0.1:64258_lq_fx    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 118607 INFO  (qtp1889428961-486) [n:127.0.0.1:64258_lq_fx    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 (shard1 of collection1) have state: recovering
   [junit4]   2> 118607 INFO  (qtp1889428961-486) [n:127.0.0.1:64258_lq_fx    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:64262_lq_fx, coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: core_node2:{"core":"collection1","base_url":"http://127.0.0.1:64262/lq_fx","node_name":"127.0.0.1:64262_lq_fx","state":"recovering"}
   [junit4]   2> 118607 INFO  (qtp1889428961-486) [n:127.0.0.1:64258_lq_fx    ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: true for: 0 seconds.
   [junit4]   2> 118607 INFO  (qtp1889428961-486) [n:127.0.0.1:64258_lq_fx    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:64262_lq_fx&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 119290 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-3-001/cores/collection1
   [junit4]   2> 119300 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-3-001
   [junit4]   2> 119301 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 119305 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4a8f0564{/lq_fx,null,AVAILABLE}
   [junit4]   2> 119307 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@6a602a5c{HTTP/1.1,[http/1.1]}{127.0.0.1:64267}
   [junit4]   2> 119307 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.Server Started @125158ms
   [junit4]   2> 119307 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/tempDir-001/jetty3, solrconfig=solrconfig.xml, hostContext=/lq_fx, hostPort=64267, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-3-001/cores}
   [junit4]   2> 119308 ERROR (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 119308 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 119309 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 119309 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 119309 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T21:50:56.365Z
   [junit4]   2> 119317 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 119317 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-3-001/solr.xml
   [junit4]   2> 119329 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:64250/solr
   [junit4]   2> 119353 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64267_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 119366 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64267_lq_fx    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:64267_lq_fx
   [junit4]   2> 119370 INFO  (zkCallback-83-thread-1-processing-n:127.0.0.1:64258_lq_fx) [n:127.0.0.1:64258_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 119371 INFO  (zkCallback-89-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 119371 INFO  (zkCallback-78-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 119371 INFO  (zkCallback-74-thread-2-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 119372 INFO  (zkCallback-96-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 119436 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64267_lq_fx    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-3-001/cores
   [junit4]   2> 119436 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [n:127.0.0.1:64267_lq_fx    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 119442 INFO  (OverseerStateUpdate-97289309351247876-127.0.0.1:64253_lq_fx-n_0000000000) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 120459 WARN  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 120459 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 120472 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 120558 WARN  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 120560 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 120583 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 120583 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-3-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001/shard-3-001/cores/collection1/data/]
   [junit4]   2> 120583 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@754dfb5e
   [junit4]   2> 120586 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=50, maxMergeAtOnceExplicit=18, maxMergedSegmentMB=67.0595703125, floorSegmentMB=0.7548828125, forceMergeDeletesPctAllowed=21.08628022637968, segmentsPerTier=32.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0
   [junit4]   2> 120599 WARN  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 120621 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 120621 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 120624 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 120624 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 120625 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=24, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.4692800368962414]
   [junit4]   2> 120636 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@40f47f73[collection1] main]
   [junit4]   2> 120638 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 120639 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 120639 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 120640 INFO  (searcherExecutor-267-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@40f47f73[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 120641 INFO  (coreLoadExecutor-266-thread-1-processing-n:127.0.0.1:64267_lq_fx) [n:127.0.0.1:64267_lq_fx c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556628958291689472
   [junit4]   2> 120647 INFO  (coreZkRegister-261-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 c:collection1) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 120648 INFO  (updateExecutor-93-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 120648 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 120648 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 120649 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1]
   [junit4]   2> 120649 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 120649 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] as recovering, leader is [http://127.0.0.1:64258/lq_fx/collection1/] and I am [http://127.0.0.1:64267/lq_fx/collection1/]
   [junit4]   2> 120651 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to [http://127.0.0.1:64258/lq_fx]; [WaitForState: action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:64267_lq_fx&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 120653 INFO  (qtp1889428961-489) [n:127.0.0.1:64258_lq_fx    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 120654 INFO  (qtp1889428961-489) [n:127.0.0.1:64258_lq_fx    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 (shard1 of collection1) have state: recovering
   [junit4]   2> 120654 INFO  (qtp1889428961-489) [n:127.0.0.1:64258_lq_fx    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=down, localState=active, nodeName=127.0.0.1:64267_lq_fx, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"http://127.0.0.1:64267/lq_fx","node_name":"127.0.0.1:64267_lq_fx","state":"down"}
   [junit4]   2> 120980 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 120980 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30 for each attempt
   [junit4]   2> 120980 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: collection1 failOnTimeout:true timeout (sec):30
   [junit4]   2> 121659 INFO  (qtp1889428961-489) [n:127.0.0.1:64258_lq_fx    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:64267_lq_fx, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"http://127.0.0.1:64267/lq_fx","node_name":"127.0.0.1:64267_lq_fx","state":"recovering"}
   [junit4]   2> 121659 INFO  (qtp1889428961-489) [n:127.0.0.1:64258_lq_fx    ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 121659 INFO  (qtp1889428961-489) [n:127.0.0.1:64258_lq_fx    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:64267_lq_fx&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=1005
   [junit4]   2> 125614 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from [http://127.0.0.1:64258/lq_fx/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 125614 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 url=http://127.0.0.1:64262/lq_fx START replicas=[http://127.0.0.1:64258/lq_fx/collection1/] nUpdates=1000
   [junit4]   2> 125618 INFO  (qtp1889428961-486) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:1.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 125618 INFO  (qtp1889428961-486) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp=/lq_fx path=/get params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 125619 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 125619 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a PeerSync 
   [junit4]   2> 125619 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 125619 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 125620 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 125620 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was successful.
   [junit4]   2> 125620 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during PeerSync.
   [junit4]   2> 125620 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 125620 INFO  (recoveryExecutor-87-thread-1-processing-n:127.0.0.1:64262_lq_fx x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
   [junit4]   2> 128670 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from [http://127.0.0.1:64258/lq_fx/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 128671 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 url=http://127.0.0.1:64267/lq_fx START replicas=[http://127.0.0.1:64258/lq_fx/collection1/] nUpdates=1000
   [junit4]   2> 128673 INFO  (qtp1889428961-488) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 128673 INFO  (qtp1889428961-488) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp=/lq_fx path=/get params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2} status=0 QTime=0
   [junit4]   2> 128675 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:1.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 128675 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a PeerSync 
   [junit4]   2> 128675 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 128675 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 128676 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 128676 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was successful.
   [junit4]   2> 128676 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during PeerSync.
   [junit4]   2> 128676 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 128676 INFO  (recoveryExecutor-94-thread-1-processing-n:127.0.0.1:64267_lq_fx x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
   [junit4]   2> 129012 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 129016 INFO  (qtp461909716-449) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 129016 INFO  (qtp461909716-449) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 129018 INFO  (qtp461909716-449) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 129018 INFO  (qtp461909716-449) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 2
   [junit4]   2> 129024 INFO  (qtp1889428961-484) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 129024 INFO  (qtp1889428961-484) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 129025 INFO  (qtp1889428961-484) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 129026 INFO  (qtp1889428961-484) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 2
   [junit4]   2> 129027 INFO  (qtp1241384-549) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 129028 INFO  (qtp911078381-514) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 129028 INFO  (qtp1241384-549) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 129029 INFO  (qtp1241384-549) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 129029 INFO  (qtp911078381-514) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 129029 INFO  (qtp1241384-549) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 2
   [junit4]   2> 129030 INFO  (qtp911078381-514) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 129030 INFO  (qtp911078381-514) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 2
   [junit4]   2> 129032 INFO  (qtp1889428961-490) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 12
   [junit4]   2> 129035 INFO  (qtp1889428961-491) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp=/lq_fx path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 129037 INFO  (qtp911078381-515) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request [collection1]  webapp=/lq_fx path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 129038 INFO  (qtp1241384-550) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request [collection1]  webapp=/lq_fx path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 131052 INFO  (qtp461909716-450) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={wt=javabin&version=2}{deleteByQuery=*:* (-1556628969202122752)} 0 5
   [junit4]   2> 131058 INFO  (qtp1241384-551) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&_version_=-1556628969209462784&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{deleteByQuery=*:* (-1556628969209462784)} 0 2
   [junit4]   2> 131060 INFO  (qtp911078381-515) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&_version_=-1556628969209462784&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{deleteByQuery=*:* (-1556628969209462784)} 0 3
   [junit4]   2> 131060 INFO  (qtp1889428961-486) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={wt=javabin&version=2}{deleteByQuery=*:* (-1556628969209462784)} 0 7
   [junit4]   2> 131085 INFO  (qtp911078381-517) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[0 (1556628969222045696)]} 0 12
   [junit4]   2> 131085 INFO  (qtp1241384-552) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[0 (1556628969222045696)]} 0 12
   [junit4]   2> 131086 INFO  (qtp1889428961-487) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={wt=javabin&version=2}{add=[0 (1556628969222045696)]} 0 21
   [junit4]   2> 131098 INFO  (qtp1241384-553) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[1 (1556628969246162944)]} 0 4
   [junit4]   2> 131101 INFO  (qtp911078381-518) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[1 (1556628969246162944)]} 0 3
   [junit4]   2> 131102 INFO  (qtp1889428961-488) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={wt=javabin&version=2}{add=[1 (1556628969246162944)]} 0 14
   [junit4]   2> 131105 INFO  (qtp911078381-519) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[2 (1556628969261891584)]} 0 0
   [junit4]   2> 131105 INFO  (qtp1241384-554) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[2 (1556628969261891584)]} 0 0
   [junit4]   2> 131106 INFO  (qtp1889428961-489) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={wt=javabin&version=2}{add=[2 (1556628969261891584)]} 0 3
   [junit4]   2> 131110 INFO  (qtp911078381-519) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[3 (1556628969268183040)]} 0 0
   [junit4]   2> 131111 INFO  (qtp1241384-554) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[3 (1556628969268183040)]} 0 0
   [junit4]   2> 131112 INFO  (qtp1889428961-484) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={wt=javabin&version=2}{add=[3 (1556628969268183040)]} 0 2
   [junit4]   2> 131114 INFO  (qtp911078381-519) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[4 (1556628969272377344)]} 0 0
   [junit4]   2> 131115 INFO  (qtp1241384-554) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[4 (1556628969272377344)]} 0 0
   [junit4]   2> 131115 INFO  (qtp1889428961-490) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={wt=javabin&version=2}{add=[4 (1556628969272377344)]} 0 2
   [junit4]   2> 131118 INFO  (qtp911078381-519) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[5 (1556628969275523072)]} 0 0
   [junit4]   2> 131118 INFO  (qtp1241384-554) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[5 (1556628969275523072)]} 0 0
   [junit4]   2> 131119 INFO  (qtp1889428961-491) [n:127.0.0.1:64258_lq_fx c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={wt=javabin&version=2}{add=[5 (1556628969275523072)]} 0 2
   [junit4]   2> 131122 INFO  (qtp911078381-519) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/lq_fx path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:64258/lq_fx/collection1/&wt=javabin&version=2}{add=[6 (1556628969279717376)]} 0 0
   [junit4]   2> 131122 INFO  (qtp1241384-554) [n:127.0.0.1:64267_lq_fx c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [coll

[...truncated too long message...]

ner instance=1879300464
   [junit4]   2> 149504 INFO  (coreCloseExecutor-289-thread-1) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore org.apache.solr.core.SolrCore@1dc5829e
   [junit4]   2> 149582 INFO  (coreCloseExecutor-289-thread-1) [n:127.0.0.1:64253_lq_fx c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.collection1
   [junit4]   2> 149583 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.Overseer Overseer (id=97289309351247876-127.0.0.1:64253_lq_fx-n_0000000000) closing
   [junit4]   2> 149583 INFO  (OverseerStateUpdate-97289309351247876-127.0.0.1:64253_lq_fx-n_0000000000) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.Overseer Overseer Loop exiting : 127.0.0.1:64253_lq_fx
   [junit4]   2> 149586 WARN  (zkCallback-74-thread-1-processing-n:127.0.0.1:64253_lq_fx) [n:127.0.0.1:64253_lq_fx    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 149587 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 149587 INFO  (zkCallback-103-thread-1-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (1)
   [junit4]   2> 149588 INFO  (zkCallback-103-thread-3-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:64262_lq_fx
   [junit4]   2> 149588 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.AbstractConnector Stopped ServerConnector@3a9ddca{HTTP/1.1,[http/1.1]}{127.0.0.1:0}
   [junit4]   2> 149589 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@253c16dd{/lq_fx,null,UNAVAILABLE}
   [junit4]   2> 149590 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.ChaosMonkey monkey: stop shard! 64258
   [junit4]   2> 149590 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.ChaosMonkey monkey: stop shard! 64262
   [junit4]   2> 149590 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=790397287
   [junit4]   2> 149591 INFO  (zkCallback-103-thread-3-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.Overseer Overseer (id=97289309351247889-127.0.0.1:64262_lq_fx-n_0000000004) starting
   [junit4]   2> 149604 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.Overseer Overseer (id=97289309351247889-127.0.0.1:64262_lq_fx-n_0000000004) closing
   [junit4]   2> 149605 INFO  (OverseerStateUpdate-97289309351247889-127.0.0.1:64262_lq_fx-n_0000000004) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.Overseer According to ZK I (id=97289309351247889-127.0.0.1:64262_lq_fx-n_0000000004) am no longer a leader.
   [junit4]   2> 149605 INFO  (OverseerStateUpdate-97289309351247889-127.0.0.1:64262_lq_fx-n_0000000004) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.Overseer Overseer Loop exiting : 127.0.0.1:64262_lq_fx
   [junit4]   2> 149606 INFO  (OverseerCollectionConfigSetProcessor-97289309351247889-127.0.0.1:64262_lq_fx-n_0000000004) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.OverseerTaskProcessor According to ZK I (id=97289309351247889-127.0.0.1:64262_lq_fx-n_0000000004) am no longer a leader.
   [junit4]   2> 149608 WARN  (zkCallback-103-thread-3-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx    ] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for /live_nodes]
   [junit4]   2> 151935 WARN  (zkCallback-103-thread-2-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.SyncStrategy Closed, skipping sync up.
   [junit4]   2> 151936 INFO  (zkCallback-103-thread-2-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore org.apache.solr.core.SolrCore@763b4dda
   [junit4]   2> 151995 INFO  (zkCallback-103-thread-2-processing-n:127.0.0.1:64262_lq_fx) [n:127.0.0.1:64262_lq_fx c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.collection1
   [junit4]   2> 151995 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 151997 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.AbstractConnector Stopped ServerConnector@439ca663{HTTP/1.1,[http/1.1]}{127.0.0.1:64262}
   [junit4]   2> 151997 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@1de1f76d{/lq_fx,null,UNAVAILABLE}
   [junit4]   2> 151998 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.ChaosMonkey monkey: stop shard! 64267
   [junit4]   2> 152000 INFO  (TEST-PeerSyncReplicationTest.test-seed#[66390243AC03ACF0]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:64250 64250
   [junit4]   2> 153123 INFO  (Thread-80) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:64250 64250
   [junit4]   2> 157291 WARN  (Thread-80) [    ] o.a.s.c.ZkTestServer Watch limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2> 	6	/solr/aliases.json
   [junit4]   2> 	6	/solr/clusterprops.json
   [junit4]   2> 	5	/solr/security.json
   [junit4]   2> 	5	/solr/configs/conf1
   [junit4]   2> 	4	/solr/collections/collection1/state.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2> 	6	/solr/clusterstate.json
   [junit4]   2> 	2	/solr/collections/collection1/leader_elect/shard1/election/97289309351247880-core_node1-n_0000000000
   [junit4]   2> 	2	/solr/overseer_elect/election/97289309351247876-127.0.0.1:64253_lq_fx-n_0000000000
   [junit4]   2> 	2	/solr/overseer_elect/election/97289309351247880-127.0.0.1:64258_lq_fx-n_0000000001
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2> 	37	/solr/overseer/collection-queue-work
   [junit4]   2> 	36	/solr/overseer/queue
   [junit4]   2> 	6	/solr/collections
   [junit4]   2> 	6	/solr/overseer/queue-work
   [junit4]   2> 	5	/solr/live_nodes
   [junit4]   2> 
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=PeerSyncReplicationTest -Dtests.method=test -Dtests.seed=66390243AC03ACF0 -Dtests.slow=true -Dtests.locale=id -Dtests.timezone=America/Juneau -Dtests.asserts=true -Dtests.file.encoding=UTF-8
   [junit4] FAILURE 44.9s J1 | PeerSyncReplicationTest.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: PeerSynced node did not become leader expected:<CloudJettyRunner [url=http://127.0.0.1:64262/lq_fx/collection1]> but was:<CloudJettyRunner [url=http://127.0.0.1:64258/lq_fx/collection1]>
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([66390243AC03ACF0:EE6D3D9902FFC108]:0)
   [junit4]    > 	at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:162)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
   [junit4]    > 	at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 157301 INFO  (SUITE-PeerSyncReplicationTest-seed#[66390243AC03ACF0]-worker) [    ] o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> NOTE: leaving temporary files on disk at: /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_66390243AC03ACF0-001
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene70): {other_tl1=PostingsFormat(name=Asserting), range_facet_l_dv=PostingsFormat(name=Direct), rnd_s=PostingsFormat(name=Asserting), multiDefault=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), intDefault=PostingsFormat(name=Asserting), a_i1=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), range_facet_l=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), _version_=PostingsFormat(name=Asserting), a_t=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), id=PostingsFormat(name=Direct), range_facet_i_dv=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), text=Lucene50(blocksize=128), timestamp=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128)))}, docValues:{range_facet_l_dv=DocValuesFormat(name=Lucene70), range_facet_i_dv=DocValuesFormat(name=Memory), timestamp=DocValuesFormat(name=Memory)}, maxPointsInLeafNode=1706, maxMBSortInHeap=7.164267977330658, sim=RandomSimilarity(queryNorm=false): {}, locale=id, timezone=America/Juneau
   [junit4]   2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_102 (64-bit)/cpus=3,threads=1,free=126304736,total=287309824
   [junit4]   2> NOTE: All tests run in this JVM: [TestSuggestSpellingConverter, TestFieldSortValues, TestReload, SpellingQueryConverterTest, BasicFunctionalityTest, FullHLLTest, BigEndianAscendingWordDeserializerTest, AutoCommitTest, BasicDistributedZk2Test, PeerSyncReplicationTest]
   [junit4] Completed [18/678 (1!)] on J1 in 44.96s, 1 test, 1 failure <<< FAILURES!
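
For readers not familiar with this test: the assertion that fails at PeerSyncReplicationTest.java:162 compares the CloudJettyRunner the test expects to become the shard1 leader (the node that was peer-synced) with the runner that actually won the leader election; the mismatch above means a different Jetty instance took over leadership. A minimal, self-contained sketch of that style of check, using the URLs from the failure message and hypothetical variable names (this is not the test's actual source; it assumes JUnit 4 on the classpath):

    import static org.junit.Assert.assertEquals;

    public class LeaderAssertionSketch {
        public static void main(String[] args) {
            // Stand-ins for the test's CloudJettyRunner objects; the URLs below are
            // copied from the failure message above and differ on every run.
            String peerSyncedNode = "http://127.0.0.1:64262/lq_fx/collection1"; // expected leader
            String electedLeader  = "http://127.0.0.1:64258/lq_fx/collection1"; // leader actually reported
            // Mirrors the shape of the assertEquals at PeerSyncReplicationTest.java:162;
            // with these values it throws the same "expected ... but was ..." AssertionError.
            assertEquals("PeerSynced node did not become leader", peerSyncedNode, electedLeader);
        }
    }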

[...truncated 64609 lines...]


[JENKINS] Lucene-Solr-master-MacOSX (64bit/jdk1.8.0) - Build # 3780 - Still Unstable!

Posted by Policeman Jenkins Server <je...@thetaphi.de>.
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-MacOSX/3780/
Java: 64bit/jdk1.8.0 -XX:+UseCompressedOops -XX:+UseSerialGC

1 tests failed.
FAILED:  org.apache.solr.cloud.PeerSyncReplicationTest.test

Error Message:
PeerSynced node did not become leader expected:<CloudJettyRunner [url=http://127.0.0.1:65090/collection1]> but was:<CloudJettyRunner [url=http://127.0.0.1:65072/collection1]>

Stack Trace:
java.lang.AssertionError: PeerSynced node did not become leader expected:<CloudJettyRunner [url=http://127.0.0.1:65090/collection1]> but was:<CloudJettyRunner [url=http://127.0.0.1:65072/collection1]>
	at __randomizedtesting.SeedInfo.seed([72865ACCC763AB5A:FAD26516699FC6A2]:0)
	at org.junit.Assert.fail(Assert.java:93)
	at org.junit.Assert.failNotEquals(Assert.java:647)
	at org.junit.Assert.assertEquals(Assert.java:128)
	at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:162)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:811)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:462)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
	at java.lang.Thread.run(Thread.java:745)




Build Log:
[...truncated 12225 lines...]
   [junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
   [junit4]   2> Creating dataDir: /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/init-core-data-001
   [junit4]   2> 2933534 INFO  (SUITE-PeerSyncReplicationTest-seed#[72865ACCC763AB5A]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN) w/ MAC_OS_X supressed clientAuth
   [junit4]   2> 2933535 INFO  (SUITE-PeerSyncReplicationTest-seed#[72865ACCC763AB5A]-worker) [    ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   2> 2933537 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 2933538 INFO  (Thread-3427) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 2933538 INFO  (Thread-3427) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 2933638 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.ZkTestServer start zk server on port:65036
   [junit4]   2> 2933681 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 2933748 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/schema.xml to /configs/conf1/schema.xml
   [junit4]   2> 2933863 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 2933888 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 2933914 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 2933945 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 2933984 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 2934106 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 2934124 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 2934177 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 2934199 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractZkTestCase put /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 2940751 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/control-001/cores/collection1
   [junit4]   2> 2940753 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 2940755 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@5166932a{/,null,AVAILABLE}
   [junit4]   2> 2940756 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@50e6c192{HTTP/1.1,[http/1.1]}{127.0.0.1:65065}
   [junit4]   2> 2940757 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.Server Started @2945888ms
   [junit4]   2> 2940757 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/tempDir-001/control/data, hostContext=/, hostPort=65065, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/control-001/cores}
   [junit4]   2> 2940757 ERROR (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2940757 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 2940757 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2940757 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2940757 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T13:08:37.386Z
   [junit4]   2> 2940763 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 2940763 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/control-001/solr.xml
   [junit4]   2> 2940779 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:65036/solr
   [junit4]   2> 2940825 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65065_    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:65065_
   [junit4]   2> 2940828 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65065_    ] o.a.s.c.Overseer Overseer (id=97287255502028804-127.0.0.1:65065_-n_0000000000) starting
   [junit4]   2> 2940845 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65065_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:65065_
   [junit4]   2> 2940850 INFO  (zkCallback-3482-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2940954 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65065_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/control-001/cores
   [junit4]   2> 2940954 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65065_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2940959 INFO  (OverseerStateUpdate-97287255502028804-127.0.0.1:65065_-n_0000000000) [n:127.0.0.1:65065_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 2941977 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 2941988 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2942080 WARN  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 2942082 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 2942106 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection control_collection
   [junit4]   2> 2942107 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/control-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/control-001/cores/collection1/data/]
   [junit4]   2> 2942107 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@2ecfb666
   [junit4]   2> 2942110 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=29, maxMergeAtOnceExplicit=19, maxMergedSegmentMB=53.861328125, floorSegmentMB=2.001953125, forceMergeDeletesPctAllowed=24.710170746652157, segmentsPerTier=44.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.5909260113380911
   [junit4]   2> 2942137 WARN  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 2942154 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 2942154 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2942156 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2942156 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2942157 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=45, maxMergeAtOnceExplicit=31, maxMergedSegmentMB=61.2568359375, floorSegmentMB=0.6806640625, forceMergeDeletesPctAllowed=12.492219559968152, segmentsPerTier=49.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
   [junit4]   2> 2942159 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@63802dad[collection1] main]
   [junit4]   2> 2942161 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 2942162 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2942162 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 2942163 INFO  (searcherExecutor-7944-thread-1-processing-n:127.0.0.1:65065_ x:collection1 c:control_collection) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@63802dad[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2942165 INFO  (coreLoadExecutor-7943-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_ c:control_collection   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556596097070137344
   [junit4]   2> 2942180 INFO  (coreZkRegister-7936-thread-1-processing-n:127.0.0.1:65065_ x:collection1 c:control_collection) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 2942180 INFO  (coreZkRegister-7936-thread-1-processing-n:127.0.0.1:65065_ x:collection1 c:control_collection) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 2942180 INFO  (coreZkRegister-7936-thread-1-processing-n:127.0.0.1:65065_ x:collection1 c:control_collection) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:65065/collection1/
   [junit4]   2> 2942180 INFO  (coreZkRegister-7936-thread-1-processing-n:127.0.0.1:65065_ x:collection1 c:control_collection) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 2942180 INFO  (coreZkRegister-7936-thread-1-processing-n:127.0.0.1:65065_ x:collection1 c:control_collection) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:65065/collection1/ has no replicas
   [junit4]   2> 2942189 INFO  (coreZkRegister-7936-thread-1-processing-n:127.0.0.1:65065_ x:collection1 c:control_collection) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:65065/collection1/ shard1
   [junit4]   2> 2942347 INFO  (coreZkRegister-7936-thread-1-processing-n:127.0.0.1:65065_ x:collection1 c:control_collection) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 2942521 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2942552 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:65036/solr ready
   [junit4]   2> 2942552 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 2942552 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Creating collection1 with stateFormat=2
   [junit4]   2> 2943058 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-1-001/cores/collection1
   [junit4]   2> 2943060 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-1-001
   [junit4]   2> 2943061 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 2943063 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@688ee93b{/,null,AVAILABLE}
   [junit4]   2> 2943063 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@21351b43{HTTP/1.1,[http/1.1]}{127.0.0.1:65072}
   [junit4]   2> 2943064 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.Server Started @2948196ms
   [junit4]   2> 2943064 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/tempDir-001/jetty1, solrconfig=solrconfig.xml, hostContext=/, hostPort=65072, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-1-001/cores}
   [junit4]   2> 2943065 ERROR (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2943066 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 2943066 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2943066 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2943066 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T13:08:39.695Z
   [junit4]   2> 2943073 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 2943073 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-1-001/solr.xml
   [junit4]   2> 2943117 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:65036/solr
   [junit4]   2> 2943185 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2943196 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65072_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:65072_
   [junit4]   2> 2943203 INFO  (zkCallback-3482-thread-1-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2943214 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2943204 INFO  (zkCallback-3486-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2943256 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65072_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-1-001/cores
   [junit4]   2> 2943256 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65072_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2943276 INFO  (OverseerStateUpdate-97287255502028804-127.0.0.1:65065_-n_0000000000) [n:127.0.0.1:65065_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 2943423 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 2944301 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 2944312 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2944382 WARN  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 2944384 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 2944403 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 2944404 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-1-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-1-001/cores/collection1/data/]
   [junit4]   2> 2944404 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@2ecfb666
   [junit4]   2> 2944406 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=29, maxMergeAtOnceExplicit=19, maxMergedSegmentMB=53.861328125, floorSegmentMB=2.001953125, forceMergeDeletesPctAllowed=24.710170746652157, segmentsPerTier=44.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.5909260113380911
   [junit4]   2> 2944421 WARN  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 2944437 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 2944437 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2944438 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2944438 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2944439 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=45, maxMergeAtOnceExplicit=31, maxMergedSegmentMB=61.2568359375, floorSegmentMB=0.6806640625, forceMergeDeletesPctAllowed=12.492219559968152, segmentsPerTier=49.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
   [junit4]   2> 2944440 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@5cf7360f[collection1] main]
   [junit4]   2> 2944442 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 2944442 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2944443 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 2944444 INFO  (searcherExecutor-7955-thread-1-processing-n:127.0.0.1:65072_ x:collection1 c:collection1) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@5cf7360f[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2944445 INFO  (coreLoadExecutor-7954-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556596099460890624
   [junit4]   2> 2944458 INFO  (coreZkRegister-7949-thread-1-processing-n:127.0.0.1:65072_ x:collection1 c:collection1) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 2944458 INFO  (coreZkRegister-7949-thread-1-processing-n:127.0.0.1:65072_ x:collection1 c:collection1) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 2944458 INFO  (coreZkRegister-7949-thread-1-processing-n:127.0.0.1:65072_ x:collection1 c:collection1) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:65072/collection1/
   [junit4]   2> 2944458 INFO  (coreZkRegister-7949-thread-1-processing-n:127.0.0.1:65072_ x:collection1 c:collection1) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 2944458 INFO  (coreZkRegister-7949-thread-1-processing-n:127.0.0.1:65072_ x:collection1 c:collection1) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:65072/collection1/ has no replicas
   [junit4]   2> 2944466 INFO  (coreZkRegister-7949-thread-1-processing-n:127.0.0.1:65072_ x:collection1 c:collection1) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:65072/collection1/ shard1
   [junit4]   2> 2944575 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 2944631 INFO  (coreZkRegister-7949-thread-1-processing-n:127.0.0.1:65072_ x:collection1 c:collection1) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 2944748 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 2945281 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-2-001/cores/collection1
   [junit4]   2> 2945283 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-2-001
   [junit4]   2> 2945284 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 2945287 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@8687513{/,null,AVAILABLE}
   [junit4]   2> 2945287 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@538dff5a{HTTP/1.1,[http/1.1]}{127.0.0.1:65090}
   [junit4]   2> 2945288 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.Server Started @2950419ms
   [junit4]   2> 2945288 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/tempDir-001/jetty2, solrconfig=solrconfig.xml, hostContext=/, hostPort=65090, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-2-001/cores}
   [junit4]   2> 2945288 ERROR (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2945289 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 2945289 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2945289 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2945289 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T13:08:41.918Z
   [junit4]   2> 2945294 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 2945294 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-2-001/solr.xml
   [junit4]   2> 2945305 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:65036/solr
   [junit4]   2> 2945345 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65090_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 2945357 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65090_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:65090_
   [junit4]   2> 2945360 INFO  (zkCallback-3482-thread-3-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2945360 INFO  (zkCallback-3486-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2945360 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2945361 INFO  (zkCallback-3498-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2945415 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65090_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-2-001/cores
   [junit4]   2> 2945415 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65090_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2945444 INFO  (OverseerStateUpdate-97287255502028804-127.0.0.1:65065_-n_0000000000) [n:127.0.0.1:65065_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 2945555 INFO  (zkCallback-3498-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 2945555 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 2946463 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 2946474 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2946589 WARN  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 2946591 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 2946641 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 2946642 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-2-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-2-001/cores/collection1/data/]
   [junit4]   2> 2946642 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@2ecfb666
   [junit4]   2> 2946647 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=29, maxMergeAtOnceExplicit=19, maxMergedSegmentMB=53.861328125, floorSegmentMB=2.001953125, forceMergeDeletesPctAllowed=24.710170746652157, segmentsPerTier=44.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.5909260113380911
   [junit4]   2> 2946699 WARN  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 2946731 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 2946731 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2946732 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2946733 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2946734 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=45, maxMergeAtOnceExplicit=31, maxMergedSegmentMB=61.2568359375, floorSegmentMB=0.6806640625, forceMergeDeletesPctAllowed=12.492219559968152, segmentsPerTier=49.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
   [junit4]   2> 2946736 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@66e2f23d[collection1] main]
   [junit4]   2> 2946738 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 2946739 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2946739 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 2946742 INFO  (searcherExecutor-7966-thread-1-processing-n:127.0.0.1:65090_ x:collection1 c:collection1) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@66e2f23d[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2946743 INFO  (coreLoadExecutor-7965-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556596101870518272
   [junit4]   2> 2946750 INFO  (coreZkRegister-7960-thread-1-processing-n:127.0.0.1:65090_ x:collection1 c:collection1) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 2946751 INFO  (updateExecutor-3495-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 2946752 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 2946753 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 2946753 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1]
   [junit4]   2> 2946753 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 2946753 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] as recovering, leader is [http://127.0.0.1:65072/collection1/] and I am [http://127.0.0.1:65090/collection1/]
   [junit4]   2> 2946757 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to [http://127.0.0.1:65072]; [WaitForState: action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:65090_&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 2946762 INFO  (qtp640814278-18686) [n:127.0.0.1:65072_    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 2946763 INFO  (qtp640814278-18686) [n:127.0.0.1:65072_    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 (shard1 of collection1) have state: recovering
   [junit4]   2> 2946763 INFO  (qtp640814278-18686) [n:127.0.0.1:65072_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=down, localState=active, nodeName=127.0.0.1:65090_, coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: core_node2:{"core":"collection1","base_url":"http://127.0.0.1:65090","node_name":"127.0.0.1:65090_","state":"down"}
   [junit4]   2> 2946870 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 2946870 INFO  (zkCallback-3498-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 2947260 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-3-001/cores/collection1
   [junit4]   2> 2947262 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-3-001
   [junit4]   2> 2947263 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 2947265 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7ffba7db{/,null,AVAILABLE}
   [junit4]   2> 2947265 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@6eb118e4{HTTP/1.1,[http/1.1]}{127.0.0.1:65101}
   [junit4]   2> 2947265 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.e.j.s.Server Started @2952397ms
   [junit4]   2> 2947265 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/tempDir-001/jetty3, solrconfig=solrconfig.xml, hostContext=/, hostPort=65101, coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-3-001/cores}
   [junit4]   2> 2947266 ERROR (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2947267 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.0.0
   [junit4]   2> 2947268 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2947268 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2947268 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2017-01-15T13:08:43.897Z
   [junit4]   2> 2947273 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 2947273 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-3-001/solr.xml
   [junit4]   2> 2947287 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:65036/solr
   [junit4]   2> 2947309 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65101_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 2947322 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65101_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:65101_
   [junit4]   2> 2947327 INFO  (zkCallback-3498-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 2947327 INFO  (zkCallback-3482-thread-2-processing-n:127.0.0.1:65065_) [n:127.0.0.1:65065_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 2947329 INFO  (zkCallback-3505-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 2947329 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 2947329 INFO  (zkCallback-3486-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 2947438 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65101_    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-3-001/cores
   [junit4]   2> 2947438 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [n:127.0.0.1:65101_    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2947441 INFO  (zkCallback-3498-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2947441 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2947447 INFO  (OverseerStateUpdate-97287255502028804-127.0.0.1:65065_-n_0000000000) [n:127.0.0.1:65065_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard shard=shard1
   [junit4]   2> 2947555 INFO  (zkCallback-3498-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2947555 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2947555 INFO  (zkCallback-3505-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2947764 INFO  (qtp640814278-18686) [n:127.0.0.1:65072_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:65090_, coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: core_node2:{"core":"collection1","base_url":"http://127.0.0.1:65090","node_name":"127.0.0.1:65090_","state":"recovering"}
   [junit4]   2> 2947764 INFO  (qtp640814278-18686) [n:127.0.0.1:65072_    ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 2947765 INFO  (qtp640814278-18686) [n:127.0.0.1:65072_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:65090_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=1002
   [junit4]   2> 2948466 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 2948526 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2948628 WARN  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.s.IndexSchema [collection1] default search field in schema is text. WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 2948630 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 2948653 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 2948654 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-3-001/cores/collection1], dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001/shard-3-001/cores/collection1/data/]
   [junit4]   2> 2948654 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX Server: com.sun.jmx.mbeanserver.JmxMBeanServer@2ecfb666
   [junit4]   2> 2948659 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=29, maxMergeAtOnceExplicit=19, maxMergedSegmentMB=53.861328125, floorSegmentMB=2.001953125, forceMergeDeletesPctAllowed=24.710170746652157, segmentsPerTier=44.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.5909260113380911
   [junit4]   2> 2948683 WARN  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 2948706 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 2948706 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2948707 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2948707 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2948708 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=45, maxMergeAtOnceExplicit=31, maxMergedSegmentMB=61.2568359375, floorSegmentMB=0.6806640625, forceMergeDeletesPctAllowed=12.492219559968152, segmentsPerTier=49.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
   [junit4]   2> 2948710 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@1d19d748[collection1] main]
   [junit4]   2> 2948713 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 2948713 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2948713 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 2948715 INFO  (searcherExecutor-7977-thread-1-processing-n:127.0.0.1:65101_ x:collection1 c:collection1) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@1d19d748[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2948716 INFO  (coreLoadExecutor-7976-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1556596103939358720
   [junit4]   2> 2948722 INFO  (coreZkRegister-7971-thread-1-processing-n:127.0.0.1:65101_ x:collection1 c:collection1) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 2948723 INFO  (updateExecutor-3502-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 2948723 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 2948723 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 2948723 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1]
   [junit4]   2> 2948724 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 2948724 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core [collection1] as recovering, leader is [http://127.0.0.1:65072/collection1/] and I am [http://127.0.0.1:65101/collection1/]
   [junit4]   2> 2948728 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery command to [http://127.0.0.1:65072]; [WaitForState: action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:65101_&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 2948729 INFO  (qtp640814278-18688) [n:127.0.0.1:65072_    ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 2948731 INFO  (qtp640814278-18688) [n:127.0.0.1:65072_    ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 (shard1 of collection1) have state: recovering
   [junit4]   2> 2948731 INFO  (qtp640814278-18688) [n:127.0.0.1:65072_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=down, localState=active, nodeName=127.0.0.1:65101_, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"http://127.0.0.1:65101","node_name":"127.0.0.1:65101_","state":"down"}
   [junit4]   2> 2948836 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 2948837 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30 for each attempt
   [junit4]   2> 2948837 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: collection1 failOnTimeout:true timeout (sec):30
   [junit4]   2> 2948839 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2948839 INFO  (zkCallback-3498-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2948839 INFO  (zkCallback-3505-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2949734 INFO  (qtp640814278-18688) [n:127.0.0.1:65072_    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:65101_, coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: core_node3:{"core":"collection1","base_url":"http://127.0.0.1:65101","node_name":"127.0.0.1:65101_","state":"recovering"}
   [junit4]   2> 2949734 INFO  (qtp640814278-18688) [n:127.0.0.1:65072_    ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 2949735 INFO  (qtp640814278-18688) [n:127.0.0.1:65072_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:65101_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=1005
   [junit4]   2> 2954778 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from [http://127.0.0.1:65072/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 2954778 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 url=http://127.0.0.1:65090 START replicas=[http://127.0.0.1:65072/collection1/] nUpdates=1000
   [junit4]   2> 2954796 INFO  (qtp640814278-18691) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 2954796 INFO  (qtp640814278-18691) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/get params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 2954803 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:5.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 2954803 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a PeerSync 
   [junit4]   2> 2954803 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2954803 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 2954805 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2954810 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was successful.
   [junit4]   2> 2954810 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during PeerSync.
   [junit4]   2> 2954810 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 2954810 INFO  (recoveryExecutor-3496-thread-1-processing-n:127.0.0.1:65090_ x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
   [junit4]   2> 2954823 INFO  (zkCallback-3498-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2954823 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2954823 INFO  (zkCallback-3505-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2956737 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync from [http://127.0.0.1:65072/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 2956738 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 url=http://127.0.0.1:65101 START replicas=[http://127.0.0.1:65072/collection1/] nUpdates=1000
   [junit4]   2> 2956741 INFO  (qtp640814278-18687) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 2956741 INFO  (qtp640814278-18687) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/get params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2} status=0 QTime=0
   [junit4]   2> 2956742 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, maxDoc=0}
   [junit4]   2> 2956742 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to do a PeerSync 
   [junit4]   2> 2956742 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2956742 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 2956743 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2956743 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery was successful.
   [junit4]   2> 2956743 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered during PeerSync.
   [junit4]   2> 2956743 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 2956743 INFO  (recoveryExecutor-3503-thread-1-processing-n:127.0.0.1:65101_ x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active after recovery.
   [junit4]   2> 2956750 INFO  (zkCallback-3498-thread-1-processing-n:127.0.0.1:65090_) [n:127.0.0.1:65090_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2956750 INFO  (zkCallback-3505-thread-1-processing-n:127.0.0.1:65101_) [n:127.0.0.1:65101_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2956750 INFO  (zkCallback-3492-thread-1-processing-n:127.0.0.1:65072_) [n:127.0.0.1:65072_    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 2956884 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 2956892 INFO  (qtp1593857352-18646) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2956892 INFO  (qtp1593857352-18646) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 2956895 INFO  (qtp1593857352-18646) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2956895 INFO  (qtp1593857352-18646) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 2
   [junit4]   2> 2956904 INFO  (qtp1660683109-18714) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2956904 INFO  (qtp1660683109-18714) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 2956904 INFO  (qtp640814278-18684) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2956905 INFO  (qtp640814278-18684) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 2956905 INFO  (qtp1660683109-18714) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2956906 INFO  (qtp1536248233-18749) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2956906 INFO  (qtp1660683109-18714) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:65072/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 2
   [junit4]   2> 2956907 INFO  (qtp640814278-18684) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2956908 INFO  (qtp1536248233-18749) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 2956912 INFO  (qtp1536248233-18749) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2956912 INFO  (qtp640814278-18684) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:65072/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 8
   [junit4]   2> 2956913 INFO  (qtp1536248233-18749) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:65072/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=} 0 8
   [junit4]   2> 2956914 INFO  (qtp640814278-18688) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={_stateVer_=collection1:10&waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 15
   [junit4]   2> 2956919 INFO  (qtp640814278-18686) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 2956922 INFO  (qtp1660683109-18716) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 2956924 INFO  (qtp1536248233-18751) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request [collection1]  webapp= path=/select params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 2958934 INFO  (qtp1593857352-18647) [n:127.0.0.1:65065_ c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{deleteByQuery=*:* (-1556596114651611136)} 0 2
   [junit4]   2> 2958940 INFO  (qtp1660683109-18716) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&_version_=-1556596114654756864&distrib.from=http://127.0.0.1:65072/collection1/&wt=javabin&version=2}{deleteByQuery=*:* (-1556596114654756864)} 0 3
   [junit4]   2> 2958941 INFO  (qtp1536248233-18752) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&_version_=-1556596114654756864&distrib.from=http://127.0.0.1:65072/collection1/&wt=javabin&version=2}{deleteByQuery=*:* (-1556596114654756864)} 0 3
   [junit4]   2> 2958941 INFO  (qtp640814278-18691) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={wt=javabin&version=2}{deleteByQuery=*:* (-1556596114654756864)} 0 6
   [junit4]   2> 2958950 INFO  (qtp1660683109-18718) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:65072/collection1/&wt=javabin&version=2}{add=[0 (1556596114667339776)]} 0 1
   [junit4]   2> 2958950 INFO  (qtp1536248233-18753) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:65072/collection1/&wt=javabin&version=2}{add=[0 (1556596114667339776)]} 0 1
   [junit4]   2> 2958951 INFO  (qtp640814278-18687) [n:127.0.0.1:65072_ c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={_stateVer_=collection1:10&wt=javabin&version=2}{add=[0 (1556596114667339776)]} 0 4
   [junit4]   2> 2958953 INFO  (qtp1660683109-18719) [n:127.0.0.1:65090_ c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:65072/collection1/&wt=javabin&version=2}{add=[1 (1556596114672582656)]} 0 0
   [junit4]   2> 2958953 INFO  (qtp1536248233-18754) [n:127.0.0.1:65101_ c:collection1 s:shard1 r:core_node3 x:col

[...truncated too long message...]

01
   [junit4]   2> 2980336 INFO  (TEST-PeerSyncReplicationTest.test-seed#[72865ACCC763AB5A]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:65036 65036
   [junit4]   2> 2980398 INFO  (Thread-3427) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:65036 65036
   [junit4]   2> 2986452 WARN  (Thread-3427) [    ] o.a.s.c.ZkTestServer Watch limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2> 	6	/solr/aliases.json
   [junit4]   2> 	6	/solr/clusterprops.json
   [junit4]   2> 	5	/solr/security.json
   [junit4]   2> 	5	/solr/configs/conf1
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2> 	6	/solr/clusterstate.json
   [junit4]   2> 	4	/solr/collections/collection1/state.json
   [junit4]   2> 	2	/solr/collections/collection1/leader_elect/shard1/election/97287255502028809-core_node1-n_0000000000
   [junit4]   2> 	2	/solr/overseer_elect/election/97287255502028809-127.0.0.1:65072_-n_0000000001
   [junit4]   2> 	2	/solr/overseer_elect/election/97287255502028804-127.0.0.1:65065_-n_0000000000
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2> 	41	/solr/overseer/queue
   [junit4]   2> 	38	/solr/overseer/collection-queue-work
   [junit4]   2> 	16	/solr/overseer/queue-work
   [junit4]   2> 	6	/solr/collections
   [junit4]   2> 	5	/solr/live_nodes
   [junit4]   2> 
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=PeerSyncReplicationTest -Dtests.method=test -Dtests.seed=72865ACCC763AB5A -Dtests.slow=true -Dtests.locale=es-PE -Dtests.timezone=Singapore -Dtests.asserts=true -Dtests.file.encoding=US-ASCII
   [junit4] FAILURE 52.9s J1 | PeerSyncReplicationTest.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: PeerSynced node did not become leader expected:<CloudJettyRunner [url=http://127.0.0.1:65090/collection1]> but was:<CloudJettyRunner [url=http://127.0.0.1:65072/collection1]>
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([72865ACCC763AB5A:FAD26516699FC6A2]:0)
   [junit4]    > 	at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:162)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
   [junit4]    > 	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
   [junit4]    > 	at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 2986477 INFO  (SUITE-PeerSyncReplicationTest-seed#[72865ACCC763AB5A]-worker) [    ] o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> NOTE: leaving temporary files on disk at: /Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_72865ACCC763AB5A-001
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene70): {other_tl1=Lucene50(blocksize=128), range_facet_l_dv=Lucene50(blocksize=128), rnd_s=Lucene50(blocksize=128), multiDefault=PostingsFormat(name=LuceneVarGapFixedInterval), intDefault=Lucene50(blocksize=128), a_i1=PostingsFormat(name=LuceneVarGapFixedInterval), range_facet_l=PostingsFormat(name=LuceneVarGapFixedInterval), _version_=Lucene50(blocksize=128), a_t=PostingsFormat(name=LuceneVarGapFixedInterval), id=Lucene50(blocksize=128), range_facet_i_dv=PostingsFormat(name=LuceneVarGapFixedInterval), text=PostingsFormat(name=Memory), timestamp=PostingsFormat(name=LuceneVarGapFixedInterval)}, docValues:{range_facet_l_dv=DocValuesFormat(name=Lucene70), range_facet_i_dv=DocValuesFormat(name=Asserting), timestamp=DocValuesFormat(name=Asserting)}, maxPointsInLeafNode=774, maxMBSortInHeap=5.460910444089585, sim=RandomSimilarity(queryNorm=false): {}, locale=es-PE, timezone=Singapore
   [junit4]   2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_102 (64-bit)/cpus=3,threads=1,free=171559208,total=518979584
   [junit4]   2> NOTE: All tests run in this JVM: [TestCloudInspectUtil, RollingRestartTest, TestStandardQParsers, TestQueryWrapperFilter, TestConfigOverlay, DistributedIntervalFacetingTest, TestRestoreCore, LeaderElectionIntegrationTest, TestNoOpRegenerator, DistribJoinFromCollectionTest, SpellPossibilityIteratorTest, GraphQueryTest, LeaderInitiatedRecoveryOnShardRestartTest, TestRandomFaceting, TolerantUpdateProcessorTest, ResponseLogComponentTest, PreAnalyzedUpdateProcessorTest, TestFreeTextSuggestions, TestManagedResource, BasicDistributedZk2Test, TestSolrDynamicMBean, RemoteQueryErrorTest, UUIDFieldTest, AliasIntegrationTest, TestQueryUtils, TestSolrCoreProperties, ConjunctionSolrSpellCheckerTest, TestSearchPerf, TestSchemaResource, TestSolrDeletionPolicy2, ConvertedLegacyTest, TestFieldTypeResource, TestIndexingPerformance, TestSchemaVersionResource, PreAnalyzedFieldTest, TestReload, CdcrReplicationHandlerTest, TestFieldResource, SolrCmdDistributorTest, SharedFSAutoReplicaFailoverTest, OverriddenZkACLAndCredentialsProvidersTest, TestHdfsCloudBackupRestore, VersionInfoTest, TestFuzzyAnalyzedSuggestions, TestCodecSupport, ZkStateReaderTest, TestRestManager, AnalysisErrorHandlingTest, FieldAnalysisRequestHandlerTest, LoggingHandlerTest, SolrIndexSplitterTest, BufferStoreTest, TestNRTOpen, SolrCoreCheckLockOnStartupTest, TestSSLRandomization, TestBadConfig, TestSuggestSpellingConverter, TestRangeQuery, RequestLoggingTest, SolrIndexMetricsTest, SolrInfoMBeanTest, TestLocalFSCloudBackupRestore, DebugComponentTest, TestComponentsName, BitVectorTest, TestConfig, TestChildDocTransformer, TestInfoStreamLogging, TestDocSet, CleanupOldIndexTest, TestSolrCloudWithSecureImpersonation, TestDeleteCollectionOnDownNodes, BigEndianAscendingWordDeserializerTest, TestUninvertingReader, LukeRequestHandlerTest, OverseerStatusTest, MetricUtilsTest, TestBulkSchemaConcurrent, SpellCheckCollatorTest, TestRecoveryHdfs, TestSolrQueryParser, SolrCoreMetricManagerTest, TestRebalanceLeaders, TestDFRSimilarityFactory, TestDistributedGrouping, TestStressRecovery, ForceLeaderTest, TermVectorComponentDistributedTest, AsyncCallRequestStatusResponseTest, TestExportWriter, TestAnalyzedSuggestions, TriLevelCompositeIdRoutingTest, TestCrossCoreJoin, HdfsRestartWhileUpdatingTest, HdfsChaosMonkeyNothingIsSafeTest, BasicAuthIntegrationTest, TestNonDefinedSimilarityFactory, CircularListTest, DirectUpdateHandlerOptimizeTest, OpenCloseCoreStressTest, TestRequestForwarding, TestRecovery, TestLMDirichletSimilarityFactory, TestDistributedStatsComponentCardinality, SolrCloudExampleTest, TestFoldingMultitermQuery, CollectionsAPIDistributedZkTest, TestDynamicLoading, TestUtils, BJQParserTest, TestCloudPseudoReturnFields, TestTrieFacet, TestJsonFacets, NotRequiredUniqueKeyTest, TestWriterPerf, SimpleCollectionCreateDeleteTest, JSONWriterTest, ConnectionReuseTest, DateMathParserTest, BooleanFieldTest, TestRawTransformer, BasicZkTest, TestCoreDiscovery, AnalyticsMergeStrategyTest, UpdateParamsTest, TestConfigSetProperties, TestOrdValues, TestPushWriter, TestSha256AuthenticationProvider, CdcrBootstrapTest, TestFieldCacheSanityChecker, TestSimpleTrackingShardHandler, TestNumericTerms32, DistributedVersionInfoTest, TestUseDocValuesAsStored, ResourceLoaderTest, DataDrivenBlockJoinTest, TestSearcherReuse, TestImpersonationWithHadoopAuth, FullHLLTest, TestPivotHelperCode, StressHdfsTest, TestCollapseQParserPlugin, TestIndexSearcher, TestComplexPhraseQParserPlugin, TestDownShardTolerantSearch, TestAuthenticationFramework, TestConfigSetsAPI, 
SolrGangliaReporterTest, DistributedTermsComponentTest, RulesTest, TestCorePropertiesReload, DistributedQueryComponentOptimizationTest, CdcrVersionReplicationTest, SolrMetricReporterTest, DeleteInactiveReplicaTest, FieldMutatingUpdateProcessorTest, SuggesterWFSTTest, DateRangeFieldTest, BadIndexSchemaTest, TestMaxScoreQueryParser, ReturnFieldsTest, ExternalFileFieldSortTest, TestStressLiveNodes, BasicDistributedZkTest, ChaosMonkeySafeLeaderTest, UnloadDistributedZkTest, OverseerTest, FullSolrCloudDistribCmdsTest, ZkSolrClientTest, ShardRoutingCustomTest, TestFaceting, TestHashPartitioner, TestRealTimeGet, TestStressReorder, TestJoin, TestCoreContainer, TestSolr4Spatial, SpellCheckComponentTest, TestFiltering, TestFunctionQuery, DirectUpdateHandlerTest, SoftAutoCommitTest, CurrencyFieldXmlFileTest, SignatureUpdateProcessorFactoryTest, SuggesterFSTTest, SolrRequestParserTest, SuggesterTSTTest, TestTrie, SpatialFilterTest, PolyFieldTest, WordBreakSolrSpellCheckerTest, TestUpdate, DefaultValueUpdateProcessorTest, StatelessScriptUpdateProcessorFactoryTest, CacheHeaderTest, DirectSolrSpellCheckerTest, PrimitiveFieldTypeTest, TestOmitPositions, XmlUpdateRequestHandlerTest, TestValueSourceCache, TermVectorComponentTest, IndexSchemaRuntimeFieldTest, IndexSchemaTest, TestCSVResponseWriter, QueryParsingTest, TestPartialUpdateDeduplication, TestPHPSerializedResponseWriter, SOLR749Test, AlternateDirectoryTest, TestSolrIndexConfig, BadComponentTest, OutputWriterTest, TestSweetSpotSimilarityFactory, TestLMJelinekMercerSimilarityFactory, TestBM25SimilarityFactory, TestJmxMonitoredMap, TestSystemIdResolver, PrimUtilsTest, DOMUtilTest, ClusterStateTest, TestLRUCache, ZkNodePropsTest, FileUtilsTest, DistributedMLTComponentTest, TestCursorMarkWithoutUniqueKey, TestDistributedMissingSort, TestHighlightDedupGrouping, TestTolerantSearch, TestEmbeddedSolrServerConstructors, ActionThrottleTest, CdcrRequestHandlerTest, CollectionReloadTest, CollectionTooManyReplicasTest, ConfigSetsAPITest, CreateCollectionCleanupTest, DeleteLastCustomShardedReplicaTest, DeleteNodeTest, DeleteStatusTest, DistribDocExpirationUpdateProcessorTest, DistributedQueueTest, HttpPartitionTest, OutOfBoxZkACLAndCredentialsProvidersTest, OverseerCollectionConfigSetProcessorTest, OverseerModifyCollectionTest, PeerSyncReplicationTest]
   [junit4] Completed [495/678 (1!)] on J1 in 53.01s, 1 test, 1 failure <<< FAILURES!

[...truncated 63080 lines...]
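
For anyone chasing this locally: the "reproduce with" NOTE earlier in the log is the canonical way to replay the failure (run that exact ant line from a lucene-solr checkout). Purely as an illustrative sketch, and not something taken from the failing build, the same master seed can also be pinned through the randomizedtesting annotations; the subclass name below is made up for illustration:

    import com.carrotsearch.randomizedtesting.annotations.Seed;
    import org.apache.solr.cloud.PeerSyncReplicationTest;

    // Hypothetical repro helper (not in the source tree): it inherits the test()
    // method from PeerSyncReplicationTest and pins the master seed printed by
    // Jenkins, so the harness should make the same randomized choices
    // (locale=es-PE, timezone=Singapore, codec, merge policy) as the run above.
    @Seed("72865ACCC763AB5A")
    public class PinnedSeedPeerSyncReplicationRepro extends PeerSyncReplicationTest {
      // no body needed; pinning the seed is the whole point of this subclass
    }

Note that a seed-pinned rerun may still pass locally even with the same seed, since the leader-election assertion also depends on timing (thread scheduling, port assignment) that the seed does not control.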