Posted to dev@lucene.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2018/09/05 22:39:17 UTC

[JENKINS] Lucene-Solr-Tests-7.x - Build # 853 - Unstable

Build: https://builds.apache.org/job/Lucene-Solr-Tests-7.x/853/

1 test failed.
FAILED:  org.apache.solr.cloud.api.collections.ShardSplitTest.testSplitMixedReplicaTypes

Error Message:
unexpected shard state expected:<inactive> but was:<active>

Stack Trace:
java.lang.AssertionError: unexpected shard state expected:<inactive> but was:<active>
	at __randomizedtesting.SeedInfo.seed([E59D0883F6798B26:5D5E5C230AA25E53]:0)
	at org.junit.Assert.fail(Assert.java:93)
	at org.junit.Assert.failNotEquals(Assert.java:647)
	at org.junit.Assert.assertEquals(Assert.java:128)
	at org.apache.solr.cloud.api.collections.ShardSplitTest.verifyShard(ShardSplitTest.java:372)
	at org.apache.solr.cloud.api.collections.ShardSplitTest.doSplitMixedReplicaTypes(ShardSplitTest.java:364)
	at org.apache.solr.cloud.api.collections.ShardSplitTest.testSplitMixedReplicaTypes(ShardSplitTest.java:331)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1742)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:935)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:971)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:985)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1008)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:983)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:944)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:830)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:880)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:891)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)
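
For context on the failure above: verifyShard expects the parent shard to be <inactive> once the SPLITSHARD operation completes, but the parent is normally flipped to INACTIVE by the overseer only after the new sub-shards go active, so a single immediate check can race that transition. The snippet below is a minimal, hypothetical SolrJ 7.x sketch of polling the cluster state for the expected shard state instead of asserting it once; the helper name, ZooKeeper address, and collection name are placeholders, and this is not the test's actual code.

import java.util.Collections;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Slice;

public class WaitForInactiveShard {

  // Polls the collection's cluster state until the named shard reaches the
  // expected state or the timeout expires. Returns true if the state was seen.
  static boolean waitForShardState(CloudSolrClient client, String collection,
                                   String shard, Slice.State expected,
                                   long timeout, TimeUnit unit) throws Exception {
    long deadline = System.nanoTime() + unit.toNanos(timeout);
    while (System.nanoTime() < deadline) {
      // Force a fresh read of the collection's state.json from ZooKeeper.
      client.getZkStateReader().forceUpdateCollection(collection);
      DocCollection coll =
          client.getZkStateReader().getClusterState().getCollection(collection);
      Slice slice = coll.getSlice(shard);
      if (slice != null && slice.getState() == expected) {
        return true;
      }
      Thread.sleep(250);
    }
    return false;
  }

  public static void main(String[] args) throws Exception {
    // "localhost:9983" and "collection1" are illustrative placeholders.
    try (CloudSolrClient client = new CloudSolrClient.Builder(
        Collections.singletonList("localhost:9983"), Optional.empty()).build()) {
      boolean inactive = waitForShardState(client, "collection1", "shard1",
          Slice.State.INACTIVE, 60, TimeUnit.SECONDS);
      System.out.println("shard1 inactive after split: " + inactive);
    }
  }
}

Whether the fix belongs in the test (a longer wait) or in the split state machine is a separate question; the sketch only illustrates the wait pattern.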




Build Log:
[...truncated 14234 lines...]
   [junit4] Suite: org.apache.solr.cloud.api.collections.ShardSplitTest
   [junit4]   2> Creating dataDir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/init-core-data-001
   [junit4]   2> 1881175 INFO  (SUITE-ShardSplitTest-seed#[E59D0883F6798B26]-worker) [    ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /g_lpz/
   [junit4]   2> 1881177 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 1881177 INFO  (Thread-3867) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 1881177 INFO  (Thread-3867) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 1881179 ERROR (Thread-3867) [    ] o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper server won't take any action on ERROR or SHUTDOWN server state changes
   [junit4]   2> 1881277 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.ZkTestServer start zk server on port:35025
   [junit4]   2> 1881279 INFO  (zkConnectionManagerCallback-5494-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1881282 INFO  (zkConnectionManagerCallback-5496-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1881284 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractZkTestCase put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 1881285 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractZkTestCase put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/core/src/test-files/solr/collection1/conf/schema15.xml to /configs/conf1/schema.xml
   [junit4]   2> 1881287 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractZkTestCase put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 1881288 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractZkTestCase put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 1881288 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractZkTestCase put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 1881289 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractZkTestCase put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 1881290 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractZkTestCase put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 1881291 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractZkTestCase put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 1881292 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractZkTestCase put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 1881293 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractZkTestCase put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 1881293 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractZkTestCase put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 1881296 INFO  (zkConnectionManagerCallback-5500-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1881297 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly asked otherwise
   [junit4]   2> 1881412 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 1881413 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1881413 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1881413 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 1881415 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@21ce827c{/g_lpz,null,AVAILABLE}
   [junit4]   2> 1881416 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@1020a7e7{SSL,[ssl, http/1.1]}{127.0.0.1:32935}
   [junit4]   2> 1881416 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.Server Started @1881478ms
   [junit4]   2> 1881416 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/tempDir-001/control/data, hostContext=/g_lpz, hostPort=32935, coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/control-001/cores}
   [junit4]   2> 1881416 ERROR (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1881417 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1881417 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.5.0
   [junit4]   2> 1881417 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1881417 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1881417 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2018-09-05T22:08:09.705Z
   [junit4]   2> 1881418 INFO  (zkConnectionManagerCallback-5502-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1881419 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1881419 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/control-001/solr.xml
   [junit4]   2> 1881422 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1881422 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1881423 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@7f31ac16, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1881495 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:35025/solr
   [junit4]   2> 1881496 INFO  (zkConnectionManagerCallback-5506-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1881499 INFO  (zkConnectionManagerCallback-5508-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1881586 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:32935_g_lpz
   [junit4]   2> 1881586 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer Overseer (id=72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) starting
   [junit4]   2> 1881593 INFO  (zkConnectionManagerCallback-5515-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1881601 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:35025/solr ready
   [junit4]   2> 1881602 INFO  (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:32935_g_lpz
   [junit4]   2> 1881602 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Process current queue of overseer operations
   [junit4]   2> 1881602 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:32935_g_lpz
   [junit4]   2> 1881608 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Cleaning up work-queue. #Running tasks: 0
   [junit4]   2> 1881608 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningTasks: []
   [junit4]   2> 1881608 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 1881608 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor CompletedTasks: []
   [junit4]   2> 1881608 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: []
   [junit4]   2> 1881614 INFO  (zkCallback-5507-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1881614 INFO  (zkCallback-5514-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1881623 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [n:127.0.0.1:32935_g_lpz    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1881638 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [n:127.0.0.1:32935_g_lpz    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7f31ac16
   [junit4]   2> 1881645 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [n:127.0.0.1:32935_g_lpz    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7f31ac16
   [junit4]   2> 1881646 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [n:127.0.0.1:32935_g_lpz    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7f31ac16
   [junit4]   2> 1881647 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/control-001/cores
   [junit4]   2> 1881673 INFO  (zkConnectionManagerCallback-5521-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1881674 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1881675 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:35025/solr ready
   [junit4]   2> 1881687 INFO  (qtp678888869-18558) [n:127.0.0.1:32935_g_lpz    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:32935_g_lpz&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1881691 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Got 1 tasks from work-queue : [[org.apache.solr.cloud.OverseerTaskQueue$QueueEvent@2e684ecf]]
   [junit4]   2> 1881691 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Marked task [/overseer/collection-queue-work/qn-0000000000] as running
   [junit4]   2> 1881691 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Overseer Collection Message Handler: Get the message id:/overseer/collection-queue-work/qn-0000000000 message:{
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:32935_g_lpz",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "stateFormat":"2",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 1881691 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Runner processing /overseer/collection-queue-work/qn-0000000000
   [junit4]   2> 1881691 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.OverseerCollectionMessageHandler OverseerCollectionMessageHandler.processMessage : create , {
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:32935_g_lpz",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "stateFormat":"2",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 1881691 INFO  (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.CreateCollectionCmd Create collection control_collection
   [junit4]   2> 1881692 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.OverseerCollectionMessageHandler creating collections conf node /collections/control_collection 
   [junit4]   2> 1881692 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.CreateCollectionCmd Check for collection zkNode:control_collection
   [junit4]   2> 1881693 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.CreateCollectionCmd Collection zkNode exists
   [junit4]   2> 1881693 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Cleaning up work-queue. #Running tasks: 1
   [junit4]   2> 1881693 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningTasks: [/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 1881693 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 1881693 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor CompletedTasks: []
   [junit4]   2> 1881693 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: [/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 1881695 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:32935_g_lpz",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "stateFormat":"2",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"} current state version: 0
   [junit4]   2> 1881695 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ClusterStateMutator building a new cName: control_collection
   [junit4]   2> 1881695 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ZkStateWriter going to create_collection /collections/control_collection/state.json
   [junit4]   2> 1881795 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.Assign Identify nodes using default
   [junit4]   2> 1881795 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.CreateCollectionCmd Creating SolrCores for new collection control_collection, shardNames [shard1] , message : {
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:32935_g_lpz",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "stateFormat":"2",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 1881796 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.CreateCollectionCmd Creating core control_collection_shard1_replica_n1 as part of shard shard1 of collection control_collection on 127.0.0.1:32935_g_lpz
   [junit4]   2> 1881803 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz    x:control_collection_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1881804 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz    x:control_collection_shard1_replica_n1] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1881806 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"https://127.0.0.1:32935/g_lpz",
   [junit4]   2>   "node_name":"127.0.0.1:32935_g_lpz",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"} current state version: 0
   [junit4]   2> 1881806 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Update state numShards=1 message={
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"https://127.0.0.1:32935/g_lpz",
   [junit4]   2>   "node_name":"127.0.0.1:32935_g_lpz",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"}
   [junit4]   2> 1881806 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Will update state for replica: core_node2:{"core":"control_collection_shard1_replica_n1","base_url":"https://127.0.0.1:32935/g_lpz","node_name":"127.0.0.1:32935_g_lpz","state":"down","type":"NRT"}
   [junit4]   2> 1881807 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Collection is now: DocCollection(control_collection//collections/control_collection/state.json/0)={
   [junit4]   2>   "pullReplicas":"0",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "router":{"name":"compositeId"},
   [junit4]   2>   "maxShardsPerNode":"1",
   [junit4]   2>   "autoAddReplicas":"false",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "tlogReplicas":"0",
   [junit4]   2>   "shards":{"shard1":{
   [junit4]   2>       "range":"80000000-7fffffff",
   [junit4]   2>       "state":"active",
   [junit4]   2>       "replicas":{"core_node2":{
   [junit4]   2>           "core":"control_collection_shard1_replica_n1",
   [junit4]   2>           "base_url":"https://127.0.0.1:32935/g_lpz",
   [junit4]   2>           "node_name":"127.0.0.1:32935_g_lpz",
   [junit4]   2>           "state":"down",
   [junit4]   2>           "type":"NRT"}}}}}
   [junit4]   2> 1881907 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ZkStateWriter going to update_collection /collections/control_collection/state.json version: 0
   [junit4]   2> 1882817 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.5.0
   [junit4]   2> 1882828 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.IndexSchema [control_collection_shard1_replica_n1] Schema name=test
   [junit4]   2> 1882943 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema test/1.6 with uniqueid field id
   [junit4]   2> 1882955 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' using configuration from collection control_collection, trusted=true
   [junit4]   2> 1882955 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry 'solr.core.control_collection.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7f31ac16
   [junit4]   2> 1882955 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 1882956 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/control-001/cores/control_collection_shard1_replica_n1], dataDir=[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/control-001/cores/control_collection_shard1_replica_n1/data/]
   [junit4]   2> 1882965 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=12, maxMergeAtOnceExplicit=10, maxMergedSegmentMB=63.03515625, floorSegmentMB=1.2822265625, forceMergeDeletesPctAllowed=18.90893993745281, segmentsPerTier=27.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0, deletesPctAllowed=21.135554869764203
   [junit4]   2> 1882972 WARN  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 1883013 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 1883014 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1883015 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1883015 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1883016 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=49, maxMergeAtOnceExplicit=26, maxMergedSegmentMB=51.6005859375, floorSegmentMB=0.64453125, forceMergeDeletesPctAllowed=21.467659206579803, segmentsPerTier=15.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0, deletesPctAllowed=47.59824710232721
   [junit4]   2> 1883017 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@7be20316[control_collection_shard1_replica_n1] main]
   [junit4]   2> 1883018 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 1883018 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1883018 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1883019 INFO  (searcherExecutor-7297-thread-1-processing-n:127.0.0.1:32935_g_lpz x:control_collection_shard1_replica_n1 c:control_collection s:shard1) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore [control_collection_shard1_replica_n1] Registered new searcher Searcher@7be20316[control_collection_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1883019 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1610807028017528832
   [junit4]   2> 1883025 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 1883028 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 1883028 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 1883028 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy Sync replicas to https://127.0.0.1:32935/g_lpz/control_collection_shard1_replica_n1/
   [junit4]   2> 1883028 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 1883032 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy https://127.0.0.1:32935/g_lpz/control_collection_shard1_replica_n1/ has no replicas
   [junit4]   2> 1883032 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext Found all replicas participating in election, clear LIR
   [junit4]   2> 1883037 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I am the new leader: https://127.0.0.1:32935/g_lpz/control_collection_shard1_replica_n1/ shard1
   [junit4]   2> 1883037 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "operation":"leader",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "base_url":"https://127.0.0.1:32935/g_lpz",
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "state":"active"} current state version: 0
   [junit4]   2> 1883138 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ZkStateWriter going to update_collection /collections/control_collection/state.json version: 1
   [junit4]   2> 1883139 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 1883140 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "core_node_name":"core_node2",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"https://127.0.0.1:32935/g_lpz",
   [junit4]   2>   "node_name":"127.0.0.1:32935_g_lpz",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "state":"active",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"} current state version: 0
   [junit4]   2> 1883140 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Update state numShards=1 message={
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "core_node_name":"core_node2",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"https://127.0.0.1:32935/g_lpz",
   [junit4]   2>   "node_name":"127.0.0.1:32935_g_lpz",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "state":"active",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"}
   [junit4]   2> 1883140 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Will update state for replica: core_node2:{"core":"control_collection_shard1_replica_n1","base_url":"https://127.0.0.1:32935/g_lpz","node_name":"127.0.0.1:32935_g_lpz","state":"active","type":"NRT","leader":"true"}
   [junit4]   2> 1883140 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Collection is now: DocCollection(control_collection//collections/control_collection/state.json/2)={
   [junit4]   2>   "pullReplicas":"0",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "router":{"name":"compositeId"},
   [junit4]   2>   "maxShardsPerNode":"1",
   [junit4]   2>   "autoAddReplicas":"false",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "tlogReplicas":"0",
   [junit4]   2>   "shards":{"shard1":{
   [junit4]   2>       "range":"80000000-7fffffff",
   [junit4]   2>       "state":"active",
   [junit4]   2>       "replicas":{"core_node2":{
   [junit4]   2>           "core":"control_collection_shard1_replica_n1",
   [junit4]   2>           "base_url":"https://127.0.0.1:32935/g_lpz",
   [junit4]   2>           "node_name":"127.0.0.1:32935_g_lpz",
   [junit4]   2>           "state":"active",
   [junit4]   2>           "type":"NRT",
   [junit4]   2>           "leader":"true"}}}}}
   [junit4]   2> 1883141 INFO  (qtp678888869-18554) [n:127.0.0.1:32935_g_lpz c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1337
   [junit4]   2> 1883141 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.CreateCollectionCmd Finished create command on all shards for collection: control_collection
   [junit4]   2> 1883141 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Completed task:[/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 1883142 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Marked task [/overseer/collection-queue-work/qn-0000000000] as completed.
   [junit4]   2> 1883142 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningTasks: []
   [junit4]   2> 1883142 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 1883142 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor CompletedTasks: [/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 1883142 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: [/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 1883142 DEBUG (OverseerThreadFactory-7292-thread-1-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Overseer Collection Message Handler: Message id:/overseer/collection-queue-work/qn-0000000000 complete, response:{success={127.0.0.1:32935_g_lpz={responseHeader={status=0,QTime=1337},core=control_collection_shard1_replica_n1}}}
   [junit4]   2> 1883143 INFO  (qtp678888869-18558) [n:127.0.0.1:32935_g_lpz    ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 30 seconds. Check all shard replicas
   [junit4]   2> 1883240 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ZkStateWriter going to update_collection /collections/control_collection/state.json version: 2
   [junit4]   2> 1883241 INFO  (zkCallback-5507-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 1883694 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Got 0 tasks from work-queue : [[]]
   [junit4]   2> 1883695 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Cleaning up work-queue. #Running tasks: 0
   [junit4]   2> 1883695 INFO  (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 1883695 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningTasks: []
   [junit4]   2> 1883695 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 1883695 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor CompletedTasks: []
   [junit4]   2> 1883695 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: []
   [junit4]   2> 1884143 INFO  (qtp678888869-18558) [n:127.0.0.1:32935_g_lpz    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:32935_g_lpz&wt=javabin&version=2} status=0 QTime=2456
   [junit4]   2> 1884147 INFO  (zkConnectionManagerCallback-5526-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1884147 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1884148 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:35025/solr ready
   [junit4]   2> 1884148 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 1884150 INFO  (qtp678888869-18560) [n:127.0.0.1:32935_g_lpz    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=2&createNodeSet=&stateFormat=1&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1884157 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Got 1 tasks from work-queue : [[org.apache.solr.cloud.OverseerTaskQueue$QueueEvent@2e684ed1]]
   [junit4]   2> 1884157 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Marked task [/overseer/collection-queue-work/qn-0000000002] as running
   [junit4]   2> 1884157 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Overseer Collection Message Handler: Get the message id:/overseer/collection-queue-work/qn-0000000002 message:{
   [junit4]   2>   "name":"collection1",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"2",
   [junit4]   2>   "createNodeSet":"",
   [junit4]   2>   "stateFormat":"1",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 1884157 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Runner processing /overseer/collection-queue-work/qn-0000000002
   [junit4]   2> 1884157 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.OverseerCollectionMessageHandler OverseerCollectionMessageHandler.processMessage : create , {
   [junit4]   2>   "name":"collection1",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"2",
   [junit4]   2>   "createNodeSet":"",
   [junit4]   2>   "stateFormat":"1",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 1884157 INFO  (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.CreateCollectionCmd Create collection collection1
   [junit4]   2> 1884157 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Cleaning up work-queue. #Running tasks: 1
   [junit4]   2> 1884157 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningTasks: [/overseer/collection-queue-work/qn-0000000002]
   [junit4]   2> 1884157 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 1884157 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor CompletedTasks: []
   [junit4]   2> 1884157 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: [/overseer/collection-queue-work/qn-0000000002]
   [junit4]   2> 1884158 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.OverseerCollectionMessageHandler creating collections conf node /collections/collection1 
   [junit4]   2> 1884159 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.CreateCollectionCmd Check for collection zkNode:collection1
   [junit4]   2> 1884159 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.CreateCollectionCmd Collection zkNode exists
   [junit4]   2> 1884160 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "name":"collection1",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"2",
   [junit4]   2>   "createNodeSet":"",
   [junit4]   2>   "stateFormat":"1",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"} current state version: 0
   [junit4]   2> 1884160 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ClusterStateMutator building a new cName: collection1
   [junit4]   2> 1884361 WARN  (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.CreateCollectionCmd It is unusual to create a collection (collection1) without cores.
   [junit4]   2> 1884361 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.a.c.CreateCollectionCmd Finished create command for collection: collection1
   [junit4]   2> 1884361 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Completed task:[/overseer/collection-queue-work/qn-0000000002]
   [junit4]   2> 1884361 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Marked task [/overseer/collection-queue-work/qn-0000000002] as completed.
   [junit4]   2> 1884361 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningTasks: []
   [junit4]   2> 1884361 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 1884361 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor CompletedTasks: [/overseer/collection-queue-work/qn-0000000002]
   [junit4]   2> 1884361 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: [/overseer/collection-queue-work/qn-0000000002]
   [junit4]   2> 1884361 DEBUG (OverseerThreadFactory-7292-thread-2-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Overseer Collection Message Handler: Message id:/overseer/collection-queue-work/qn-0000000002 complete, response:{}
   [junit4]   2> 1884362 INFO  (qtp678888869-18560) [n:127.0.0.1:32935_g_lpz    ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 30 seconds. Check all shard replicas
   [junit4]   2> 1884362 INFO  (qtp678888869-18560) [n:127.0.0.1:32935_g_lpz    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=2&createNodeSet=&stateFormat=1&wt=javabin&version=2} status=0 QTime=212
   [junit4]   2> 1884470 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/shard-1-001 of type NRT
   [junit4]   2> 1884473 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 1884473 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1884473 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1884473 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 1884474 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@716adb6d{/g_lpz,null,AVAILABLE}
   [junit4]   2> 1884474 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@32a701a5{SSL,[ssl, http/1.1]}{127.0.0.1:34762}
   [junit4]   2> 1884474 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.Server Started @1884536ms
   [junit4]   2> 1884474 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/tempDir-001/jetty1, solrconfig=solrconfig.xml, hostContext=/g_lpz, hostPort=34762, coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/shard-1-001/cores}
   [junit4]   2> 1884474 ERROR (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1884474 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1884474 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.5.0
   [junit4]   2> 1884474 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1884474 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1884475 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2018-09-05T22:08:12.763Z
   [junit4]   2> 1884476 INFO  (zkConnectionManagerCallback-5528-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1884477 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1884477 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/shard-1-001/solr.xml
   [junit4]   2> 1884480 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1884480 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1884481 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@7f31ac16, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1884638 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:35025/solr
   [junit4]   2> 1884640 INFO  (zkConnectionManagerCallback-5532-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1884645 INFO  (zkConnectionManagerCallback-5534-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1884650 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1884653 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1884653 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:34762_g_lpz
   [junit4]   2> 1884654 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "operation":"downnode",
   [junit4]   2>   "node_name":"127.0.0.1:34762_g_lpz"} current state version: 1
   [junit4]   2> 1884654 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.NodeMutator DownNode state invoked for node: 127.0.0.1:34762_g_lpz
   [junit4]   2> 1884656 INFO  (zkCallback-5525-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1884656 INFO  (zkCallback-5514-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1884656 INFO  (zkCallback-5507-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1884656 INFO  (zkCallback-5533-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1884672 INFO  (zkConnectionManagerCallback-5541-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1884673 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1884674 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:35025/solr ready
   [junit4]   2> 1884674 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1884690 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7f31ac16
   [junit4]   2> 1884699 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7f31ac16
   [junit4]   2> 1884699 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7f31ac16
   [junit4]   2> 1884700 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/shard-1-001/cores
   [junit4]   2> 1884724 INFO  (qtp678888869-18558) [n:127.0.0.1:32935_g_lpz    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:34762_g_lpz&action=ADDREPLICA&collection=collection1&shard=shard2&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1884729 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Got 1 tasks from work-queue : [[org.apache.solr.cloud.OverseerTaskQueue$QueueEvent@2e684ed3]]
   [junit4]   2> 1884729 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Marked task [/overseer/collection-queue-work/qn-0000000004] as running
   [junit4]   2> 1884729 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Overseer Collection Message Handler: Get the message id:/overseer/collection-queue-work/qn-0000000004 message:{
   [junit4]   2>   "collection":"collection1",
   [junit4]   2>   "node":"127.0.0.1:34762_g_lpz",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"addreplica"}
   [junit4]   2> 1884732 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Cleaning up work-queue. #Running tasks: 1
   [junit4]   2> 1884732 INFO  (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000002 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 1884732 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningTasks: [/overseer/collection-queue-work/qn-0000000004]
   [junit4]   2> 1884732 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 1884732 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor CompletedTasks: []
   [junit4]   2> 1884732 DEBUG (OverseerCollectionConfigSetProcessor-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: [/overseer/collection-queue-work/qn-0000000004]
   [junit4]   2> 1884733 DEBUG (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.OverseerTaskProcessor Runner processing /overseer/collection-queue-work/qn-0000000004
   [junit4]   2> 1884733 DEBUG (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz c:collection1 s:shard2  ] o.a.s.c.a.c.OverseerCollectionMessageHandler OverseerCollectionMessageHandler.processMessage : addreplica , {
   [junit4]   2>   "collection":"collection1",
   [junit4]   2>   "node":"127.0.0.1:34762_g_lpz",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"addreplica"}
   [junit4]   2> 1884733 DEBUG (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz c:collection1 s:shard2  ] o.a.s.c.a.c.AddReplicaCmd addReplica() : {
   [junit4]   2>   "collection":"collection1",
   [junit4]   2>   "node":"127.0.0.1:34762_g_lpz",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"addreplica"}
   [junit4]   2> 1884733 DEBUG (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz c:collection1 s:shard2  ] o.a.s.c.a.c.Assign getNodesForNewReplicas() shard: shard2 , replicas : 1 , createNodeSet 127.0.0.1:34762_g_lpz
   [junit4]   2> 1884733 INFO  (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz c:collection1 s:shard2  ] o.a.s.c.a.c.AddReplicaCmd Node Identified 127.0.0.1:34762_g_lpz for creating new replica of shard shard2
   [junit4]   2> 1884755 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz    x:collection1_shard2_replica_n41] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&name=collection1_shard2_replica_n41&action=CREATE&collection=collection1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1884757 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "core":"collection1_shard2_replica_n41",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"https://127.0.0.1:34762/g_lpz",
   [junit4]   2>   "node_name":"127.0.0.1:34762_g_lpz",
   [junit4]   2>   "numShards":"2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "collection":"collection1",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"} current state version: 1
   [junit4]   2> 1884757 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Update state numShards=2 message={
   [junit4]   2>   "core":"collection1_shard2_replica_n41",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"https://127.0.0.1:34762/g_lpz",
   [junit4]   2>   "node_name":"127.0.0.1:34762_g_lpz",
   [junit4]   2>   "numShards":"2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "collection":"collection1",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"}
   [junit4]   2> 1884758 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Will update state for replica: core_node42:{"core":"collection1_shard2_replica_n41","base_url":"https://127.0.0.1:34762/g_lpz","node_name":"127.0.0.1:34762_g_lpz","state":"down","type":"NRT"}
   [junit4]   2> 1884758 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Collection is now: DocCollection(collection1//clusterstate.json/2147483647)={
   [junit4]   2>   "pullReplicas":"0",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "router":{"name":"compositeId"},
   [junit4]   2>   "maxShardsPerNode":"1",
   [junit4]   2>   "autoAddReplicas":"false",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "tlogReplicas":"0",
   [junit4]   2>   "shards":{
   [junit4]   2>     "shard1":{
   [junit4]   2>       "range":"80000000-ffffffff",
   [junit4]   2>       "state":"active",
   [junit4]   2>       "replicas":{}},
   [junit4]   2>     "shard2":{
   [junit4]   2>       "range":"0-7fffffff",
   [junit4]   2>       "state":"active",
   [junit4]   2>       "replicas":{"core_node42":{
   [junit4]   2>           "core":"collection1_shard2_replica_n41",
   [junit4]   2>           "base_url":"https://127.0.0.1:34762/g_lpz",
   [junit4]   2>           "node_name":"127.0.0.1:34762_g_lpz",
   [junit4]   2>           "state":"down",
   [junit4]   2>           "type":"NRT"}}}}}
   [junit4]   2> 1885767 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.5.0
   [junit4]   2> 1885804 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.s.IndexSchema [collection1_shard2_replica_n41] Schema name=test
   [junit4]   2> 1885893 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.s.IndexSchema Loaded schema test/1.6 with uniqueid field id
   [junit4]   2> 1885905 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.CoreContainer Creating SolrCore 'collection1_shard2_replica_n41' using configuration from collection collection1, trusted=true
   [junit4]   2> 1885905 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1.shard2.replica_n41' (registry 'solr.core.collection1.shard2.replica_n41') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7f31ac16
   [junit4]   2> 1885905 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 1885905 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.SolrCore [[collection1_shard2_replica_n41] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/shard-1-001/cores/collection1_shard2_replica_n41], dataDir=[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/shard-1-001/cores/collection1_shard2_replica_n41/data/]
   [junit4]   2> 1885909 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=12, maxMergeAtOnceExplicit=10, maxMergedSegmentMB=63.03515625, floorSegmentMB=1.2822265625, forceMergeDeletesPctAllowed=18.90893993745281, segmentsPerTier=27.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0, deletesPctAllowed=21.135554869764203
   [junit4]   2> 1885914 WARN  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 1885958 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 1885958 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1885982 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1885982 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1885984 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=49, maxMergeAtOnceExplicit=26, maxMergedSegmentMB=51.6005859375, floorSegmentMB=0.64453125, forceMergeDeletesPctAllowed=21.467659206579803, segmentsPerTier=15.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=1.0, deletesPctAllowed=47.59824710232721
   [junit4]   2> 1885984 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.s.SolrIndexSearcher Opening [Searcher@423ce442[collection1_shard2_replica_n41] main]
   [junit4]   2> 1885985 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 1885986 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1885986 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1885986 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1610807031128653824
   [junit4]   2> 1885989 INFO  (searcherExecutor-7311-thread-1-processing-n:127.0.0.1:34762_g_lpz x:collection1_shard2_replica_n41 c:collection1 s:shard2) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.SolrCore [collection1_shard2_replica_n41] Registered new searcher Searcher@423ce442[collection1_shard2_replica_n41] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1885993 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.ZkShardTerms Successful update of terms at /collections/collection1/terms/shard2 to Terms{values={core_node42=0}, version=0}
   [junit4]   2> 1885995 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 1885995 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 1885995 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.SyncStrategy Sync replicas to https://127.0.0.1:34762/g_lpz/collection1_shard2_replica_n41/
   [junit4]   2> 1885995 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 1885995 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.SyncStrategy https://127.0.0.1:34762/g_lpz/collection1_shard2_replica_n41/ has no replicas
   [junit4]   2> 1885995 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.ShardLeaderElectionContext Found all replicas participating in election, clear LIR
   [junit4]   2> 1885998 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.ShardLeaderElectionContext I am the new leader: https://127.0.0.1:34762/g_lpz/collection1_shard2_replica_n41/ shard2
   [junit4]   2> 1885998 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "operation":"leader",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "collection":"collection1",
   [junit4]   2>   "base_url":"https://127.0.0.1:34762/g_lpz",
   [junit4]   2>   "core":"collection1_shard2_replica_n41",
   [junit4]   2>   "state":"active"} current state version: 2
   [junit4]   2> 1886150 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 1886151 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "core":"collection1_shard2_replica_n41",
   [junit4]   2>   "core_node_name":"core_node42",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"https://127.0.0.1:34762/g_lpz",
   [junit4]   2>   "node_name":"127.0.0.1:34762_g_lpz",
   [junit4]   2>   "numShards":"2",
   [junit4]   2>   "state":"active",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "collection":"collection1",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"} current state version: 3
   [junit4]   2> 1886151 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Update state numShards=2 message={
   [junit4]   2>   "core":"collection1_shard2_replica_n41",
   [junit4]   2>   "core_node_name":"core_node42",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"https://127.0.0.1:34762/g_lpz",
   [junit4]   2>   "node_name":"127.0.0.1:34762_g_lpz",
   [junit4]   2>   "numShards":"2",
   [junit4]   2>   "state":"active",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "collection":"collection1",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"}
   [junit4]   2> 1886151 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Will update state for replica: core_node42:{"core":"collection1_shard2_replica_n41","base_url":"https://127.0.0.1:34762/g_lpz","node_name":"127.0.0.1:34762_g_lpz","state":"active","type":"NRT","leader":"true"}
   [junit4]   2> 1886151 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.ReplicaMutator Collection is now: DocCollection(collection1//clusterstate.json/2147483647)={
   [junit4]   2>   "pullReplicas":"0",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "router":{"name":"compositeId"},
   [junit4]   2>   "maxShardsPerNode":"1",
   [junit4]   2>   "autoAddReplicas":"false",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "tlogReplicas":"0",
   [junit4]   2>   "shards":{
   [junit4]   2>     "shard1":{
   [junit4]   2>       "range":"80000000-ffffffff",
   [junit4]   2>       "state":"active",
   [junit4]   2>       "replicas":{}},
   [junit4]   2>     "shard2":{
   [junit4]   2>       "range":"0-7fffffff",
   [junit4]   2>       "state":"active",
   [junit4]   2>       "replicas":{"core_node42":{
   [junit4]   2>           "core":"collection1_shard2_replica_n41",
   [junit4]   2>           "base_url":"https://127.0.0.1:34762/g_lpz",
   [junit4]   2>           "node_name":"127.0.0.1:34762_g_lpz",
   [junit4]   2>           "state":"active",
   [junit4]   2>           "type":"NRT",
   [junit4]   2>           "leader":"true"}}}}}
   [junit4]   2> 1886152 INFO  (qtp1739290933-18610) [n:127.0.0.1:34762_g_lpz c:collection1 s:shard2  x:collection1_shard2_replica_n41] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&name=collection1_shard2_replica_n41&action=CREATE&collection=collection1&shard=shard2&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1397
   [junit4]   2> 1886152 DEBUG (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz c:collection1 s:shard2  ] o.a.s.c.OverseerTaskProcessor Completed task:[/overseer/collection-queue-work/qn-0000000004]
   [junit4]   2> 1886153 DEBUG (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz c:collection1 s:shard2  ] o.a.s.c.OverseerTaskProcessor Marked task [/overseer/collection-queue-work/qn-0000000004] as completed.
   [junit4]   2> 1886153 DEBUG (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz c:collection1 s:shard2  ] o.a.s.c.OverseerTaskProcessor RunningTasks: []
   [junit4]   2> 1886153 DEBUG (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz c:collection1 s:shard2  ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 1886153 DEBUG (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz c:collection1 s:shard2  ] o.a.s.c.OverseerTaskProcessor CompletedTasks: [/overseer/collection-queue-work/qn-0000000004]
   [junit4]   2> 1886153 DEBUG (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz c:collection1 s:shard2  ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: [/overseer/collection-queue-work/qn-0000000004]
   [junit4]   2> 1886153 DEBUG (OverseerThreadFactory-7292-thread-3-processing-n:127.0.0.1:32935_g_lpz) [n:127.0.0.1:32935_g_lpz c:collection1 s:shard2  ] o.a.s.c.OverseerTaskProcessor Overseer Collection Message Handler: Message id:/overseer/collection-queue-work/qn-0000000004 complete, response:{success={127.0.0.1:34762_g_lpz={responseHeader={status=0,QTime=1397},core=collection1_shard2_replica_n41}}}
   [junit4]   2> 1886154 INFO  (qtp678888869-18558) [n:127.0.0.1:32935_g_lpz c:collection1   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={node=127.0.0.1:34762_g_lpz&action=ADDREPLICA&collection=collection1&shard=shard2&type=NRT&wt=javabin&version=2} status=0 QTime=1430
   [junit4]   2> 1886271 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/shard-2-001 of type NRT
   [junit4]   2> 1886279 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 1886280 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1886280 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1886280 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1886281 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7761429a{/g_lpz,null,AVAILABLE}
   [junit4]   2> 1886281 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@7f77d720{SSL,[ssl, http/1.1]}{127.0.0.1:36541}
   [junit4]   2> 1886281 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.e.j.s.Server Started @1886343ms
   [junit4]   2> 1886281 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/tempDir-001/jetty2, solrconfig=solrconfig.xml, hostContext=/g_lpz, hostPort=36541, coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/shard-2-001/cores}
   [junit4]   2> 1886281 ERROR (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1886281 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1886281 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.5.0
   [junit4]   2> 1886281 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1886281 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1886281 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2018-09-05T22:08:14.569Z
   [junit4]   2> 1886283 INFO  (zkConnectionManagerCallback-5544-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1886284 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1886284 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_E59D0883F6798B26-001/shard-2-001/solr.xml
   [junit4]   2> 1886287 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1886287 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1886288 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@7f31ac16, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1886449 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:35025/solr
   [junit4]   2> 1886450 INFO  (zkConnectionManagerCallback-5548-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1886452 INFO  (zkConnectionManagerCallback-5550-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1886456 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1886459 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1886459 INFO  (TEST-ShardSplitTest.test-seed#[E59D0883F6798B26]) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:36541_g_lpz
   [junit4]   2> 1886460 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "operation":"downnode",
   [junit4]   2>   "node_name":"127.0.0.1:36541_g_lpz"} current state version: 4
   [junit4]   2> 1886460 DEBUG (OverseerStateUpdate-72285269217378309-127.0.0.1:32935_g_lpz-n_0000000000) [n:127.0.0.1:32935_g_lpz    ] o.a.s.c.o.NodeMutator DownNode state invoked for node: 127.0.0.1:36541_g_lpz
   [junit4]   2> 1886460 INFO  (zkCallback-5525-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1886460 INFO  (zkCallback-5514-thread-1) [    ] o.a.s.c.c.ZkStateReader Up

[...truncated too long message...]

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/null301335912
     [copy] Copying 239 files to /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/null301335912
   [delete] Deleting directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-7.x/solr/null301335912

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: org.eclipse.jgit#org.eclipse.jgit-caller;working
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.eclipse.jgit#org.eclipse.jgit;4.6.0.201612231935-r in public
[ivy:cachepath] 	found com.jcraft#jsch;0.1.53 in public
[ivy:cachepath] 	found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] 	found org.apache.httpcomponents#httpclient;4.3.6 in public
[ivy:cachepath] 	found org.apache.httpcomponents#httpcore;4.3.3 in public
[ivy:cachepath] 	found commons-logging#commons-logging;1.1.3 in public
[ivy:cachepath] 	found commons-codec#commons-codec;1.6 in public
[ivy:cachepath] 	found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 21ms :: artifacts dl 2ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   8   |   0   |   0   |   0   ||   8   |   0   |
	---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
[wc-checker] SLF4J: Defaulting to no-operation (NOP) logger implementation
[wc-checker] SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 90 minutes 5 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath$34.hasMatch(FilePath.java:2678)
	at hudson.FilePath$34.invoke(FilePath.java:2557)
	at hudson.FilePath$34.invoke(FilePath.java:2547)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:2918)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:955)
		at hudson.FilePath.act(FilePath.java:1036)
		at hudson.FilePath.act(FilePath.java:1025)
		at hudson.FilePath.validateAntFileMask(FilePath.java:2547)
		at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1819)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:2920)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath.act(FilePath.java:1038)
	at hudson.FilePath.act(FilePath.java:1025)
	at hudson.FilePath.validateAntFileMask(FilePath.java:2547)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1819)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)