Posted to builds@lucene.apache.org by Policeman Jenkins Server <je...@thetaphi.de> on 2019/12/28 03:36:36 UTC

[JENKINS] Lucene-Solr-8.x-Linux (32bit/jdk1.8.0_201) - Build # 1683 - Unstable!

Build: https://jenkins.thetaphi.de/job/Lucene-Solr-8.x-Linux/1683/
Java: 32bit/jdk1.8.0_201 -server -XX:+UseG1GC

1 test failed.
FAILED:  org.apache.solr.cloud.api.collections.ShardSplitTest.testSplitAfterFailedSplit

Error Message:
Shard split did not succeed after a previous failed split attempt left sub-shards in construction state

Stack Trace:
java.lang.AssertionError: Shard split did not succeed after a previous failed split attempt left sub-shards in construction state
	at __randomizedtesting.SeedInfo.seed([94B0F475F4F8652:F0069CE8633ACBD8]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.apache.solr.cloud.api.collections.ShardSplitTest.splitAfterFailedSplit(ShardSplitTest.java:328)
	at org.apache.solr.cloud.api.collections.ShardSplitTest.testSplitAfterFailedSplit(ShardSplitTest.java:290)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1082)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1054)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)
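
For context, a minimal SolrJ sketch of the scenario the failing assertion covers (hypothetical collection/shard names and a throwaway main class; this is not the test's own code, and the ZooKeeper address is taken from the log below only for illustration). A SPLITSHARD call creates sub-shards in CONSTRUCTION state; if an attempt fails they can be left behind in that state, and a retried split is expected to clean them up and leave them ACTIVE.

import java.util.Collections;
import java.util.Optional;

import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Slice;

public class SplitRetrySketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical ZK ensemble/chroot, matching the test log's 127.0.0.1:39087/solr.
    try (CloudSolrClient client = new CloudSolrClient.Builder(
        Collections.singletonList("127.0.0.1:39087"), Optional.of("/solr")).build()) {
      client.connect();

      // Retry the split of shard1; a previous failed attempt may have left
      // shard1_0 / shard1_1 behind in CONSTRUCTION state.
      CollectionAdminRequest.SplitShard split =
          CollectionAdminRequest.splitShard("collection1").setShardName("shard1");
      split.process(client);

      // The assertion boils down to: after a successful retry the sub-shards
      // must end up ACTIVE (the parent shard then goes INACTIVE).
      DocCollection coll = client.getZkStateReader()
          .getClusterState().getCollection("collection1");
      for (String sub : new String[] {"shard1_0", "shard1_1"}) {
        Slice slice = coll.getSlice(sub);
        if (slice == null || slice.getState() != Slice.State.ACTIVE) {
          throw new AssertionError("Sub-shard " + sub + " is not active after retried split");
        }
      }
    }
  }
}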




Build Log:
[...truncated 15325 lines...]
   [junit4] Suite: org.apache.solr.cloud.api.collections.ShardSplitTest
   [junit4]   2> 2191729 INFO  (SUITE-ShardSplitTest-seed#[94B0F475F4F8652]-worker) [     ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/data-dir-197-001
   [junit4]   2> 2191729 WARN  (SUITE-ShardSplitTest-seed#[94B0F475F4F8652]-worker) [     ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=74 numCloses=74
   [junit4]   2> 2191729 INFO  (SUITE-ShardSplitTest-seed#[94B0F475F4F8652]-worker) [     ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=false
   [junit4]   2> 2191731 INFO  (SUITE-ShardSplitTest-seed#[94B0F475F4F8652]-worker) [     ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl=https://issues.apache.org/jira/browse/SOLR-5776)
   [junit4]   2> 2191731 INFO  (SUITE-ShardSplitTest-seed#[94B0F475F4F8652]-worker) [     ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 2191731 INFO  (SUITE-ShardSplitTest-seed#[94B0F475F4F8652]-worker) [     ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /oczqm/
   [junit4]   2> 2191735 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 2191736 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 2191736 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 2191836 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer start zk server on port:39087
   [junit4]   2> 2191836 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:39087
   [junit4]   2> 2191836 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:39087
   [junit4]   2> 2191836 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 39087
   [junit4]   2> 2191842 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2191858 INFO  (zkConnectionManagerCallback-10220-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2191858 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2191890 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2191891 INFO  (zkConnectionManagerCallback-10222-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2191891 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2191898 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 2191899 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/core/src/test-files/solr/collection1/conf/schema15.xml to /configs/conf1/schema.xml
   [junit4]   2> 2191907 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 2191910 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 2191911 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 2191912 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 2191927 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 2191932 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 2191933 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 2191934 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 2191935 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 2191935 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly asked otherwise
   [junit4]   2> 2192048 WARN  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.s.h.g.GzipHandler minGzipSize of 0 is inefficient for short content, break even is size 23
   [junit4]   2> 2192048 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 2192048 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2192048 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.s.Server jetty-9.4.24.v20191120; built: 2019-11-20T21:37:49.771Z; git: 363d5f2df3a8a28de40604320230664b9c793c16; jvm 1.8.0_201-b09
   [junit4]   2> 2192049 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2192049 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2192049 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2192051 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@1a1cc21{/oczqm,null,AVAILABLE}
   [junit4]   2> 2192052 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.s.AbstractConnector Started ServerConnector@1d2e596{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:45459}
   [junit4]   2> 2192052 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.s.Server Started @2192079ms
   [junit4]   2> 2192052 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/tempDir-001/control/data, replicaType=NRT, hostContext=/oczqm, hostPort=45459, coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/control-001/cores}
   [junit4]   2> 2192052 ERROR (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2192052 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2192052 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.5.0
   [junit4]   2> 2192052 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2192052 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2192052 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-12-28T02:56:46.311Z
   [junit4]   2> 2192053 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2192054 INFO  (zkConnectionManagerCallback-10224-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2192054 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2192155 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 2192155 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/control-001/solr.xml
   [junit4]   2> 2192159 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 2192159 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2192160 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 2193215 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 2193215 WARN  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@1f9e262[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2193215 WARN  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@1f9e262[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2193219 WARN  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@1e180ad[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2193219 WARN  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@1e180ad[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2193220 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:39087/solr
   [junit4]   2> 2193221 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2193221 INFO  (zkConnectionManagerCallback-10231-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2193221 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2193324 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2193325 INFO  (zkConnectionManagerCallback-10233-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2193325 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2193360 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:45459_oczqm
   [junit4]   2> 2193360 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.Overseer Overseer (id=72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) starting
   [junit4]   2> 2193362 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Process current queue of overseer operations
   [junit4]   2> 2193362 INFO  (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:45459_oczqm
   [junit4]   2> 2193363 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Cleaning up work-queue. #Running tasks: 0 #Completed tasks: 0
   [junit4]   2> 2193363 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningTasks: []
   [junit4]   2> 2193363 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 2193363 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:45459_oczqm
   [junit4]   2> 2193363 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor CompletedTasks: []
   [junit4]   2> 2193363 INFO  (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: []
   [junit4]   2> 2193364 INFO  (zkCallback-10232-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2193365 WARN  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.CoreContainer Not all security plugins configured!  authentication=disabled authorization=disabled.  Solr is only as secure as you make it. Consider configuring authentication/authorization before exposing Solr to users internal or external.  See https://s.apache.org/solrsecurity for more info
   [junit4]   2> 2193377 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 2193394 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c
   [junit4]   2> 2193401 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c
   [junit4]   2> 2193401 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c
   [junit4]   2> 2193402 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.p.PackageLoader /packages.json updated to version -1
   [junit4]   2> 2193403 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/control-001/cores
   [junit4]   2> 2193418 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2193418 INFO  (zkConnectionManagerCallback-10242-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2193418 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2193419 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2193420 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:39087/solr ready
   [junit4]   2> 2193421 INFO  (qtp17929961-34364) [n:127.0.0.1:45459_oczqm     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:45459_oczqm&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 2193422 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Got 1 tasks from work-queue : [[org.apache.solr.cloud.OverseerTaskQueue$QueueEvent@2e684ecf]]
   [junit4]   2> 2193422 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Marked task [/overseer/collection-queue-work/qn-0000000000] as running
   [junit4]   2> 2193422 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Overseer Collection Message Handler: Get the message id:/overseer/collection-queue-work/qn-0000000000 message:{
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:45459_oczqm",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "stateFormat":"2",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 2193422 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Runner processing /overseer/collection-queue-work/qn-0000000000
   [junit4]   2> 2193422 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.OverseerCollectionMessageHandler OverseerCollectionMessageHandler.processMessage : create , {
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:45459_oczqm",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "stateFormat":"2",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 2193422 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Cleaning up work-queue. #Running tasks: 1 #Completed tasks: 0
   [junit4]   2> 2193422 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningTasks: [/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 2193422 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 2193422 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor CompletedTasks: []
   [junit4]   2> 2193422 INFO  (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: [/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 2193422 INFO  (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.CreateCollectionCmd Create collection control_collection
   [junit4]   2> 2193422 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.OverseerCollectionMessageHandler creating collections conf node /collections/control_collection 
   [junit4]   2> 2193422 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.CreateCollectionCmd Check for collection zkNode:control_collection
   [junit4]   2> 2193423 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.CreateCollectionCmd Collection zkNode exists
   [junit4]   2> 2193423 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:45459_oczqm",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "stateFormat":"2",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"} current state version: 0
   [junit4]   2> 2193423 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ClusterStateMutator building a new cName: control_collection
   [junit4]   2> 2193424 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ZkStateWriter going to create_collection /collections/control_collection/state.json
   [junit4]   2> 2193524 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.CreateCollectionCmd Creating SolrCores for new collection control_collection, shardNames [shard1] , message : {
   [junit4]   2>   "name":"control_collection",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "createNodeSet":"127.0.0.1:45459_oczqm",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "stateFormat":"2",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 2193525 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.CreateCollectionCmd Creating core control_collection_shard1_replica_n1 as part of shard shard1 of collection control_collection on 127.0.0.1:45459_oczqm
   [junit4]   2> 2193526 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm    x:control_collection_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 2193526 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm    x:control_collection_shard1_replica_n1 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 2193528 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"http://127.0.0.1:45459/oczqm",
   [junit4]   2>   "node_name":"127.0.0.1:45459_oczqm",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"} current state version: 0
   [junit4]   2> 2193528 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ReplicaMutator Update state numShards=1 message={
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"http://127.0.0.1:45459/oczqm",
   [junit4]   2>   "node_name":"127.0.0.1:45459_oczqm",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"}
   [junit4]   2> 2193528 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ReplicaMutator Will update state for replica: core_node2:{
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "base_url":"http://127.0.0.1:45459/oczqm",
   [junit4]   2>   "node_name":"127.0.0.1:45459_oczqm",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "type":"NRT"}
   [junit4]   2> 2193528 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ReplicaMutator Collection is now: DocCollection(control_collection//collections/control_collection/state.json/0)={
   [junit4]   2>   "pullReplicas":"0",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "router":{"name":"compositeId"},
   [junit4]   2>   "maxShardsPerNode":"1",
   [junit4]   2>   "autoAddReplicas":"false",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "tlogReplicas":"0",
   [junit4]   2>   "shards":{"shard1":{
   [junit4]   2>       "range":"80000000-7fffffff",
   [junit4]   2>       "state":"active",
   [junit4]   2>       "replicas":{"core_node2":{
   [junit4]   2>           "core":"control_collection_shard1_replica_n1",
   [junit4]   2>           "base_url":"http://127.0.0.1:45459/oczqm",
   [junit4]   2>           "node_name":"127.0.0.1:45459_oczqm",
   [junit4]   2>           "state":"down",
   [junit4]   2>           "type":"NRT"}}}}}
   [junit4]   2> 2193629 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ZkStateWriter going to update_collection /collections/control_collection/state.json version: 0
   [junit4]   2> 2194772 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.5.0
   [junit4]   2> 2194787 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema [control_collection_shard1_replica_n1] Schema name=test
   [junit4]   2> 2194872 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema test/1.6 with uniqueid field id
   [junit4]   2> 2194885 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' using configuration from collection control_collection, trusted=true
   [junit4]   2> 2194886 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry 'solr.core.control_collection.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c
   [junit4]   2> 2194886 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/control-001/cores/control_collection_shard1_replica_n1], dataDir=[/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/control-001/cores/control_collection_shard1_replica_n1/data/]
   [junit4]   2> 2194890 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=1100019739, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.1]
   [junit4]   2> 2195129 WARN  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 2195166 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 2195166 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2195167 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2195167 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2195168 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=46, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.8186758059649729]
   [junit4]   2> 2195168 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@18c5a6b[control_collection_shard1_replica_n1] main]
   [junit4]   2> 2195169 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 2195169 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2195170 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2195170 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1654130537323823104
   [junit4]   2> 2195173 INFO  (searcherExecutor-9295-thread-1-processing-n:127.0.0.1:45459_oczqm x:control_collection_shard1_replica_n1 c:control_collection s:shard1) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [control_collection_shard1_replica_n1] Registered new searcher Searcher@18c5a6b[control_collection_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2195174 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 2195174 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/control_collection/leaders/shard1
   [junit4]   2> 2195176 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 2195176 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 2195176 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:45459/oczqm/control_collection_shard1_replica_n1/
   [junit4]   2> 2195176 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 2195176 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy http://127.0.0.1:45459/oczqm/control_collection_shard1_replica_n1/ has no replicas
   [junit4]   2> 2195176 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/control_collection/leaders/shard1/leader after winning as /collections/control_collection/leader_elect/shard1/election/72058373637013508-core_node2-n_0000000000
   [junit4]   2> 2195177 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:45459/oczqm/control_collection_shard1_replica_n1/ shard1
   [junit4]   2> 2195177 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "operation":"leader",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "base_url":"http://127.0.0.1:45459/oczqm",
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "state":"active"} current state version: 0
   [junit4]   2> 2195278 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ZkStateWriter going to update_collection /collections/control_collection/state.json version: 1
   [junit4]   2> 2195278 INFO  (zkCallback-10232-thread-1) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 2195278 INFO  (zkCallback-10232-thread-2) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 2195278 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 2195279 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "core_node_name":"core_node2",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"http://127.0.0.1:45459/oczqm",
   [junit4]   2>   "node_name":"127.0.0.1:45459_oczqm",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "state":"active",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"} current state version: 0
   [junit4]   2> 2195279 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ReplicaMutator Update state numShards=1 message={
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "core_node_name":"core_node2",
   [junit4]   2>   "roles":null,
   [junit4]   2>   "base_url":"http://127.0.0.1:45459/oczqm",
   [junit4]   2>   "node_name":"127.0.0.1:45459_oczqm",
   [junit4]   2>   "numShards":"1",
   [junit4]   2>   "state":"active",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "collection":"control_collection",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"state"}
   [junit4]   2> 2195279 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ReplicaMutator Will update state for replica: core_node2:{
   [junit4]   2>   "core":"control_collection_shard1_replica_n1",
   [junit4]   2>   "base_url":"http://127.0.0.1:45459/oczqm",
   [junit4]   2>   "node_name":"127.0.0.1:45459_oczqm",
   [junit4]   2>   "state":"active",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "leader":"true"}
   [junit4]   2> 2195279 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ReplicaMutator Collection is now: DocCollection(control_collection//collections/control_collection/state.json/2)={
   [junit4]   2>   "pullReplicas":"0",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "router":{"name":"compositeId"},
   [junit4]   2>   "maxShardsPerNode":"1",
   [junit4]   2>   "autoAddReplicas":"false",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "tlogReplicas":"0",
   [junit4]   2>   "shards":{"shard1":{
   [junit4]   2>       "range":"80000000-7fffffff",
   [junit4]   2>       "state":"active",
   [junit4]   2>       "replicas":{"core_node2":{
   [junit4]   2>           "core":"control_collection_shard1_replica_n1",
   [junit4]   2>           "base_url":"http://127.0.0.1:45459/oczqm",
   [junit4]   2>           "node_name":"127.0.0.1:45459_oczqm",
   [junit4]   2>           "state":"active",
   [junit4]   2>           "type":"NRT",
   [junit4]   2>           "leader":"true"}}}}}
   [junit4]   2> 2195280 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1754
   [junit4]   2> 2195280 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.CreateCollectionCmd Finished create command on all shards for collection: control_collection
   [junit4]   2> 2195280 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Completed task:[/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 2195280 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Marked task [/overseer/collection-queue-work/qn-0000000000] as completed.
   [junit4]   2> 2195280 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningTasks: []
   [junit4]   2> 2195280 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 2195280 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor CompletedTasks: [/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 2195280 INFO  (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: [/overseer/collection-queue-work/qn-0000000000]
   [junit4]   2> 2195280 DEBUG (OverseerThreadFactory-9290-thread-1-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Overseer Collection Message Handler: Message id:/overseer/collection-queue-work/qn-0000000000 complete, response:{success={127.0.0.1:45459_oczqm={responseHeader={status=0,QTime=1754},core=control_collection_shard1_replica_n1}}}
   [junit4]   2> 2195281 INFO  (qtp17929961-34364) [n:127.0.0.1:45459_oczqm     ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 2195380 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ZkStateWriter going to update_collection /collections/control_collection/state.json version: 2
   [junit4]   2> 2195380 INFO  (zkCallback-10232-thread-2) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 2195380 INFO  (zkCallback-10232-thread-1) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 2195380 INFO  (zkCallback-10232-thread-3) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 2195380 INFO  (qtp17929961-34364) [n:127.0.0.1:45459_oczqm     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:45459_oczqm&wt=javabin&version=2} status=0 QTime=1959
   [junit4]   2> 2195380 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.AbstractFullDistribZkTestBase Waiting to see 1 active replicas in collection: control_collection
   [junit4]   2> 2195422 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Got 0 tasks from work-queue : [[]]
   [junit4]   2> 2195423 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Cleaning up work-queue. #Running tasks: 0 #Completed tasks: 1
   [junit4]   2> 2195423 INFO  (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 2195423 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningTasks: []
   [junit4]   2> 2195423 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 2195423 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor CompletedTasks: []
   [junit4]   2> 2195423 INFO  (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: []
   [junit4]   2> 2195485 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2195486 INFO  (zkConnectionManagerCallback-10248-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2195486 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2195487 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2195488 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:39087/solr ready
   [junit4]   2> 2195488 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 2195489 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=2&createNodeSet=&stateFormat=1&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 2195490 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Got 1 tasks from work-queue : [[org.apache.solr.cloud.OverseerTaskQueue$QueueEvent@2e684ed1]]
   [junit4]   2> 2195490 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Marked task [/overseer/collection-queue-work/qn-0000000002] as running
   [junit4]   2> 2195490 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Overseer Collection Message Handler: Get the message id:/overseer/collection-queue-work/qn-0000000002 message:{
   [junit4]   2>   "name":"collection1",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"2",
   [junit4]   2>   "createNodeSet":"",
   [junit4]   2>   "stateFormat":"1",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 2195490 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Runner processing /overseer/collection-queue-work/qn-0000000002
   [junit4]   2> 2195490 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.OverseerCollectionMessageHandler OverseerCollectionMessageHandler.processMessage : create , {
   [junit4]   2>   "name":"collection1",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"2",
   [junit4]   2>   "createNodeSet":"",
   [junit4]   2>   "stateFormat":"1",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"}
   [junit4]   2> 2195490 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Cleaning up work-queue. #Running tasks: 1 #Completed tasks: 0
   [junit4]   2> 2195490 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningTasks: [/overseer/collection-queue-work/qn-0000000002]
   [junit4]   2> 2195490 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 2195490 DEBUG (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor CompletedTasks: []
   [junit4]   2> 2195490 INFO  (OverseerCollectionConfigSetProcessor-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: [/overseer/collection-queue-work/qn-0000000002]
   [junit4]   2> 2195490 INFO  (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.CreateCollectionCmd Create collection collection1
   [junit4]   2> 2195490 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.OverseerCollectionMessageHandler creating collections conf node /collections/collection1 
   [junit4]   2> 2195490 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.CreateCollectionCmd Check for collection zkNode:collection1
   [junit4]   2> 2195491 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.CreateCollectionCmd Collection zkNode exists
   [junit4]   2> 2195491 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "name":"collection1",
   [junit4]   2>   "fromApi":"true",
   [junit4]   2>   "collection.configName":"conf1",
   [junit4]   2>   "numShards":"2",
   [junit4]   2>   "createNodeSet":"",
   [junit4]   2>   "stateFormat":"1",
   [junit4]   2>   "nrtReplicas":"1",
   [junit4]   2>   "replicationFactor":"1",
   [junit4]   2>   "operation":"create"} current state version: 0
   [junit4]   2> 2195491 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.ClusterStateMutator building a new cName: collection1
   [junit4]   2> 2195692 WARN  (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.CreateCollectionCmd It is unusual to create a collection (collection1) without cores.
   [junit4]   2> 2195692 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.a.c.CreateCollectionCmd Finished create command for collection: collection1
   [junit4]   2> 2195692 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Completed task:[/overseer/collection-queue-work/qn-0000000002]
   [junit4]   2> 2195692 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Marked task [/overseer/collection-queue-work/qn-0000000002] as completed.
   [junit4]   2> 2195692 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningTasks: []
   [junit4]   2> 2195692 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor BlockedTasks: []
   [junit4]   2> 2195692 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor CompletedTasks: [/overseer/collection-queue-work/qn-0000000002]
   [junit4]   2> 2195692 INFO  (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor RunningZKTasks: [/overseer/collection-queue-work/qn-0000000002]
   [junit4]   2> 2195692 DEBUG (OverseerThreadFactory-9290-thread-2-processing-n:127.0.0.1:45459_oczqm) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.OverseerTaskProcessor Overseer Collection Message Handler: Message id:/overseer/collection-queue-work/qn-0000000002 complete, response:{}
   [junit4]   2> 2195693 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm     ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 2195693 INFO  (qtp17929961-34363) [n:127.0.0.1:45459_oczqm     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=2&createNodeSet=&stateFormat=1&wt=javabin&version=2} status=0 QTime=204
   [junit4]   2> 2195693 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.SolrCloudTestCase active slice count: 2 expected:2
   [junit4]   2> 2195693 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
   [junit4]   2> 2195693 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.SolrCloudTestCase active slice count: 2 expected:2
   [junit4]   2> 2195693 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
   [junit4]   2> 2195693 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.SolrCloudTestCase active slice count: 2 expected:2
   [junit4]   2> 2195693 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
   [junit4]   2> 2195693 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.AbstractFullDistribZkTestBase Creating jetty instances pullReplicaCount=0 numOtherReplicas=4
   [junit4]   2> 2195793 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-1-001 of type NRT
   [junit4]   2> 2195794 WARN  (closeThreadPool-10249-thread-1) [     ] o.e.j.s.h.g.GzipHandler minGzipSize of 0 is inefficient for short content, break even is size 23
   [junit4]   2> 2195794 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 2195794 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2195794 INFO  (closeThreadPool-10249-thread-1) [     ] o.e.j.s.Server jetty-9.4.24.v20191120; built: 2019-11-20T21:37:49.771Z; git: 363d5f2df3a8a28de40604320230664b9c793c16; jvm 1.8.0_201-b09
   [junit4]   2> 2195795 INFO  (closeThreadPool-10249-thread-1) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2195795 INFO  (closeThreadPool-10249-thread-1) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2195795 INFO  (closeThreadPool-10249-thread-1) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2195796 INFO  (closeThreadPool-10249-thread-1) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@1289182{/oczqm,null,AVAILABLE}
   [junit4]   2> 2195796 INFO  (closeThreadPool-10249-thread-1) [     ] o.e.j.s.AbstractConnector Started ServerConnector@16353b3{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:33615}
   [junit4]   2> 2195796 INFO  (closeThreadPool-10249-thread-1) [     ] o.e.j.s.Server Started @2195823ms
   [junit4]   2> 2195796 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/tempDir-001/jetty1, solrconfig=solrconfig.xml, hostContext=/oczqm, hostPort=33615, coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-1-001/cores}
   [junit4]   2> 2195796 ERROR (closeThreadPool-10249-thread-1) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2195796 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2195796 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.5.0
   [junit4]   2> 2195796 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2195796 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2195796 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-12-28T02:56:50.055Z
   [junit4]   2> 2195797 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2195798 INFO  (zkConnectionManagerCallback-10251-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2195798 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2195877 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-2-001 of type NRT
   [junit4]   2> 2195878 WARN  (closeThreadPool-10249-thread-2) [     ] o.e.j.s.h.g.GzipHandler minGzipSize of 0 is inefficient for short content, break even is size 23
   [junit4]   2> 2195878 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 2195878 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2195878 INFO  (closeThreadPool-10249-thread-2) [     ] o.e.j.s.Server jetty-9.4.24.v20191120; built: 2019-11-20T21:37:49.771Z; git: 363d5f2df3a8a28de40604320230664b9c793c16; jvm 1.8.0_201-b09
   [junit4]   2> 2195878 INFO  (closeThreadPool-10249-thread-2) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2195878 INFO  (closeThreadPool-10249-thread-2) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2195878 INFO  (closeThreadPool-10249-thread-2) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 2195878 INFO  (closeThreadPool-10249-thread-2) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7ef67d{/oczqm,null,AVAILABLE}
   [junit4]   2> 2195879 INFO  (closeThreadPool-10249-thread-2) [     ] o.e.j.s.AbstractConnector Started ServerConnector@1e0b0f7{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:36409}
   [junit4]   2> 2195879 INFO  (closeThreadPool-10249-thread-2) [     ] o.e.j.s.Server Started @2195905ms
   [junit4]   2> 2195879 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/tempDir-001/jetty2, solrconfig=solrconfig.xml, hostContext=/oczqm, hostPort=36409, coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-2-001/cores}
   [junit4]   2> 2195879 ERROR (closeThreadPool-10249-thread-2) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2195879 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2195879 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.5.0
   [junit4]   2> 2195879 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2195879 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2195879 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-12-28T02:56:50.138Z
   [junit4]   2> 2195880 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2195880 INFO  (zkConnectionManagerCallback-10253-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2195880 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2195899 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 2195899 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-1-001/solr.xml
   [junit4]   2> 2195903 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 2195903 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2195903 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 2195962 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-3-001 of type NRT
   [junit4]   2> 2195963 WARN  (closeThreadPool-10249-thread-3) [     ] o.e.j.s.h.g.GzipHandler minGzipSize of 0 is inefficient for short content, break even is size 23
   [junit4]   2> 2195963 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 2195963 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2195963 INFO  (closeThreadPool-10249-thread-3) [     ] o.e.j.s.Server jetty-9.4.24.v20191120; built: 2019-11-20T21:37:49.771Z; git: 363d5f2df3a8a28de40604320230664b9c793c16; jvm 1.8.0_201-b09
   [junit4]   2> 2195972 INFO  (closeThreadPool-10249-thread-3) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2195972 INFO  (closeThreadPool-10249-thread-3) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2195972 INFO  (closeThreadPool-10249-thread-3) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 2195973 INFO  (closeThreadPool-10249-thread-3) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@ba3112{/oczqm,null,AVAILABLE}
   [junit4]   2> 2195977 INFO  (closeThreadPool-10249-thread-3) [     ] o.e.j.s.AbstractConnector Started ServerConnector@17b02cb{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:37631}
   [junit4]   2> 2195977 INFO  (closeThreadPool-10249-thread-3) [     ] o.e.j.s.Server Started @2196004ms
   [junit4]   2> 2195977 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/tempDir-001/jetty3, solrconfig=solrconfig.xml, hostContext=/oczqm, hostPort=37631, coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-3-001/cores}
   [junit4]   2> 2195977 ERROR (closeThreadPool-10249-thread-3) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2195977 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2195977 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.5.0
   [junit4]   2> 2195977 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2195977 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2195977 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-12-28T02:56:50.236Z
   [junit4]   2> 2195982 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2195982 INFO  (zkConnectionManagerCallback-10256-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2195982 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2196000 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 2196000 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-2-001/solr.xml
   [junit4]   2> 2196003 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 2196003 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2196004 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 2196059 INFO  (TEST-ShardSplitTest.testSplitStaticIndexReplicationLink-seed#[94B0F475F4F8652]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 4 in directory /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-4-001 of type NRT
   [junit4]   2> 2196065 WARN  (closeThreadPool-10249-thread-4) [     ] o.e.j.s.h.g.GzipHandler minGzipSize of 0 is inefficient for short content, break even is size 23
   [junit4]   2> 2196065 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 2196065 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2196065 INFO  (closeThreadPool-10249-thread-4) [     ] o.e.j.s.Server jetty-9.4.24.v20191120; built: 2019-11-20T21:37:49.771Z; git: 363d5f2df3a8a28de40604320230664b9c793c16; jvm 1.8.0_201-b09
   [junit4]   2> 2196080 INFO  (closeThreadPool-10249-thread-4) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2196080 INFO  (closeThreadPool-10249-thread-4) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2196080 INFO  (closeThreadPool-10249-thread-4) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 2196080 INFO  (closeThreadPool-10249-thread-4) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@ebf13{/oczqm,null,AVAILABLE}
   [junit4]   2> 2196081 INFO  (closeThreadPool-10249-thread-4) [     ] o.e.j.s.AbstractConnector Started ServerConnector@1880c55{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:45175}
   [junit4]   2> 2196081 INFO  (closeThreadPool-10249-thread-4) [     ] o.e.j.s.Server Started @2196108ms
   [junit4]   2> 2196081 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/tempDir-001/jetty4, solrconfig=solrconfig.xml, hostContext=/oczqm, hostPort=45175, coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-4-001/cores}
   [junit4]   2> 2196081 ERROR (closeThreadPool-10249-thread-4) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2196081 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2196081 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.5.0
   [junit4]   2> 2196081 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2196081 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2196081 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-12-28T02:56:50.340Z
   [junit4]   2> 2196082 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2196083 INFO  (zkConnectionManagerCallback-10259-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2196083 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2196102 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 2196102 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-3-001/solr.xml
   [junit4]   2> 2196106 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 2196106 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2196106 INFO  (closeThreadPool-10249-thread-3) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 2196202 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 2196202 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-4-001/solr.xml
   [junit4]   2> 2196204 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 2196204 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2196205 INFO  (closeThreadPool-10249-thread-4) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 2196340 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 2196341 WARN  (closeThreadPool-10249-thread-2) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@10f7bc4[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2196341 WARN  (closeThreadPool-10249-thread-2) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@10f7bc4[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2196344 WARN  (closeThreadPool-10249-thread-2) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@c010e9[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2196344 WARN  (closeThreadPool-10249-thread-2) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@c010e9[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2196345 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:39087/solr
   [junit4]   2> 2196346 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2196355 INFO  (zkConnectionManagerCallback-10267-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2196355 INFO  (closeThreadPool-10249-thread-2) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2196470 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 2196472 INFO  (zkConnectionManagerCallback-10269-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2196472 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 2196475 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2196476 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.c.ZkController Publish node=127.0.0.1:36409_oczqm as DOWN
   [junit4]   2> 2196477 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 2196477 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:36409_oczqm
   [junit4]   2> 2196477 INFO  (zkCallback-10247-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2196478 WARN  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.c.CoreContainer Not all security plugins configured!  authentication=disabled authorization=disabled.  Solr is only as secure as you make it. Consider configuring authentication/authorization before exposing Solr to users internal or external.  See https://s.apache.org/solrsecurity for more info
   [junit4]   2> 2196483 INFO  (zkCallback-10232-thread-2) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2196483 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.Overseer processMessage: queueSize: 1, message = {
   [junit4]   2>   "operation":"downnode",
   [junit4]   2>   "node_name":"127.0.0.1:36409_oczqm"} current state version: 1
   [junit4]   2> 2196483 DEBUG (OverseerStateUpdate-72058373637013508-127.0.0.1:45459_oczqm-n_0000000000) [n:127.0.0.1:45459_oczqm     ] o.a.s.c.o.NodeMutator DownNode state invoked for node: 127.0.0.1:36409_oczqm
   [junit4]   2> 2196491 INFO  (zkCallback-10268-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2196503 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 2196526 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c
   [junit4]   2> 2196536 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c
   [junit4]   2> 2196536 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@f9fd8c
   [junit4]   2> 2196537 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.p.PackageLoader /packages.json updated to version -1
   [junit4]   2> 2196538 INFO  (closeThreadPool-10249-thread-2) [n:127.0.0.1:36409_oczqm     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001/shard-2-001/cores
   [junit4]   2> 2196572 INFO  (closeThreadPool-10249-thread-1) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 2196577 WARN  (closeThreadPool-10249-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@1213b78[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2196577 WARN  (closeThreadPool-10249-thread-1) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@1213b78[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2196581 WARN  (closeThreadPool-10249-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configur

[...truncated too long message...]

ped scavenging
   [junit4]   2> 2436036 INFO  (TEST-ShardSplitTest.testSplitMixedReplicaTypes-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer Shutting down ZkTestServer.
   [junit4]   2> 2436238 WARN  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer Watch limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2> 	14	/solr/collections/testSplitMixedReplicaTypes_rewrite/terms/shard1
   [junit4]   2> 	12	/solr/collections/testSplitMixedReplicaTypes_rewrite/terms/shard1_1
   [junit4]   2> 	10	/solr/collections/testSplitMixedReplicaTypes_rewrite/terms/shard1_0
   [junit4]   2> 	7	/solr/aliases.json
   [junit4]   2> 	5	/solr/packages.json
   [junit4]   2> 	5	/solr/security.json
   [junit4]   2> 	5	/solr/configs/conf1
   [junit4]   2> 	5	/solr/collections/collection1/terms/shard1
   [junit4]   2> 	5	/solr/collections/collection1/terms/shard2
   [junit4]   2> 	4	/solr/collections/collection1/collectionprops.json
   [junit4]   2> 	3	/solr/collections/testSplitMixedReplicaTypes_rewrite/collectionprops.json
   [junit4]   2> 	2	/solr/collections/control_collection/terms/shard1
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2> 	206	/solr/collections/testSplitMixedReplicaTypes_rewrite/state.json
   [junit4]   2> 	67	/solr/collections/collection1/state.json
   [junit4]   2> 	12	/solr/collections/control_collection/state.json
   [junit4]   2> 	7	/solr/clusterprops.json
   [junit4]   2> 	7	/solr/clusterstate.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2> 	95	/solr/overseer/queue
   [junit4]   2> 	35	/solr/live_nodes
   [junit4]   2> 	22	/solr/overseer/collection-queue-work
   [junit4]   2> 	13	/solr/collections
   [junit4]   2> 
   [junit4]   2> 2436239 INFO  (TEST-ShardSplitTest.testSplitMixedReplicaTypes-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer waitForServerDown: 127.0.0.1:34893
   [junit4]   2> 2436239 INFO  (TEST-ShardSplitTest.testSplitMixedReplicaTypes-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:34893
   [junit4]   2> 2436239 INFO  (TEST-ShardSplitTest.testSplitMixedReplicaTypes-seed#[94B0F475F4F8652]) [     ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 34893
   [junit4]   2> NOTE: leaving temporary files on disk at: /home/jenkins/workspace/Lucene-Solr-8.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.ShardSplitTest_94B0F475F4F8652-001
   [junit4]   2> Dec 28, 2019 3:00:50 AM com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
   [junit4]   2> WARNING: Will linger awaiting termination of 1 leaked thread(s).
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene84): {rnd_b=PostingsFormat(name=Direct), a_t=Lucene84, _root_=Lucene84, id=PostingsFormat(name=Direct), foo_s=Lucene84}, docValues:{rnd_b=DocValuesFormat(name=Lucene80), _version_=DocValuesFormat(name=Direct), a_t=DocValuesFormat(name=Direct), _root_=DocValuesFormat(name=Direct), a_i=DocValuesFormat(name=Lucene80), id=DocValuesFormat(name=Lucene80), foo_s=DocValuesFormat(name=Direct)}, maxPointsInLeafNode=1541, maxMBSortInHeap=7.695034272368648, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@105a8b1), locale=it-CH, timezone=America/Montreal
   [junit4]   2> NOTE: Linux 5.0.0-37-generic i386/Oracle Corporation 1.8.0_201 (32-bit)/cpus=8,threads=1,free=177306776,total=536870912
   [junit4]   2> NOTE: All tests run in this JVM: [CleanupOldIndexTest, TestReplicationHandlerBackup, EnumFieldTest, TestSchemaSimilarityResource, TestTrieFacet, RankQueryTest, AutoscalingHistoryHandlerTest, SolrCLIZkUtilsTest, TestJsonFacetsWithNestedObjects, SampleTest, TestConfigSetsAPIExclusivity, RequiredFieldsTest, FileUtilsTest, PackageManagerCLITest, ActionThrottleTest, TestIBSimilarityFactory, TestSuggestSpellingConverter, TestCoreBackup, ExplicitHLLTest, TestLogWatcher, PingRequestHandlerTest, TestPayloadCheckQParserPlugin, UpdateLogTest, SpellCheckCollatorWithCollapseTest, AutoAddReplicasIntegrationTest, SuggestComponentContextFilterQueryTest, TestSchemaField, TestSolrCoreParser, TestFacetMethods, EchoParamsTest, TestDownShardTolerantSearch, ZkCLITest, SolrCmdDistributorTest, SuggesterTSTTest, SolrCoreMetricManagerTest, TestZkAclsWithHadoopAuth, TestExportTool, HLLUtilTest, DocumentBuilderTest, RecoveryZkTest, TestSlowCompositeReaderWrapper, TestFieldCollectionResource, TestSolrXml, ConfigureRecoveryStrategyTest, HighlighterTest, TestSimPolicyCloud, TestDefaultStatsCache, ConnectionManagerTest, TestLuceneIndexBackCompat, TestSnapshotCloudManager, TestRealTimeGet, LoggingHandlerTest, TestMaxScoreQueryParser, TestWithCollection, TestCoreContainer, JSONWriterTest, NodeLostTriggerTest, TestQuerySenderListener, TestTrie, IndexSizeTriggerTest, NestedShardedAtomicUpdateTest, AddSchemaFieldsUpdateProcessorFactoryTest, PeerSyncWithBufferUpdatesTest, TestAuthenticationFramework, TestChildDocTransformerHierarchy, QueryResultKeyTest, HttpSolrCallGetCoreTest, TestFieldCache, HttpPartitionWithTlogReplicasTest, FieldAnalysisRequestHandlerTest, IndexSchemaTest, SolrRequestParserTest, TestLFUCache, TestLegacyBM25SimilarityFactory, TestSolrCoreProperties, TestCrossCoreJoin, TestGroupingSearch, TestCustomStream, DistributedTermsComponentTest, TestWriterPerf, CoreAdminOperationTest, DebugComponentTest, TestReload, TestTrackingShardHandlerFactory, TestMultiWordSynonyms, TestSubQueryTransformerCrossCore, TestFieldCacheSortRandom, TestSimComputePlanAction, TestLocalFSCloudBackupRestore, TestDocSet, TestManagedResourceStorage, TestHalfAndHalfDocValues, LeaderElectionTest, DocExpirationUpdateProcessorFactoryTest, BigEndianAscendingWordDeserializerTest, TestComplexPhraseQParserPlugin, TestCodecSupport, TestSimLargeCluster, TestDelegationWithHadoopAuth, LeaderTragicEventTest, TimeRoutedAliasUpdateProcessorTest, CurrencyRangeFacetCloudTest, HdfsSyncSliceTest, CursorPagingTest, TestRetrieveFieldsOptimizer, DistanceFunctionTest, SSLMigrationTest, XmlUpdateRequestHandlerTest, TestConfigSetImmutable, TestRandomCollapseQParserPlugin, TriggerIntegrationTest, HdfsUnloadDistributedZkTest, TestInfoStreamLogging, TestSolrDeletionPolicy1, UUIDFieldTest, TestRestoreCore, TestSortableTextField, TestHdfsUpdateLog, TestRebalanceLeaders, TestSolrCloudWithSecureImpersonation, TermVectorComponentTest, TestPKIAuthenticationPlugin, TestDynamicLoadingUrl, HttpTriggerListenerTest, TestSolr4Spatial, SolrJmxReporterTest, TestCollectionAPI, PrimUtilsTest, UUIDUpdateProcessorFallbackTest, TestPerFieldSimilarityWithDefaultOverride, HdfsChaosMonkeyNothingIsSafeTest, BlockJoinFacetRandomTest, TestDistributedMissingSort, TestCaffeineCache, ExitableDirectoryReaderTest, ResponseBuilderTest, SearchRateTriggerIntegrationTest, TestCloudNestedDocsSort, ReplaceNodeNoTargetTest, TestSearcherReuse, AssignTest, TestReversedWildcardFilterFactory, TestCSVResponseWriter, AnalysisAfterCoreReloadTest, BasicFunctionalityTest, ConvertedLegacyTest, 
TestRandomDVFaceting, TestRandomFaceting, TestSimpleTrackingShardHandler, TestCharFilters, TestDeprecatedFilters, TestLuceneMatchVersion, TestEmbeddedSolrServerSchemaAPI, TestJettySolrRunner, ConnectionReuseTest, AssignBackwardCompatibilityTest, BasicDistributedZk2Test, BasicZkTest, ChaosMonkeyNothingIsSafeTest, ChaosMonkeyNothingIsSafeWithPullReplicasTest, ConcurrentCreateRoutedAliasTest, ConfigSetsAPITest, DeleteInactiveReplicaTest, DeleteShardTest, DeleteStatusTest, DistribCursorPagingTest, DistribDocExpirationUpdateProcessorTest, DocValuesNotIndexedTest, ForceLeaderWithTlogReplicasTest, HealthCheckHandlerTest, HttpPartitionOnCommitTest, MetricsHistoryIntegrationTest, MigrateRouteKeyTest, MultiThreadedOCPTest, OverriddenZkACLAndCredentialsProvidersTest, OverseerModifyCollectionTest, OverseerSolrResponseTest, OverseerSolrResponseUnsafeSerializationTest, OverseerStatusTest, RestartWhileUpdatingTest, RollingRestartTest, SharedFSAutoReplicaFailoverTest, SliceStateTest, SplitShardTest, TestCloudPivotFacet, TestCloudRecovery2, TestConfigSetsAPI, TestLeaderElectionWithEmptyReplica, TestPrepRecovery, TestQueryingOnDownCollection, TestRandomFlRTGCloud, TestTolerantUpdateProcessorCloud, TestTolerantUpdateProcessorRandomCloud, TestUtilizeNode, TestWaitForStateWithJettyShutdowns, VMParamsZkACLAndCredentialsProvidersTest, ZkNodePropsTest, ZkShardTermsTest, ZkSolrClientTest, CollectionsAPIAsyncDistributedZkTest, ConcurrentDeleteAndCreateCollectionTest, CustomCollectionTest, HdfsCollectionsAPIDistributedZkTest, ShardSplitTest]
   [junit4] Completed [590/891 (1!)] on J0 in 245.46s, 11 tests, 1 failure, 3 skipped <<< FAILURES!

[...truncated 49315 lines...]
[repro] Jenkins log URL: https://jenkins.thetaphi.de/job/Lucene-Solr-8.x-Linux/1683/consoleText

[repro] Revision: a6a220c89ae9dbbebf23cca1eb923ddb45d9b433

[repro] Ant options: "-Dargs=-server -XX:+UseG1GC"
[repro] Repro line:  ant test  -Dtestcase=ShardSplitTest -Dtests.method=testSplitAfterFailedSplit -Dtests.seed=94B0F475F4F8652 -Dtests.multiplier=3 -Dtests.slow=true -Dtests.locale=it-CH -Dtests.timezone=America/Montreal -Dtests.asserts=true -Dtests.file.encoding=UTF-8

[repro] JUnit test result XML files will be moved to: ./repro-reports
[repro] ant clean

[...truncated 6 lines...]
[repro] Test suites by module:
[repro]    solr/core
[repro]       ShardSplitTest
[repro] ant compile-test

[...truncated 3600 lines...]
[repro] ant test-nocompile -Dtests.dups=5 -Dtests.maxfailures=5 -Dtests.class="*.ShardSplitTest" -Dtests.showOutput=onerror "-Dargs=-server -XX:+UseG1GC" -Dtests.seed=94B0F475F4F8652 -Dtests.multiplier=3 -Dtests.slow=true -Dtests.locale=it-CH -Dtests.timezone=America/Montreal -Dtests.asserts=true -Dtests.file.encoding=UTF-8

[...truncated 135 lines...]
[repro] Failures w/original seeds:
[repro]   0/5 failed: org.apache.solr.cloud.api.collections.ShardSplitTest
[repro] Exiting with code 0
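
Since 0/5 runs failed with the original seed, a minimal sketch of a broader beasting run for this flaky test (values are illustrative only, reusing properties that already appear in this log; run from solr/core after "ant compile-test", and drop the fixed seed so each run picks a new one):

    ant test-nocompile -Dtests.dups=20 -Dtests.maxfailures=1 \
        -Dtests.class="*.ShardSplitTest" -Dtests.method=testSplitAfterFailedSplit \
        -Dtests.showOutput=onerror -Dtests.multiplier=3 -Dtests.slow=true \
        "-Dargs=-server -XX:+UseG1GC"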

[...truncated 43 lines...]

Re: [JENKINS] Lucene-Solr-8.x-Linux (64bit/jdk-13.0.1) - Build # 1684 - Failure!

Posted by Robert Muir <rc...@gmail.com>.
Possibly this bug: https://bugs.openjdk.java.net/browse/JDK-8230565

So when 13.0.2 comes out, maybe it will go away.
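
To sanity-check that theory before 13.0.2, a minimal sketch (assuming the same jdk-13.0.1 install and the lucene checkout used by this job; seed and flags are the ones quoted below): rerun the crashing lucene/core test JVM once with the ZGC flags and once with the G1 flags the other jobs use, and see whether only the ZGC run hits the PhaseIterGVN segfault.

    cd lucene/core
    # same seed + the experimental ZGC flags that crashed (quoted further down)
    ant test -Dtests.seed=C474FBC796E01686 \
        "-Dargs=-XX:+UnlockExperimentalVMOptions -XX:+UseZGC"
    # same seed with G1GC instead; if this passes, it points at the ZGC/C2 bug above
    ant test -Dtests.seed=C474FBC796E01686 "-Dargs=-server -XX:+UseG1GC"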

On Sun, Dec 29, 2019 at 2:11 AM Robert Muir <rc...@gmail.com> wrote:

> Looks like a bug with this garbage collector (ZGC). We should file a bug
> report; the stack trace might be helpful to them:
>
> Current CompileTask:
> C2: 489037 23064 %     4       org.apache.lucene.index.RandomPostingsTester::verifyEnum @ 1415 (3617 bytes)
>
> Stack: [0x00007f7be6564000,0x00007f7be6665000],  sp=0x00007f7be665fb60,  free space=1006k
> Native frames: (J=compiled Java code, A=aot compiled Java code, j=interpreted, Vv=VM code, C=native code)
> V  [libjvm.so+0xce71c9]  PhaseIterGVN::transform_old(Node*)+0x159
> V  [libjvm.so+0xce3874]  PhaseIterGVN::optimize()+0x134
> V  [libjvm.so+0x1027184]  ZBarrierSetC2::insert_barriers_on_unsafe(PhaseIdealLoop*) const+0x364
> V  [libjvm.so+0x10283b8]  ZBarrierSetC2::optimize_loops(PhaseIdealLoop*, LoopOptsMode, VectorSet&, Node_Stack&, Node_List&) const+0x38
> V  [libjvm.so+0xb21ac6]  PhaseIdealLoop::build_and_optimize(LoopOptsMode)+0xad6
> V  [libjvm.so+0x638ecd]  PhaseIdealLoop::optimize(PhaseIterGVN&, LoopOptsMode)+0x1dd
> V  [libjvm.so+0x63703f]  Compile::Optimize()+0x83f
> V  [libjvm.so+0x63860a]  Compile::Compile(ciEnv*, C2Compiler*, ciMethod*, int, bool, bool, bool, DirectiveSet*)+0xd2a
> V  [libjvm.so+0x55fadc]  C2Compiler::compile_method(ciEnv*, ciMethod*, int, DirectiveSet*)+0xbc
> V  [libjvm.so+0x64229d]  CompileBroker::invoke_compiler_on_method(CompileTask*)+0x3fd
> V  [libjvm.so+0x643c70]  CompileBroker::compiler_thread_loop()+0x5d0
> V  [libjvm.so+0xf6b9fe]  JavaThread::thread_main_inner()+0x1be
> V  [libjvm.so+0xf707fd]  Thread::call_run()+0x10d
> V  [libjvm.so+0xc875b7]  thread_native_entry(Thread*)+0xe7
>
>
> On Sun, Dec 29, 2019 at 1:29 AM Mikhail Khludnev <mk...@apache.org> wrote:
>
>> Hi, Dev.
>>
>> This has been happening throughout December. What are we supposed to do?
>>
>>    [junit4] #  SIGSEGV (0xb) at pc=0x00007f7e1c0e01c9, pid=30883,
>> tid=30952
>>    [junit4] #
>>    [junit4] # JRE version: OpenJDK Runtime Environment (13.0.1+9) (build
>> 13.0.1+9)
>>    [junit4] # Java VM: OpenJDK 64-Bit Server VM (13.0.1+9, mixed mode,
>> tiered, z gc, linux-amd64)
>>    [junit4] # Problematic frame:
>>    [junit4] # V  [libjvm.so+0xce71c9]  PhaseIterGVN::transform_old
>> (Node*)+0x159
>>
>> On Sat, Dec 28, 2019 at 1:09 PM Policeman Jenkins Server <
>> jenkins@thetaphi.de> wrote:
>>
>>> Build: https://jenkins.thetaphi.de/job/Lucene-Solr-8.x-Linux/1684/
>>> Java: 64bit/jdk-13.0.1 -XX:+UseCompressedOops
>>> -XX:+UnlockExperimentalVMOptions -XX:+UseZGC
>>>
>>> All tests passed
>>>
>>> Build Log:
>>> [...truncated 1396 lines...]
>>>    [junit4] JVM J0: stdout was not empty, see:
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp/junit4-J0-20191228_095417_89118188758387783302429.sysout
>>>    [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
>>>    [junit4] #
>>>    [junit4] # A fatal error has been detected by the Java Runtime
>>> Environment:
>>>    [junit4] #
>>>    [junit4] #  SIGSEGV (0xb) at pc=0x00007f7e1c0e01c9, pid=30883,
>>> tid=30952
>>>    [junit4] #
>>>    [junit4] # JRE version: OpenJDK Runtime Environment (13.0.1+9) (build
>>> 13.0.1+9)
>>>    [junit4] # Java VM: OpenJDK 64-Bit Server VM (13.0.1+9, mixed mode,
>>> tiered, z gc, linux-amd64)
>>>    [junit4] # Problematic frame:
>>>    [junit4] # V  [libjvm.so+0xce71c9]
>>> PhaseIterGVN::transform_old(Node*)+0x159
>>>    [junit4] #
>>>    [junit4] # No core dump will be written. Core dumps have been
>>> disabled. To enable core dumping, try "ulimit -c unlimited" before starting
>>> Java again
>>>    [junit4] #
>>>    [junit4] # An error report file with more information is saved as:
>>>    [junit4] #
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/J0/hs_err_pid30883.log
>>>    [junit4] [thread 6488 also had an error]
>>>    [junit4] #
>>>    [junit4] # Compiler replay data is saved as:
>>>    [junit4] #
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/J0/replay_pid30883.log
>>>    [junit4] #
>>>    [junit4] # If you would like to submit a bug report, please visit:
>>>    [junit4] #   https://github.com/AdoptOpenJDK/openjdk-build/issues
>>>    [junit4] #
>>>    [junit4] <<< JVM J0: EOF ----
>>>
>>> [...truncated 798 lines...]
>>>    [junit4] ERROR: JVM J0 ended with an exception, command line:
>>> /home/jenkins/tools/java/64bit/jdk-13.0.1/bin/java -XX:+UseCompressedOops
>>> -XX:+UnlockExperimentalVMOptions -XX:+UseZGC
>>> -XX:+HeapDumpOnOutOfMemoryError
>>> -XX:HeapDumpPath=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/heapdumps
>>> -ea -esa --illegal-access=deny -Dtests.prefix=tests
>>> -Dtests.seed=C474FBC796E01686 -Xmx512M -Dtests.iters= -Dtests.verbose=false
>>> -Dtests.infostream=false -Dtests.codec=random -Dtests.postingsformat=random
>>> -Dtests.docvaluesformat=random -Dtests.locale=random
>>> -Dtests.timezone=random -Dtests.directory=random
>>> -Dtests.linedocsfile=europarl.lines.txt.gz -Dtests.luceneMatchVersion=8.5.0
>>> -Dtests.cleanthreads=perMethod
>>> -Djava.util.logging.config.file=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/tools/junit4/logging.properties
>>> -Dtests.nightly=false -Dtests.weekly=false -Dtests.monster=false
>>> -Dtests.slow=true -Dtests.asserts=true -Dtests.multiplier=3
>>> -DtempDir=./temp -Djava.io.tmpdir=./temp
>>> -Dcommon.dir=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene
>>> -Dclover.db.dir=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/clover/db
>>> -Djava.security.policy=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/tools/junit4/tests.policy
>>> -Dtests.LUCENE_VERSION=8.5.0 -Djetty.testMode=1 -Djetty.insecurerandom=1
>>> -Dsolr.directoryFactory=org.apache.solr.core.MockDirectoryFactory
>>> -Djava.awt.headless=true -Djdk.map.althashing.threshold=0
>>> -Dtests.src.home=/home/jenkins/workspace/Lucene-Solr-8.x-Linux
>>> -Djava.security.egd=file:/dev/./urandom
>>> -Djunit4.childvm.cwd=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/J0
>>> -Djunit4.tempDir=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp
>>> -Djunit4.childvm.id=0 -Djunit4.childvm.count=3 -Dfile.encoding=US-ASCII
>>> -Djava.security.manager=org.apache.lucene.util.TestSecurityManager
>>> -Dtests.filterstacks=true -Dtests.leaveTemporary=false
>>> -Dtests.badapples=false -classpath
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/codecs/classes/java:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/test-framework/classes/java:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/test-framework/lib/hamcrest-core-1.3.jar:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/test-framework/lib/junit-4.12.jar:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/test-framework/lib/randomizedtesting-runner-2.7.2.jar:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/classes/java9:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/classes/java:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/classes/test:/home/jenkins/.ivy2/cache/com.carrotsearch.randomizedtesting/junit4-ant/jars/junit4-ant-2.7.2.jar
>>> com.carrotsearch.ant.tasks.junit4.slave.SlaveMainSafe -eventsfile
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp/junit4-J0-20191228_095417_8917447844543437384674.events
>>> @/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp/junit4-J0-20191228_095417_891226391883297247733.suites
>>> -stdin
>>>    [junit4] ERROR: JVM J0 ended with an exception: Forked process
>>> returned with error code: 134. Very likely a JVM crash.  See process stdout
>>> at:
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp/junit4-J0-20191228_095417_89118188758387783302429.sysout
>>>    [junit4]     at
>>> com.carrotsearch.ant.tasks.junit4.JUnit4.executeSlave(JUnit4.java:1542)
>>>    [junit4]     at
>>> com.carrotsearch.ant.tasks.junit4.JUnit4.access$000(JUnit4.java:123)
>>>    [junit4]     at
>>> com.carrotsearch.ant.tasks.junit4.JUnit4$2.call(JUnit4.java:997)
>>>    [junit4]     at
>>> com.carrotsearch.ant.tasks.junit4.JUnit4$2.call(JUnit4.java:994)
>>>    [junit4]     at
>>> java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
>>>    [junit4]     at
>>> java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
>>>    [junit4]     at
>>> java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
>>>    [junit4]     at java.base/java.lang.Thread.run(Thread.java:830)
>>>
>>> BUILD FAILED
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/build.xml:634: The
>>> following error occurred while executing this line:
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/build.xml:578: The
>>> following error occurred while executing this line:
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/build.xml:59: The
>>> following error occurred while executing this line:
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build.xml:50: The
>>> following error occurred while executing this line:
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/common-build.xml:1590:
>>> The following error occurred while executing this line:
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/common-build.xml:1117:
>>> At least one slave process threw an exception, first: Forked process
>>> returned with error code: 134. Very likely a JVM crash.  See process stdout
>>> at:
>>> /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp/junit4-J0-20191228_095417_89118188758387783302429.sysout
>>>
>>> Total time: 15 minutes 31 seconds
>>> Build step 'Invoke Ant' marked build as failure
>>> Archiving artifacts
>>> Setting
>>> ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
>>> [WARNINGS] Skipping publisher since build result is FAILURE
>>> Recording test results
>>> Setting
>>> ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
>>> Email was triggered for: Failure - Any
>>> Sending email for trigger: Failure - Any
>>> Setting
>>> ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
>>> Setting
>>> ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
>>> Setting
>>> ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
>>> Setting
>>> ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
>>> Setting
>>> ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
>>> Setting
>>> ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
>>>
>>> ---------------------------------------------------------------------
>>> To unsubscribe, e-mail: builds-unsubscribe@lucene.apache.org
>>> For additional commands, e-mail: builds-help@lucene.apache.org
>>
>>
>>
>> --
>> Sincerely yours
>> Mikhail Khludnev
>>
>

Re: [JENKINS] Lucene-Solr-8.x-Linux (64bit/jdk-13.0.1) - Build # 1684 - Failure!

Posted by Robert Muir <rc...@gmail.com>.
Looks like a bug with this garbage collector (ZGC). We should file a bug
report; the stack trace might be helpful to them:

Current CompileTask:
C2: 489037 23064 %     4
org.apache.lucene.index.RandomPostingsTester::verifyEnum @ 1415 (3617
bytes)

Stack: [0x00007f7be6564000,0x00007f7be6665000],
sp=0x00007f7be665fb60,  free space=1006k
Native frames: (J=compiled Java code, A=aot compiled Java code,
j=interpreted, Vv=VM code, C=native code)
V  [libjvm.so+0xce71c9]  PhaseIterGVN::transform_old(Node*)+0x159
V  [libjvm.so+0xce3874]  PhaseIterGVN::optimize()+0x134
V  [libjvm.so+0x1027184]
ZBarrierSetC2::insert_barriers_on_unsafe(PhaseIdealLoop*) const+0x364
V  [libjvm.so+0x10283b8]
ZBarrierSetC2::optimize_loops(PhaseIdealLoop*, LoopOptsMode,
VectorSet&, Node_Stack&, Node_List&) const+0x38
V  [libjvm.so+0xb21ac6]  PhaseIdealLoop::build_and_optimize(LoopOptsMode)+0xad6
V  [libjvm.so+0x638ecd]  PhaseIdealLoop::optimize(PhaseIterGVN&,
LoopOptsMode)+0x1dd
V  [libjvm.so+0x63703f]  Compile::Optimize()+0x83f
V  [libjvm.so+0x63860a]  Compile::Compile(ciEnv*, C2Compiler*,
ciMethod*, int, bool, bool, bool, DirectiveSet*)+0xd2a
V  [libjvm.so+0x55fadc]  C2Compiler::compile_method(ciEnv*, ciMethod*,
int, DirectiveSet*)+0xbc
V  [libjvm.so+0x64229d]
CompileBroker::invoke_compiler_on_method(CompileTask*)+0x3fd
V  [libjvm.so+0x643c70]  CompileBroker::compiler_thread_loop()+0x5d0
V  [libjvm.so+0xf6b9fe]  JavaThread::thread_main_inner()+0x1be
V  [libjvm.so+0xf707fd]  Thread::call_run()+0x10d
V  [libjvm.so+0xc875b7]  thread_native_entry(Thread*)+0xe7


On Sun, Dec 29, 2019 at 1:29 AM Mikhail Khludnev <mk...@apache.org> wrote:

> Hi, Dev.
>
> This has been happening throughout December. What are we supposed to do?
>
>    [junit4] #  SIGSEGV (0xb) at pc=0x00007f7e1c0e01c9, pid=30883, tid=30952
>    [junit4] #
>    [junit4] # JRE version: OpenJDK Runtime Environment (13.0.1+9) (build
> 13.0.1+9)
>    [junit4] # Java VM: OpenJDK 64-Bit Server VM (13.0.1+9, mixed mode,
> tiered, z gc, linux-amd64)
>    [junit4] # Problematic frame:
>    [junit4] # V  [libjvm.so+0xce71c9]  PhaseIterGVN::transform_old
> (Node*)+0x159
>
> On Sat, Dec 28, 2019 at 1:09 PM Policeman Jenkins Server <
> jenkins@thetaphi.de> wrote:
>
>> Build: https://jenkins.thetaphi.de/job/Lucene-Solr-8.x-Linux/1684/
>> Java: 64bit/jdk-13.0.1 -XX:+UseCompressedOops
>> -XX:+UnlockExperimentalVMOptions -XX:+UseZGC
>>
>> All tests passed
>>
>> [...full build log omitted; identical to the build #1684 report further down this thread...]
>
>
>
> --
> Sincerely yours
> Mikhail Khludnev
>

Re: [JENKINS] Lucene-Solr-8.x-Linux (64bit/jdk-13.0.1) - Build # 1684 - Failure!

Posted by Mikhail Khludnev <mk...@apache.org>.
Hi, Dev.

This has been happening throughout December. What are we supposed to do?

   [junit4] #  SIGSEGV (0xb) at pc=0x00007f7e1c0e01c9, pid=30883, tid=30952
   [junit4] #
   [junit4] # JRE version: OpenJDK Runtime Environment (13.0.1+9) (build 13.0.1+9)
   [junit4] # Java VM: OpenJDK 64-Bit Server VM (13.0.1+9, mixed mode, tiered, z gc, linux-amd64)
   [junit4] # Problematic frame:
   [junit4] # V  [libjvm.so+0xce71c9]  PhaseIterGVN::transform_old(Node*)+0x159

On Sat, Dec 28, 2019 at 1:09 PM Policeman Jenkins Server <
jenkins@thetaphi.de> wrote:

> Build: https://jenkins.thetaphi.de/job/Lucene-Solr-8.x-Linux/1684/
> Java: 64bit/jdk-13.0.1 -XX:+UseCompressedOops
> -XX:+UnlockExperimentalVMOptions -XX:+UseZGC
>
> All tests passed
>
> [...full build log omitted; identical to the build #1684 report below...]



-- 
Sincerely yours
Mikhail Khludnev

[JENKINS] Lucene-Solr-8.x-Linux (64bit/jdk-13.0.1) - Build # 1684 - Failure!

Posted by Policeman Jenkins Server <je...@thetaphi.de>.
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-8.x-Linux/1684/
Java: 64bit/jdk-13.0.1 -XX:+UseCompressedOops -XX:+UnlockExperimentalVMOptions -XX:+UseZGC

All tests passed

Build Log:
[...truncated 1396 lines...]
   [junit4] JVM J0: stdout was not empty, see: /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp/junit4-J0-20191228_095417_89118188758387783302429.sysout
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] #
   [junit4] # A fatal error has been detected by the Java Runtime Environment:
   [junit4] #
   [junit4] #  SIGSEGV (0xb) at pc=0x00007f7e1c0e01c9, pid=30883, tid=30952
   [junit4] #
   [junit4] # JRE version: OpenJDK Runtime Environment (13.0.1+9) (build 13.0.1+9)
   [junit4] # Java VM: OpenJDK 64-Bit Server VM (13.0.1+9, mixed mode, tiered, z gc, linux-amd64)
   [junit4] # Problematic frame:
   [junit4] # V  [libjvm.so+0xce71c9]  PhaseIterGVN::transform_old(Node*)+0x159
   [junit4] #
   [junit4] # No core dump will be written. Core dumps have been disabled. To enable core dumping, try "ulimit -c unlimited" before starting Java again
   [junit4] #
   [junit4] # An error report file with more information is saved as:
   [junit4] # /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/J0/hs_err_pid30883.log
   [junit4] [thread 6488 also had an error]
   [junit4] #
   [junit4] # Compiler replay data is saved as:
   [junit4] # /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/J0/replay_pid30883.log
   [junit4] #
   [junit4] # If you would like to submit a bug report, please visit:
   [junit4] #   https://github.com/AdoptOpenJDK/openjdk-build/issues
   [junit4] #
   [junit4] <<< JVM J0: EOF ----

[...truncated 798 lines...]
   [junit4] ERROR: JVM J0 ended with an exception, command line: /home/jenkins/tools/java/64bit/jdk-13.0.1/bin/java -XX:+UseCompressedOops -XX:+UnlockExperimentalVMOptions -XX:+UseZGC -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/heapdumps -ea -esa --illegal-access=deny -Dtests.prefix=tests -Dtests.seed=C474FBC796E01686 -Xmx512M -Dtests.iters= -Dtests.verbose=false -Dtests.infostream=false -Dtests.codec=random -Dtests.postingsformat=random -Dtests.docvaluesformat=random -Dtests.locale=random -Dtests.timezone=random -Dtests.directory=random -Dtests.linedocsfile=europarl.lines.txt.gz -Dtests.luceneMatchVersion=8.5.0 -Dtests.cleanthreads=perMethod -Djava.util.logging.config.file=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/tools/junit4/logging.properties -Dtests.nightly=false -Dtests.weekly=false -Dtests.monster=false -Dtests.slow=true -Dtests.asserts=true -Dtests.multiplier=3 -DtempDir=./temp -Djava.io.tmpdir=./temp -Dcommon.dir=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene -Dclover.db.dir=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/clover/db -Djava.security.policy=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/tools/junit4/tests.policy -Dtests.LUCENE_VERSION=8.5.0 -Djetty.testMode=1 -Djetty.insecurerandom=1 -Dsolr.directoryFactory=org.apache.solr.core.MockDirectoryFactory -Djava.awt.headless=true -Djdk.map.althashing.threshold=0 -Dtests.src.home=/home/jenkins/workspace/Lucene-Solr-8.x-Linux -Djava.security.egd=file:/dev/./urandom -Djunit4.childvm.cwd=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/J0 -Djunit4.tempDir=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp -Djunit4.childvm.id=0 -Djunit4.childvm.count=3 -Dfile.encoding=US-ASCII -Djava.security.manager=org.apache.lucene.util.TestSecurityManager -Dtests.filterstacks=true -Dtests.leaveTemporary=false -Dtests.badapples=false -classpath /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/codecs/classes/java:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/test-framework/classes/java:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/test-framework/lib/hamcrest-core-1.3.jar:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/test-framework/lib/junit-4.12.jar:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/test-framework/lib/randomizedtesting-runner-2.7.2.jar:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/classes/java9:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/classes/java:/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/classes/test:/home/jenkins/.ivy2/cache/com.carrotsearch.randomizedtesting/junit4-ant/jars/junit4-ant-2.7.2.jar com.carrotsearch.ant.tasks.junit4.slave.SlaveMainSafe -eventsfile /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp/junit4-J0-20191228_095417_8917447844543437384674.events @/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp/junit4-J0-20191228_095417_891226391883297247733.suites -stdin
   [junit4] ERROR: JVM J0 ended with an exception: Forked process returned with error code: 134. Very likely a JVM crash.  See process stdout at: /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp/junit4-J0-20191228_095417_89118188758387783302429.sysout
   [junit4] 	at com.carrotsearch.ant.tasks.junit4.JUnit4.executeSlave(JUnit4.java:1542)
   [junit4] 	at com.carrotsearch.ant.tasks.junit4.JUnit4.access$000(JUnit4.java:123)
   [junit4] 	at com.carrotsearch.ant.tasks.junit4.JUnit4$2.call(JUnit4.java:997)
   [junit4] 	at com.carrotsearch.ant.tasks.junit4.JUnit4$2.call(JUnit4.java:994)
   [junit4] 	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
   [junit4] 	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
   [junit4] 	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
   [junit4] 	at java.base/java.lang.Thread.run(Thread.java:830)

BUILD FAILED
/home/jenkins/workspace/Lucene-Solr-8.x-Linux/build.xml:634: The following error occurred while executing this line:
/home/jenkins/workspace/Lucene-Solr-8.x-Linux/build.xml:578: The following error occurred while executing this line:
/home/jenkins/workspace/Lucene-Solr-8.x-Linux/build.xml:59: The following error occurred while executing this line:
/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build.xml:50: The following error occurred while executing this line:
/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/common-build.xml:1590: The following error occurred while executing this line:
/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/common-build.xml:1117: At least one slave process threw an exception, first: Forked process returned with error code: 134. Very likely a JVM crash.  See process stdout at: /home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/temp/junit4-J0-20191228_095417_89118188758387783302429.sysout

Total time: 15 minutes 31 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Setting ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
[WARNINGS] Skipping publisher since build result is FAILURE
Recording test results
Setting ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any
Setting ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Setting ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Setting ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Setting ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Setting ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Setting ANT_1_8_2_HOME=/home/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
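
For anyone following up on the hints in the hs_err output above, a rough sketch
of how they could be exercised (paths taken from this report; the replay flags
are standard HotSpot diagnostic options, not verified against this particular
build):

  # enable core dumps before restarting Java, as the report suggests
  ulimit -c unlimited

  # try to reproduce the C2 crash from the saved compiler replay data
  /home/jenkins/tools/java/64bit/jdk-13.0.1/bin/java \
    -XX:+UnlockExperimentalVMOptions -XX:+UseZGC \
    -XX:+UnlockDiagnosticVMOptions -XX:+ReplayCompiles \
    -XX:ReplayDataFile=/home/jenkins/workspace/Lucene-Solr-8.x-Linux/lucene/build/core/test/J0/replay_pid30883.log

If it reproduces, the new hs_err file plus the replay data can be attached to a
report at https://github.com/AdoptOpenJDK/openjdk-build/issues.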