You are viewing a plain text version of this content. The canonical link for it is here.
Posted to builds@lucene.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2021/08/25 16:43:29 UTC

[JENKINS] Lucene » Lucene-Solr-NightlyTests-8.x - Build # 371 - Unstable!

Build: https://ci-builds.apache.org/job/Lucene/job/Lucene-Solr-NightlyTests-8.x/371/

1 test failed.
FAILED:  org.apache.solr.handler.TestHdfsBackupRestoreCore.test

Error Message:
Server returned HTTP response code: 500 for URL: http://127.0.0.1:35761/solr/admin/cores?action=BACKUPCORE&core=HdfsBackupRestore_shard1_replica_n1&name=bzk&shardBackupId=md_standalone_0&repository=hdfs

Stack Trace:
java.io.IOException: Server returned HTTP response code: 500 for URL: http://127.0.0.1:35761/solr/admin/cores?action=BACKUPCORE&core=HdfsBackupRestore_shard1_replica_n1&name=bzk&shardBackupId=md_standalone_0&repository=hdfs
	at __randomizedtesting.SeedInfo.seed([70D348CD0259E0D7:F8877717ACA58D2F]:0)
	at sun.net.www.protocol.http.HttpURLConnection.getInputStream0(HttpURLConnection.java:1900)
	at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1498)
	at java.net.URL.openStream(URL.java:1092)
	at org.apache.solr.handler.BackupRestoreUtils.executeHttpRequest(BackupRestoreUtils.java:98)
	at org.apache.solr.handler.BackupRestoreUtils.runCoreAdminCommand(BackupRestoreUtils.java:86)
	at org.apache.solr.handler.TestHdfsBackupRestoreCore.test(TestHdfsBackupRestoreCore.java:203)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)




Build Log:
[...truncated 13663 lines...]
   [junit4] Suite: org.apache.solr.handler.TestHdfsBackupRestoreCore
   [junit4]   2> 420498 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.a.s.SolrTestCase Setting 'solr.default.confdir' system property to test-framework derived value of '/home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/server/solr/configsets/_default/conf'
   [junit4]   2> 420500 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 420501 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestHdfsBackupRestoreCore_70D348CD0259E0D7-001/data-dir-44-001
   [junit4]   2> 420501 WARN  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=1 numCloses=1
   [junit4]   2> 420502 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.a.s.SolrTestCaseJ4 Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP=false
   [junit4]   2> 420503 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl=None)
   [junit4]   2> 420503 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.a.s.c.SolrCloudTestCase Using per-replica state
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 420837 WARN  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 420842 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.e.j.s.Server jetty-9.4.41.v20210516; built: 2021-05-16T23:56:28.993Z; git: 98607f93c7833e7dc59489b13f3cb0a114fb9f4c; jvm 1.8.0_291-b10
   [junit4]   2> 420858 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 420858 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 420858 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 420864 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@783587f1{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 421026 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@1165d9cb{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/jetty-localhost_localdomain-43253-hadoop-hdfs-3_2_2-tests_jar-_-any-26110454352027119/webapp/,AVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/hdfs}
   [junit4]   2> 421027 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@5000c713{HTTP/1.1, (http/1.1)}{localhost.localdomain:43253}
   [junit4]   2> 421027 INFO  (SUITE-TestHdfsBackupRestoreCore-seed#[70D348CD0259E0D7]-worker) [     ] o.e.j.s.Server Started @421056ms
   [junit4]   2> 421385 WARN  (Listener at localhost.localdomain/39737) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 421398 INFO  (Listener at localhost.localdomain/39737) [     ] o.e.j.s.Server jetty-9.4.41.v20210516; built: 2021-05-16T23:56:28.993Z; git: 98607f93c7833e7dc59489b13f3cb0a114fb9f4c; jvm 1.8.0_291-b10
   [junit4]   2> 421410 INFO  (Listener at localhost.localdomain/39737) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 421410 INFO  (Listener at localhost.localdomain/39737) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 421410 INFO  (Listener at localhost.localdomain/39737) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 421426 INFO  (Listener at localhost.localdomain/39737) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4d2b4b9d{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 421554 INFO  (Listener at localhost.localdomain/39737) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@5b47aeda{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/jetty-localhost-35067-hadoop-hdfs-3_2_2-tests_jar-_-any-1662387478397773292/webapp/,AVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/datanode}
   [junit4]   2> 421554 INFO  (Listener at localhost.localdomain/39737) [     ] o.e.j.s.AbstractConnector Started ServerConnector@4f6a356f{HTTP/1.1, (http/1.1)}{localhost:35067}
   [junit4]   2> 421554 INFO  (Listener at localhost.localdomain/39737) [     ] o.e.j.s.Server Started @421583ms
   [junit4]   2> 421709 WARN  (Listener at localhost.localdomain/45141) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 421711 INFO  (Listener at localhost.localdomain/45141) [     ] o.e.j.s.Server jetty-9.4.41.v20210516; built: 2021-05-16T23:56:28.993Z; git: 98607f93c7833e7dc59489b13f3cb0a114fb9f4c; jvm 1.8.0_291-b10
   [junit4]   2> 421714 INFO  (Listener at localhost.localdomain/45141) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 421714 INFO  (Listener at localhost.localdomain/45141) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 421714 INFO  (Listener at localhost.localdomain/45141) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 421718 INFO  (Listener at localhost.localdomain/45141) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4422c6e9{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 421930 INFO  (Listener at localhost.localdomain/45141) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@53d1041b{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/jetty-localhost-33583-hadoop-hdfs-3_2_2-tests_jar-_-any-2112581086743682434/webapp/,AVAILABLE}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/datanode}
   [junit4]   2> 421930 INFO  (Listener at localhost.localdomain/45141) [     ] o.e.j.s.AbstractConnector Started ServerConnector@5eca1439{HTTP/1.1, (http/1.1)}{localhost:33583}
   [junit4]   2> 421930 INFO  (Listener at localhost.localdomain/45141) [     ] o.e.j.s.Server Started @421959ms
   [junit4]   2> 421931 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xe72a89d47e8577ae: Processing first storage report for DS-9dc2a2b6-600f-424c-8519-44767712dbfe from datanode dc1a14c8-ef32-4acb-a5d1-68b2fda02bb8
   [junit4]   2> 421931 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xe72a89d47e8577ae: from storage DS-9dc2a2b6-600f-424c-8519-44767712dbfe node DatanodeRegistration(127.0.0.1:39377, datanodeUuid=dc1a14c8-ef32-4acb-a5d1-68b2fda02bb8, infoPort=44981, infoSecurePort=0, ipcPort=45141, storageInfo=lv=-57;cid=testClusterID;nsid=136974695;c=1629903877233), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 421931 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xe72a89d47e8577ae: Processing first storage report for DS-c8239095-1ae7-45b9-a6f8-14802fad349a from datanode dc1a14c8-ef32-4acb-a5d1-68b2fda02bb8
   [junit4]   2> 421931 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xe72a89d47e8577ae: from storage DS-c8239095-1ae7-45b9-a6f8-14802fad349a node DatanodeRegistration(127.0.0.1:39377, datanodeUuid=dc1a14c8-ef32-4acb-a5d1-68b2fda02bb8, infoPort=44981, infoSecurePort=0, ipcPort=45141, storageInfo=lv=-57;cid=testClusterID;nsid=136974695;c=1629903877233), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 422134 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x4526afac34ab3b2b: Processing first storage report for DS-37bad23f-6cb2-4eff-b73b-bc5c2a4ca5bc from datanode 1d99bb70-cab6-4588-b98b-5ac57a5408b4
   [junit4]   2> 422134 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x4526afac34ab3b2b: from storage DS-37bad23f-6cb2-4eff-b73b-bc5c2a4ca5bc node DatanodeRegistration(127.0.0.1:33737, datanodeUuid=1d99bb70-cab6-4588-b98b-5ac57a5408b4, infoPort=35409, infoSecurePort=0, ipcPort=45527, storageInfo=lv=-57;cid=testClusterID;nsid=136974695;c=1629903877233), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 422134 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x4526afac34ab3b2b: Processing first storage report for DS-e605fc95-ecbf-4832-b6b6-710aac256c23 from datanode 1d99bb70-cab6-4588-b98b-5ac57a5408b4
   [junit4]   2> 422134 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x4526afac34ab3b2b: from storage DS-e605fc95-ecbf-4832-b6b6-710aac256c23 node DatanodeRegistration(127.0.0.1:33737, datanodeUuid=1d99bb70-cab6-4588-b98b-5ac57a5408b4, infoPort=35409, infoSecurePort=0, ipcPort=45527, storageInfo=lv=-57;cid=testClusterID;nsid=136974695;c=1629903877233), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 422222 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 1 servers in /home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestHdfsBackupRestoreCore_70D348CD0259E0D7-001/tempDir-002
   [junit4]   2> 422222 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 422227 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer client port: 0.0.0.0/0.0.0.0:0
   [junit4]   2> 422227 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 422227 WARN  (ZkTestServer Run Thread) [     ] o.a.z.s.ServerCnxnFactory maxCnxns is not configured, using default value 0.
   [junit4]   2> 422327 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.ZkTestServer start zk server on port: 42207
   [junit4]   2> 422327 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:42207
   [junit4]   2> 422327 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:42207
   [junit4]   2> 422327 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 42207
   [junit4]   2> 422336 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 422351 INFO  (zkConnectionManagerCallback-3417-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 422351 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 422366 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 422377 INFO  (zkConnectionManagerCallback-3419-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 422378 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 422379 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 422382 INFO  (zkConnectionManagerCallback-3421-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 422382 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 422490 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
   [junit4]   2> 422490 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 2 ...
   [junit4]   2> 422491 INFO  (jetty-launcher-3422-thread-1) [     ] o.e.j.s.Server jetty-9.4.41.v20210516; built: 2021-05-16T23:56:28.993Z; git: 98607f93c7833e7dc59489b13f3cb0a114fb9f4c; jvm 1.8.0_291-b10
   [junit4]   2> 422537 INFO  (jetty-launcher-3422-thread-1) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 422537 INFO  (jetty-launcher-3422-thread-1) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 422537 INFO  (jetty-launcher-3422-thread-1) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 422546 INFO  (jetty-launcher-3422-thread-1) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@437eb845{/solr,null,AVAILABLE}
   [junit4]   2> 422547 INFO  (jetty-launcher-3422-thread-1) [     ] o.e.j.s.AbstractConnector Started ServerConnector@14328b34{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:35761}
   [junit4]   2> 422547 INFO  (jetty-launcher-3422-thread-1) [     ] o.e.j.s.Server Started @422576ms
   [junit4]   2> 422547 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, zkHost=127.0.0.1:42207/solr, hostPort=35761}
   [junit4]   2> 422547 ERROR (jetty-launcher-3422-thread-1) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 422547 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 422547 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.10.0
   [junit4]   2> 422547 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 422547 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: /home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr
   [junit4]   2> 422547 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2021-08-25T15:04:39.236Z
   [junit4]   2> 422553 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 422555 INFO  (zkConnectionManagerCallback-3424-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 422556 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 422558 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 422563 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@5b9801f2, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 424980 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 424982 WARN  (jetty-launcher-3422-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@5368f334[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 424982 WARN  (jetty-launcher-3422-thread-1) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@5368f334[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 424987 WARN  (jetty-launcher-3422-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@22f28216[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 424987 WARN  (jetty-launcher-3422-thread-1) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@22f28216[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 424988 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:42207/solr
   [junit4]   2> 424990 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 424994 INFO  (zkConnectionManagerCallback-3435-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 424994 INFO  (jetty-launcher-3422-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 424998 WARN  (jetty-launcher-3422-thread-1-SendThread(127.0.0.1:42207)) [     ] o.a.z.ClientCnxn An exception was thrown while closing send thread for session 0x100d75dd7190004.
   [junit4]   2>           => EndOfStreamException: Unable to read additional data from server sessionid 0x100d75dd7190004, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:77)
   [junit4]   2> org.apache.zookeeper.ClientCnxn$EndOfStreamException: Unable to read additional data from server sessionid 0x100d75dd7190004, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:77) ~[zookeeper-3.6.2.jar:3.6.2]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doTransport(ClientCnxnSocketNIO.java:350) ~[zookeeper-3.6.2.jar:3.6.2]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1275) [zookeeper-3.6.2.jar:3.6.2]
   [junit4]   2> 425118 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 425120 INFO  (zkConnectionManagerCallback-3437-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 425120 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 425205 WARN  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.c.ZkController Contents of zookeeper /security.json are world-readable; consider setting up ACLs as described in https://solr.apache.org/guide/zookeeper-access-control.html
   [junit4]   2> 425211 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:35761_solr
   [junit4]   2> 425212 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.c.Overseer Overseer (id=72294392078598149-127.0.0.1:35761_solr-n_0000000000) starting
   [junit4]   2> 425218 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:35761_solr
   [junit4]   2> 425218 INFO  (OverseerStateUpdate-72294392078598149-127.0.0.1:35761_solr-n_0000000000) [n:127.0.0.1:35761_solr     ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:35761_solr
   [junit4]   2> 425220 INFO  (OverseerStateUpdate-72294392078598149-127.0.0.1:35761_solr-n_0000000000) [n:127.0.0.1:35761_solr     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 425221 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.p.PackageLoader /packages.json updated to version -1
   [junit4]   2> 425222 WARN  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.c.CoreContainer Not all security plugins configured!  authentication=disabled authorization=disabled.  Solr is only as secure as you make it. Consider configuring authentication/authorization before exposing Solr to users internal or external.  See https://s.apache.org/solrsecurity for more info
   [junit4]   2> 425222 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = hdfs,class = org.apache.solr.core.backup.repository.HdfsBackupRepository,attributes = {name=hdfs, class=org.apache.solr.core.backup.repository.HdfsBackupRepository},args = {location=/backup,solr.hdfs.home=hdfs://localhost.localdomain:39737/solr,solr.hdfs.confdir=,solr.hdfs.permissions.umask-mode=000}}
   [junit4]   2> 425222 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Default configuration for backup repository is with configuration params {type = repository,name = hdfs,class = org.apache.solr.core.backup.repository.HdfsBackupRepository,attributes = {name=hdfs, class=org.apache.solr.core.backup.repository.HdfsBackupRepository},args = {location=/backup,solr.hdfs.home=hdfs://localhost.localdomain:39737/solr,solr.hdfs.confdir=,solr.hdfs.permissions.umask-mode=000}}
   [junit4]   2> 425247 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 425267 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5b9801f2
   [junit4]   2> 425275 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5b9801f2
   [junit4]   2> 425275 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5b9801f2
   [junit4]   2> 425276 INFO  (jetty-launcher-3422-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestHdfsBackupRestoreCore_70D348CD0259E0D7-001/tempDir-002/node1
   [junit4]   2> 425300 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.MiniSolrCloudCluster waitForAllNodes: numServers=1
   [junit4]   2> 425301 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 425307 INFO  (zkConnectionManagerCallback-3454-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 425307 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 425309 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 425312 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:42207/solr ready
   [junit4]   2> 425366 INFO  (TEST-TestHdfsBackupRestoreCore.test-seed#[70D348CD0259E0D7]) [     ] o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 425370 INFO  (OverseerThreadFactory-3444-thread-1-processing-n:127.0.0.1:35761_solr) [n:127.0.0.1:35761_solr     ] o.a.s.c.a.c.CreateCollectionCmd Create collection HdfsBackupRestore
   [junit4]   2> 425480 INFO  (OverseerStateUpdate-72294392078598149-127.0.0.1:35761_solr-n_0000000000) [n:127.0.0.1:35761_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "core":"HdfsBackupRestore_shard1_replica_n1",
   [junit4]   2>   "node_name":"127.0.0.1:35761_solr",
   [junit4]   2>   "base_url":"http://127.0.0.1:35761/solr",
   [junit4]   2>   "collection":"HdfsBackupRestore",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 425684 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr    x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node2&collection.configName=conf1&newCollection=true&name=HdfsBackupRestore_shard1_replica_n1&action=CREATE&numShards=1&collection=HdfsBackupRestore&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 425684 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr    x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient core cache for max 2147483647 cores with initial capacity of 1024
   [junit4]   2> 425699 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.10.0
   [junit4]   2> 425702 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.s.IndexSchema Schema name=minimal
   [junit4]   2> 425702 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 425702 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'HdfsBackupRestore_shard1_replica_n1' using configuration from configset conf1, trusted=true
   [junit4]   2> 425703 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.HdfsBackupRestore.shard1.replica_n1' (registry 'solr.core.HdfsBackupRestore.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5b9801f2
   [junit4]   2> 425713 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.SolrCore [[HdfsBackupRestore_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestHdfsBackupRestoreCore_70D348CD0259E0D7-001/tempDir-002/node1/HdfsBackupRestore_shard1_replica_n1], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestHdfsBackupRestoreCore_70D348CD0259E0D7-001/tempDir-002/node1/HdfsBackupRestore_shard1_replica_n1/data/]
   [junit4]   2> 425789 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 425790 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 425791 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 425791 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 425796 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 425797 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 425797 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000 ms
   [junit4]   2> 425797 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1709078093481639936
   [junit4]   2> 425801 INFO  (searcherExecutor-3456-thread-1-processing-n:127.0.0.1:35761_solr x:HdfsBackupRestore_shard1_replica_n1 c:HdfsBackupRestore s:shard1 r:core_node2) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.SolrCore [HdfsBackupRestore_shard1_replica_n1]  Registered new searcher autowarm time: 0 ms
   [junit4]   2> 425807 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/HdfsBackupRestore/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 425807 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/HdfsBackupRestore/leaders/shard1
   [junit4]   2> 425813 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 425813 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 425813 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:35761/solr/HdfsBackupRestore_shard1_replica_n1/
   [junit4]   2> 425814 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 425815 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.SyncStrategy http://127.0.0.1:35761/solr/HdfsBackupRestore_shard1_replica_n1/ has no replicas
   [junit4]   2> 425815 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/HdfsBackupRestore/leaders/shard1/leader after winning as /collections/HdfsBackupRestore/leader_elect/shard1/election/72294392078598149-core_node2-n_0000000000
   [junit4]   2> 425819 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:35761/solr/HdfsBackupRestore_shard1_replica_n1/ shard1
   [junit4]   2> 425921 INFO  (zkCallback-3436-thread-1) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/HdfsBackupRestore/state.json] for collection [HdfsBackupRestore] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 425922 INFO  (zkCallback-3436-thread-2) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/HdfsBackupRestore/state.json] for collection [HdfsBackupRestore] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 425923 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 425931 INFO  (qtp452290074-6118) [n:127.0.0.1:35761_solr     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node2&collection.configName=conf1&newCollection=true&name=HdfsBackupRestore_shard1_replica_n1&action=CREATE&numShards=1&collection=HdfsBackupRestore&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=247
   [junit4]   2> 425934 INFO  (qtp452290074-6116) [n:127.0.0.1:35761_solr     ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 426029 INFO  (zkCallback-3436-thread-1) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/HdfsBackupRestore/state.json] for collection [HdfsBackupRestore] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 426029 INFO  (zkCallback-3436-thread-2) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/HdfsBackupRestore/state.json] for collection [HdfsBackupRestore] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 426029 INFO  (zkCallback-3436-thread-3) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/HdfsBackupRestore/state.json] for collection [HdfsBackupRestore] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 426030 INFO  (qtp452290074-6116) [n:127.0.0.1:35761_solr     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=HdfsBackupRestore&nrtReplicas=1&action=CREATE&numShards=1&wt=javabin&version=2} status=0 QTime=662
   [junit4]   2> 426037 INFO  (qtp452290074-6119) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/HdfsBackupRestore/terms/shard1 to Terms{values={core_node2=1}, version=1}
   [junit4]   2> 426037 INFO  (qtp452290074-6119) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.u.p.LogUpdateProcessorFactory [HdfsBackupRestore_shard1_replica_n1]  webapp=/solr path=/update params={wt=javabin&version=2}{deleteByQuery=*:* (-1709078093729103872)} 0 5
   [junit4]   2> 426038 INFO  (TEST-TestHdfsBackupRestoreCore.test-seed#[70D348CD0259E0D7]) [     ] o.a.s.h.BackupRestoreUtils Indexing 60 test docs
   [junit4]   2> 426044 INFO  (qtp452290074-6115) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.u.p.LogUpdateProcessorFactory [HdfsBackupRestore_shard1_replica_n1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[0 (1709078093735395328), 1 (1709078093737492480), 2 (1709078093737492481), 3 (1709078093737492482), 4 (1709078093737492483), 5 (1709078093737492484), 6 (1709078093737492485), 7 (1709078093738541056), 8 (1709078093738541057), 9 (1709078093738541058), ... (60 adds)]} 0 5
   [junit4]   2> 426140 INFO  (searcherExecutor-3456-thread-1-processing-n:127.0.0.1:35761_solr x:HdfsBackupRestore_shard1_replica_n1 c:HdfsBackupRestore s:shard1 r:core_node2) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.SolrCore [HdfsBackupRestore_shard1_replica_n1]  Registered new searcher autowarm time: 0 ms
   [junit4]   2> 426140 INFO  (qtp452290074-6117) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.u.p.LogUpdateProcessorFactory [HdfsBackupRestore_shard1_replica_n1]  webapp=/solr path=/update params={_stateVer_=HdfsBackupRestore:4&waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 95
   [junit4]   2> 426146 INFO  (TEST-TestHdfsBackupRestoreCore.test-seed#[70D348CD0259E0D7]) [     ] o.a.s.h.TestHdfsBackupRestoreCore Running Backup via core admin api
   [junit4]   2> 426153 WARN  (qtp452290074-6116) [n:127.0.0.1:35761_solr    x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.b.r.HdfsBackupRepository HDFS support in Solr has been deprecated as of 8.6. See SOLR-14021 for details.
   [junit4]   2> 426154 INFO  (qtp452290074-6116) [n:127.0.0.1:35761_solr    x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:39737/solr
   [junit4]   2> 426154 INFO  (qtp452290074-6116) [n:127.0.0.1:35761_solr    x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 426169 INFO  (qtp452290074-6116) [n:127.0.0.1:35761_solr c:HdfsBackupRestore s:shard1 r:core_node2 x:HdfsBackupRestore_shard1_replica_n1 ] o.a.s.h.IncrementalShardBackup Creating backup snapshot at hdfs://localhost.localdomain:39737/backup shardBackupMetadataFile:org.apache.solr.core.backup.ShardBackupId@75a4af6b
   [junit4]   2> 426173 ERROR (qtp452290074-6116) [n:127.0.0.1:35761_solr     ] o.a.s.h.RequestHandlerBase org.apache.solr.common.SolrException: Failed to backup core=HdfsBackupRestore_shard1_replica_n1 because org.apache.lucene.index.CorruptIndexException: codec footer mismatch (file truncated?): actual footer=808464432 vs expected footer=-1071082520 (resource=BufferedChecksumIndexInput(MMapIndexInput(path="/home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestHdfsBackupRestoreCore_70D348CD0259E0D7-001/tempDir-002/node1/HdfsBackupRestore_shard1_replica_n1/data/index/_0.si")))
   [junit4]   2> 	at org.apache.solr.handler.admin.BackupCoreOp.execute(BackupCoreOp.java:90)
   [junit4]   2> 	at org.apache.solr.handler.admin.CoreAdminOperation.execute(CoreAdminOperation.java:367)
   [junit4]   2> 	at org.apache.solr.handler.admin.CoreAdminHandler$CallInfo.call(CoreAdminHandler.java:397)
   [junit4]   2> 	at org.apache.solr.handler.admin.CoreAdminHandler.handleRequestBody(CoreAdminHandler.java:181)
   [junit4]   2> 	at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:216)
   [junit4]   2> 	at org.apache.solr.servlet.HttpSolrCall.handleAdmin(HttpSolrCall.java:836)
   [junit4]   2> 	at org.apache.solr.servlet.HttpSolrCall.handleAdminRequest(HttpSolrCall.java:800)
   [junit4]   2> 	at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:545)
   [junit4]   2> 	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:427)
   [junit4]   2> 	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:357)
   [junit4]   2> 	at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
   [junit4]   2> 	at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166)
   [junit4]   2> 	at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:548)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
   [junit4]   2> 	at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1624)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1435)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501)
   [junit4]   2> 	at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1594)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1350)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
   [junit4]   2> 	at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:763)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
   [junit4]   2> 	at org.eclipse.jetty.server.Server.handle(Server.java:516)
   [junit4]   2> 	at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:388)
   [junit4]   2> 	at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:633)
   [junit4]   2> 	at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:380)
   [junit4]   2> 	at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:277)
   [junit4]   2> 	at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)
   [junit4]   2> 	at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105)
   [junit4]   2> 	at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104)
   [junit4]   2> 	at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:882)
   [junit4]   2> 	at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:1036)
   [junit4]   2> 	at java.lang.Thread.run(Thread.java:748)
   [junit4]   2> Caused by: org.apache.lucene.index.CorruptIndexException: codec footer mismatch (file truncated?): actual footer=808464432 vs expected footer=-1071082520 (resource=BufferedChecksumIndexInput(MMapIndexInput(path="/home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestHdfsBackupRestoreCore_70D348CD0259E0D7-001/tempDir-002/node1/HdfsBackupRestore_shard1_replica_n1/data/index/_0.si")))
   [junit4]   2> 	at org.apache.lucene.codecs.CodecUtil.validateFooter(CodecUtil.java:523)
   [junit4]   2> 	at org.apache.lucene.codecs.CodecUtil.retrieveChecksum(CodecUtil.java:490)
   [junit4]   2> 	at org.apache.solr.core.backup.repository.BackupRepository.checksum(BackupRepository.java:266)
   [junit4]   2> 	at org.apache.solr.handler.IncrementalShardBackup.incrementalCopy(IncrementalShardBackup.java:178)
   [junit4]   2> 	at org.apache.solr.handler.IncrementalShardBackup.backup(IncrementalShardBackup.java:143)
   [junit4]   2> 	at org.apache.solr.handler.IncrementalShardBackup.backup(IncrementalShardBackup.java:83)
   [junit4]   2> 	at org.apache.solr.handler.admin.BackupCoreOp.execute(BackupCoreOp.java:72)
   [junit4]   2> 	... 40 more
   [junit4]   2> 
   [junit4]   2> 426173 INFO  (qtp452290074-6116) [n:127.0.0.1:35761_solr     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={core=HdfsBackupRestore_shard1_replica_n1&name=bzk&shardBackupId=md_standalone_0&action=BACKUPCORE&repository=hdfs} status=500 QTime=23
   [junit4]   2> 426173 ERROR (qtp452290074-6116) [n:127.0.0.1:35761_solr     ] o.a.s.s.HttpSolrCall org.apache.solr.common.SolrException: Failed to backup core=HdfsBackupRestore_shard1_replica_n1 because org.apache.lucene.index.CorruptIndexException: codec footer mismatch (file truncated?): actual footer=808464432 vs expected footer=-1071082520 (resource=BufferedChecksumIndexInput(MMapIndexInput(path="/home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestHdfsBackupRestoreCore_70D348CD0259E0D7-001/tempDir-002/node1/HdfsBackupRestore_shard1_replica_n1/data/index/_0.si")))
   [junit4]   2> 	at org.apache.solr.handler.admin.BackupCoreOp.execute(BackupCoreOp.java:90)
   [junit4]   2> 	at org.apache.solr.handler.admin.CoreAdminOperation.execute(CoreAdminOperation.java:367)
   [junit4]   2> 	at org.apache.solr.handler.admin.CoreAdminHandler$CallInfo.call(CoreAdminHandler.java:397)
   [junit4]   2> 	at org.apache.solr.handler.admin.CoreAdminHandler.handleRequestBody(CoreAdminHandler.java:181)
   [junit4]   2> 	at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:216)
   [junit4]   2> 	at org.apache.solr.servlet.HttpSolrCall.handleAdmin(HttpSolrCall.java:836)
   [junit4]   2> 	at org.apache.solr.servlet.HttpSolrCall.handleAdminRequest(HttpSolrCall.java:800)
   [junit4]   2> 	at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:545)
   [junit4]   2> 	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:427)
   [junit4]   2> 	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:357)
   [junit4]   2> 	at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
   [junit4]   2> 	at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:166)
   [junit4]   2> 	at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:548)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
   [junit4]   2> 	at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1624)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1435)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501)
   [junit4]   2> 	at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1594)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1350)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
   [junit4]   2> 	at org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:322)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:763)
   [junit4]   2> 	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
   [junit4]   2> 	at org.eclipse.jetty.server.Server.handle(Server.java:516)
   [junit4]   2> 	at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:388)
   [junit4]   2> 	at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:633)
   [junit4]   2> 	at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:380)
   [junit4]   2> 	at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:277)
   [junit4]   2> 	at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)
   [junit4]   2> 	at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105)
   [junit4]   2> 	at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104)
   [junit4]   2> 	at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:882)
   [junit4]   2> 	at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:1036)
   [junit4]   2> 	at java.lang.Thread.run(Thread.java:748)
   [junit4]   2> Caused by: org.apache.lucene.index.CorruptIndexException: codec footer mismatch (file truncated?): actual footer=808464432 vs expected footer=-1071082520 (resource=BufferedChecksumIndexInput(MMapIndexInput(path="/home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestHdfsBackupRestoreCore_70D348CD0259E0D7-001/tempDir-002/node1/HdfsBackupRestore_shard1_replica_n1/data/index/_0.si")))
   [junit4]   2> 	at org.apache.lucene.codecs.CodecUtil.validateFooter(CodecUtil.java:523)
   [junit4]   2> 	at org.apache.lucene.codecs.CodecUtil.retrieveChecksum(CodecUtil.java:490)
   [junit4]   2> 	at org.apache.solr.core.backup.repository.BackupRepository.checksum(BackupRepository.java:266)
   [junit4]   2> 	at org.apache.solr.handler.IncrementalShardBackup.incrementalCopy(IncrementalShardBackup.java:178)
   [junit4]   2> 	at org.apache.solr.handler.IncrementalShardBackup.backup(IncrementalShardBackup.java:143)
   [junit4]   2> 	at org.apache.solr.handler.IncrementalShardBackup.backup(IncrementalShardBackup.java:83)
   [junit4]   2> 	at org.apache.solr.handler.admin.BackupCoreOp.execute(BackupCoreOp.java:72)
   [junit4]   2> 	... 40 more
   [junit4]   2> 
   [junit4]   2> 426175 INFO  (TEST-TestHdfsBackupRestoreCore.test-seed#[70D348CD0259E0D7]) [     ] o.a.s.SolrTestCaseJ4 ###Ending test
   [junit4]   2> NOTE: download the large Jenkins line-docs file by running 'ant get-jenkins-line-docs' in the lucene directory.
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=TestHdfsBackupRestoreCore -Dtests.method=test -Dtests.seed=70D348CD0259E0D7 -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/test-data/enwiki.random.lines.txt -Dtests.locale=he -Dtests.timezone=Asia/Pyongyang -Dtests.asserts=true -Dtests.file.encoding=ISO-8859-1
   [junit4] ERROR   0.86s J1 | TestHdfsBackupRestoreCore.test <<<
   [junit4]    > Throwable #1: java.io.IOException: Server returned HTTP response code: 500 for URL: http://127.0.0.1:35761/solr/admin/cores?action=BACKUPCORE&core=HdfsBackupRestore_shard1_replica_n1&name=bzk&shardBackupId=md_standalone_0&repository=hdfs
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([70D348CD0259E0D7:F8877717ACA58D2F]:0)
   [junit4]    > 	at sun.net.www.protocol.http.HttpURLConnection.getInputStream0(HttpURLConnection.java:1900)
   [junit4]    > 	at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1498)
   [junit4]    > 	at java.net.URL.openStream(URL.java:1092)
   [junit4]    > 	at org.apache.solr.handler.BackupRestoreUtils.executeHttpRequest(BackupRestoreUtils.java:98)
   [junit4]    > 	at org.apache.solr.handler.BackupRestoreUtils.runCoreAdminCommand(BackupRestoreUtils.java:86)
   [junit4]    > 	at org.apache.solr.handler.TestHdfsBackupRestoreCore.test(TestHdfsBackupRestoreCore.java:203)
   [junit4]    > 	at java.lang.Thread.run(Thread.java:748)
   [junit4]   2> 426178 WARN  (Listener at localhost.localdomain/45527) [     ] o.a.h.h.s.d.DirectoryScanner DirectoryScanner: shutdown has been called
   [junit4]   2> 426185 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.h.ContextHandler Stopped o.e.j.w.WebAppContext@53d1041b{datanode,/,null,STOPPED}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/datanode}
   [junit4]   2> 426185 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.AbstractConnector Stopped ServerConnector@5eca1439{HTTP/1.1, (http/1.1)}{localhost:0}
   [junit4]   2> 426185 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.session node0 Stopped scavenging
   [junit4]   2> 426185 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@4422c6e9{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/static,STOPPED}
   [junit4]   2> 426187 WARN  (BP-1550645347-127.0.0.1-1629903877233 heartbeating to localhost.localdomain/127.0.0.1:39737) [     ] o.a.h.h.s.d.IncrementalBlockReportManager IncrementalBlockReportManager interrupted
   [junit4]   2> 426187 WARN  (BP-1550645347-127.0.0.1-1629903877233 heartbeating to localhost.localdomain/127.0.0.1:39737) [     ] o.a.h.h.s.d.DataNode Ending block pool service for: Block pool BP-1550645347-127.0.0.1-1629903877233 (Datanode Uuid 1d99bb70-cab6-4588-b98b-5ac57a5408b4) service to localhost.localdomain/127.0.0.1:39737
   [junit4]   2> 426188 WARN  (Listener at localhost.localdomain/45527) [     ] o.a.h.h.s.d.DirectoryScanner DirectoryScanner: shutdown has been called
   [junit4]   2> 426193 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.h.ContextHandler Stopped o.e.j.w.WebAppContext@5b47aeda{datanode,/,null,STOPPED}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/datanode}
   [junit4]   2> 426194 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.AbstractConnector Stopped ServerConnector@4f6a356f{HTTP/1.1, (http/1.1)}{localhost:0}
   [junit4]   2> 426194 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.session node0 Stopped scavenging
   [junit4]   2> 426194 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@4d2b4b9d{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/static,STOPPED}
   [junit4]   2> 426195 WARN  (BP-1550645347-127.0.0.1-1629903877233 heartbeating to localhost.localdomain/127.0.0.1:39737) [     ] o.a.h.h.s.d.IncrementalBlockReportManager IncrementalBlockReportManager interrupted
   [junit4]   2> 426195 WARN  (BP-1550645347-127.0.0.1-1629903877233 heartbeating to localhost.localdomain/127.0.0.1:39737) [     ] o.a.h.h.s.d.DataNode Ending block pool service for: Block pool BP-1550645347-127.0.0.1-1629903877233 (Datanode Uuid dc1a14c8-ef32-4acb-a5d1-68b2fda02bb8) service to localhost.localdomain/127.0.0.1:39737
   [junit4]   2> 426208 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.h.ContextHandler Stopped o.e.j.w.WebAppContext@1165d9cb{hdfs,/,null,STOPPED}{jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/hdfs}
   [junit4]   2> 426208 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.AbstractConnector Stopped ServerConnector@5000c713{HTTP/1.1, (http/1.1)}{localhost.localdomain:0}
   [junit4]   2> 426208 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.session node0 Stopped scavenging
   [junit4]   2> 426208 INFO  (Listener at localhost.localdomain/45527) [     ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@783587f1{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.2-tests.jar!/webapps/static,STOPPED}
   [junit4]   2> 426224 WARN  (Listener at localhost.localdomain/45527-SendThread(127.0.0.1:42207)) [     ] o.a.z.ClientCnxn An exception was thrown while closing send thread for session 0x100d75dd7190006.
   [junit4]   2>           => EndOfStreamException: Unable to read additional data from server sessionid 0x100d75dd7190006, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:77)
   [junit4]   2> org.apache.zookeeper.ClientCnxn$EndOfStreamException: Unable to read additional data from server sessionid 0x100d75dd7190006, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:77) ~[zookeeper-3.6.2.jar:3.6.2]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doTransport(ClientCnxnSocketNIO.java:350) ~[zookeeper-3.6.2.jar:3.6.2]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1275) [zookeeper-3.6.2.jar:3.6.2]
   [junit4]   2> 426328 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=1331590011
   [junit4]   2> 426328 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.c.ZkController Remove node as live in ZooKeeper:/live_nodes/127.0.0.1:35761_solr
   [junit4]   2> 426334 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.c.ZkController Publish this node as DOWN...
   [junit4]   2> 426334 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.c.ZkController Publish node=127.0.0.1:35761_solr as DOWN
   [junit4]   2> 426339 INFO  (coreCloseExecutor-3465-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.c.SolrCore [HdfsBackupRestore_shard1_replica_n1]  CLOSING SolrCore org.apache.solr.core.SolrCore@8dc5f9c
   [junit4]   2> 426339 INFO  (coreCloseExecutor-3465-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.core.HdfsBackupRestore.shard1.replica_n1 tag=SolrCore@8dc5f9c
   [junit4]   2> 426339 INFO  (coreCloseExecutor-3465-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@4cadf732: rootName = null, domain = solr.core.HdfsBackupRestore.shard1.replica_n1, service url = null, agent id = null] for registry solr.core.HdfsBackupRestore.shard1.replica_n1/com.codahale.metrics.MetricRegistry@125a21ca
   [junit4]   2> 426353 INFO  (coreCloseExecutor-3465-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.collection.HdfsBackupRestore.shard1.leader tag=SolrCore@8dc5f9c
   [junit4]   2> 426355 INFO  (coreCloseExecutor-3465-thread-1) [n:127.0.0.1:35761_solr     ] o.a.s.u.DirectUpdateHandler2 Committing on IndexWriter.close()  ... SKIPPED (unnecessary).
   [junit4]   2> 426358 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.node tag=null
   [junit4]   2> 426358 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@4c634072: rootName = null, domain = solr.node, service url = null, agent id = null] for registry solr.node/com.codahale.metrics.MetricRegistry@1671c17e
   [junit4]   2> 426364 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jvm tag=null
   [junit4]   2> 426364 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@3cade7d5: rootName = null, domain = solr.jvm, service url = null, agent id = null] for registry solr.jvm/com.codahale.metrics.MetricRegistry@54152066
   [junit4]   2> 426366 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jetty tag=null
   [junit4]   2> 426366 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@634ab1aa: rootName = null, domain = solr.jetty, service url = null, agent id = null] for registry solr.jetty/com.codahale.metrics.MetricRegistry@47d5075
   [junit4]   2> 426366 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.cluster tag=null
   [junit4]   2> 426368 INFO  (closeThreadPool-3467-thread-2) [     ] o.a.s.c.Overseer Overseer (id=72294392078598149-127.0.0.1:35761_solr-n_0000000000) closing
   [junit4]   2> 426369 INFO  (OverseerStateUpdate-72294392078598149-127.0.0.1:35761_solr-n_0000000000) [n:127.0.0.1:35761_solr     ] o.a.s.c.Overseer Overseer Loop exiting : 127.0.0.1:35761_solr
   [junit4]   2> 426369 INFO  (OverseerAutoScalingTriggerThread-72294392078598149-127.0.0.1:35761_solr-n_0000000000) [     ] o.a.s.c.a.OverseerTriggerThread OverseerTriggerThread woken up but we are closed, exiting.
   [junit4]   2> 426377 INFO  (closeThreadPool-3467-thread-1) [     ] o.a.s.c.Overseer Overseer (id=72294392078598149-127.0.0.1:35761_solr-n_0000000000) closing
   [junit4]   2> 426476 INFO  (jetty-closer-3461-thread-1) [     ] o.a.s.c.Overseer Overseer (id=72294392078598149-127.0.0.1:35761_solr-n_0000000000) closing
   [junit4]   2> 426478 INFO  (jetty-closer-3461-thread-1) [     ] o.e.j.s.AbstractConnector Stopped ServerConnector@14328b34{HTTP/1.1, (http/1.1, h2c)}{127.0.0.1:0}
   [junit4]   2> 426478 INFO  (jetty-closer-3461-thread-1) [     ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@437eb845{/solr,null,STOPPED}
   [junit4]   2> 426478 INFO  (jetty-closer-3461-thread-1) [     ] o.e.j.s.session node0 Stopped scavenging
   [junit4]   2> 426479 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.ZkTestServer Shutting down ZkTestServer.
   [junit4]   2> 426582 WARN  (Listener at localhost.localdomain/45527-SendThread(127.0.0.1:42207)) [     ] o.a.z.ClientCnxn An exception was thrown while closing send thread for session 0x100d75dd7190001.
   [junit4]   2>           => EndOfStreamException: Unable to read additional data from server sessionid 0x100d75dd7190001, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:77)
   [junit4]   2> org.apache.zookeeper.ClientCnxn$EndOfStreamException: Unable to read additional data from server sessionid 0x100d75dd7190001, likely server has closed socket
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doIO(ClientCnxnSocketNIO.java:77) ~[zookeeper-3.6.2.jar:3.6.2]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxnSocketNIO.doTransport(ClientCnxnSocketNIO.java:350) ~[zookeeper-3.6.2.jar:3.6.2]
   [junit4]   2> 	at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1275) [zookeeper-3.6.2.jar:3.6.2]
   [junit4]   2> 426683 WARN  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer Watch limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2> 	3	/solr/collections/HdfsBackupRestore/terms/shard1
   [junit4]   2> 	2	/solr/aliases.json
   [junit4]   2> 	2	/solr/clusterprops.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2> 	9	/solr/collections/HdfsBackupRestore/state.json
   [junit4]   2> 	2	/solr/clusterstate.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2> 	12	/solr/overseer/queue
   [junit4]   2> 	5	/solr/overseer/collection-queue-work
   [junit4]   2> 	4	/solr/collections
   [junit4]   2> 	4	/solr/collections/HdfsBackupRestore/state.json
   [junit4]   2> 	3	/solr/live_nodes
   [junit4]   2> 
   [junit4]   2> 426685 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.ZkTestServer waitForServerDown: 127.0.0.1:42207
   [junit4]   2> 426685 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:42207
   [junit4]   2> 426685 INFO  (Listener at localhost.localdomain/45527) [     ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 42207
   [junit4]   2> NOTE: leaving temporary files on disk at: /home/jenkins/jenkins-slave/workspace/Lucene/Lucene-Solr-NightlyTests-8.x/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestHdfsBackupRestoreCore_70D348CD0259E0D7-001
   [junit4]   2> Aug 25, 2021 3:04:43 PM com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
   [junit4]   2> WARNING: Will linger awaiting termination of 33 leaked thread(s).
   [junit4]   2> NOTE: test params are: codec=SimpleText, sim=Asserting(RandomSimilarity(queryNorm=false): {}), locale=he, timezone=Asia/Pyongyang
   [junit4]   2> NOTE: Linux 4.15.0-147-generic amd64/Oracle Corporation 1.8.0_291 (64-bit)/cpus=4,threads=7,free=193590736,total=527958016
   [junit4]   2> NOTE: All tests run in this JVM: [HdfsDirectoryFactoryTest, AutoAddReplicasPlanActionTest, CollectionTooManyReplicasTest, TestChildDocTransformer, SpatialFilterTest, RootFieldTest, TestZkAclsWithHadoopAuth, SolrCLIZkUtilsTest, ProtectedTermFilterFactoryTest, MoveReplicaHDFSTest, SampleTest, TestCloudDeleteByQuery, BufferStoreTest, TestSimNodeAddedTrigger, LeaderTragicEventTest, ThreadDumpHandlerTest, TestLMJelinekMercerSimilarityFactory, TestFastLRUCache, TestPostingsSolrHighlighter, TestReplicationHandlerDiskOverFlow, SQLWithAuthzEnabledTest, TestJmxIntegration, TestJoin, TestTrieFacet, TestSolrFieldCacheBean, CSVRequestHandlerTest, TransactionLogTest, SyncSliceTest, NestedAtomicUpdateTest, TestStandardQParsers, TestSolrDeletionPolicy2, TestSimTriggerIntegration, BlockJoinFacetDistribTest, TestSegmentSorting, TestCorePropertiesReload, TestHighFrequencyDictionaryFactory, TestSafeXMLParsing, IndexSchemaTest, ReturnFieldsTest, NodeAddedTriggerTest, TestTestInjection, TestConfigSetsAPIZkFailure, SolrLogPostToolTest, AggValueSourceTest, TestAuthorizationFramework, DistributedFacetPivotSmallTest, OpenExchangeRatesOrgProviderTest, SearchRateTriggerIntegrationTest, HdfsSyncSliceTest, TestNumericTerms32, TestSolrCoreProperties, TestFastOutputStream, TestGraphMLResponseWriter, TestHdfsBackupRestoreCore]
   [junit4] Completed [153/946 (1!)] on J1 in 7.81s, 1 test, 1 error <<< FAILURES!

[...truncated 56883 lines...]

[JENKINS] Lucene » Lucene-Solr-NightlyTests-8.x - Build # 372 - Failure!

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://ci-builds.apache.org/job/Lucene/job/Lucene-Solr-NightlyTests-8.x/372/

No tests ran.

Build Log:
[...truncated 11270 lines...]
ERROR: command execution failed.
ERROR: Step ‘Archive the artifacts’ failed: no workspace for Lucene/Lucene-Solr-NightlyTests-8.x #372
ERROR: Step ‘Publish JUnit test result report’ failed: no workspace for Lucene/Lucene-Solr-NightlyTests-8.x #372
ERROR: lucene2 is offline; cannot locate jdk_1.8_latest
ERROR: lucene2 is offline; cannot locate jdk_1.8_latest
ERROR: lucene2 is offline; cannot locate jdk_1.8_latest
ERROR: lucene2 is offline; cannot locate jdk_1.8_latest
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any
ERROR: lucene2 is offline; cannot locate jdk_1.8_latest
ERROR: lucene2 is offline; cannot locate jdk_1.8_latest
ERROR: lucene2 is offline; cannot locate jdk_1.8_latest
ERROR: lucene2 is offline; cannot locate jdk_1.8_latest