Posted to commits@phoenix.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/07/15 12:20:36 UTC

Build failed in Jenkins: Phoenix | Master #2455

See <https://builds.apache.org/job/Phoenix-master/2455/display/redirect?page=changes>

Changes:

[chenglei] PHOENIX-5389 Push down PostFilter to Sub-JoinTable for SortMergeJoin and

------------------------------------------
[...truncated 172.65 KB...]
org.apache.hadoop.hbase.DoNotRetryIOException: SCHEMA2.N000023: java.lang.OutOfMemoryError: unable to create new native thread
	at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:113)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:661)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:17038)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:8016)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:2409)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:2391)
	at org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:42010)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:409)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:130)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:324)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:304)
Caused by: java.lang.RuntimeException: java.lang.OutOfMemoryError: unable to create new native thread
	at org.apache.hadoop.hbase.client.RpcRetryingCallerImpl.callWithoutRetries(RpcRetryingCallerImpl.java:200)
	at org.apache.hadoop.hbase.client.ClientScanner.call(ClientScanner.java:269)
	at org.apache.hadoop.hbase.client.ClientScanner.loadCache(ClientScanner.java:437)
	at org.apache.hadoop.hbase.client.ClientScanner.nextWithSyncCache(ClientScanner.java:312)
	at org.apache.hadoop.hbase.client.ClientScanner.next(ClientScanner.java:597)
	at org.apache.phoenix.coprocessor.ViewFinder.findRelatedViews(ViewFinder.java:94)
	at org.apache.phoenix.coprocessor.ViewFinder.findParentViewofIndex(ViewFinder.java:53)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.findAncestorViewsOfIndex(MetaDataEndpointImpl.java:2570)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addDerivedColumnsFromAncestors(MetaDataEndpointImpl.java:749)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.combineColumns(MetaDataEndpointImpl.java:685)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTableFromCache(MetaDataEndpointImpl.java:1937)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:3735)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:616)
	... 9 more
Caused by: java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:717)
	at java.util.concurrent.ThreadPoolExecutor.addWorker(ThreadPoolExecutor.java:957)
	at java.util.concurrent.ThreadPoolExecutor.execute(ThreadPoolExecutor.java:1367)
	at org.apache.hadoop.hbase.client.ResultBoundedCompletionService.submit(ResultBoundedCompletionService.java:171)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.addCallsForCurrentReplica(ScannerCallableWithReplicas.java:320)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:182)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:58)
	at org.apache.hadoop.hbase.client.RpcRetryingCallerImpl.callWithoutRetries(RpcRetryingCallerImpl.java:192)
	... 21 more

Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException: 
org.apache.hadoop.hbase.DoNotRetryIOException: SCHEMA2.N000023: java.lang.OutOfMemoryError: unable to create new native thread
	at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:113)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:661)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:17038)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:8016)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:2409)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:2391)
	at org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:42010)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:409)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:130)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:324)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:304)
Caused by: java.lang.RuntimeException: java.lang.OutOfMemoryError: unable to create new native thread
	at org.apache.hadoop.hbase.client.RpcRetryingCallerImpl.callWithoutRetries(RpcRetryingCallerImpl.java:200)
	at org.apache.hadoop.hbase.client.ClientScanner.call(ClientScanner.java:269)
	at org.apache.hadoop.hbase.client.ClientScanner.loadCache(ClientScanner.java:437)
	at org.apache.hadoop.hbase.client.ClientScanner.nextWithSyncCache(ClientScanner.java:312)
	at org.apache.hadoop.hbase.client.ClientScanner.next(ClientScanner.java:597)
	at org.apache.phoenix.coprocessor.ViewFinder.findRelatedViews(ViewFinder.java:94)
	at org.apache.phoenix.coprocessor.ViewFinder.findParentViewofIndex(ViewFinder.java:53)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.findAncestorViewsOfIndex(MetaDataEndpointImpl.java:2570)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addDerivedColumnsFromAncestors(MetaDataEndpointImpl.java:749)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.combineColumns(MetaDataEndpointImpl.java:685)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTableFromCache(MetaDataEndpointImpl.java:1937)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:3735)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:616)
	... 9 more
Caused by: java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:717)
	at java.util.concurrent.ThreadPoolExecutor.addWorker(ThreadPoolExecutor.java:957)
	at java.util.concurrent.ThreadPoolExecutor.execute(ThreadPoolExecutor.java:1367)
	at org.apache.hadoop.hbase.client.ResultBoundedCompletionService.submit(ResultBoundedCompletionService.java:171)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.addCallsForCurrentReplica(ScannerCallableWithReplicas.java:320)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:182)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:58)
	at org.apache.hadoop.hbase.client.RpcRetryingCallerImpl.callWithoutRetries(RpcRetryingCallerImpl.java:192)
	... 21 more


[ERROR] testUpdatableViewsWithSameNameDifferentTenantsWithLocalIndex(org.apache.phoenix.end2end.TenantSpecificViewIndexIT)  Time elapsed: 608.965 s  <<< ERROR!
org.apache.phoenix.exception.PhoenixIOException: java.util.concurrent.TimeoutException: The procedure 184 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testUpdatableViewsWithSameNameDifferentTenantsWithLocalIndex(TenantSpecificViewIndexIT.java:74)
Caused by: org.apache.hadoop.hbase.exceptions.TimeoutIOException: java.util.concurrent.TimeoutException: The procedure 184 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testUpdatableViewsWithSameNameDifferentTenantsWithLocalIndex(TenantSpecificViewIndexIT.java:74)
Caused by: java.util.concurrent.TimeoutException: The procedure 184 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testUpdatableViewsWithSameNameDifferentTenantsWithLocalIndex(TenantSpecificViewIndexIT.java:74)

[ERROR] testMultiCFViewIndex(org.apache.phoenix.end2end.TenantSpecificViewIndexIT)  Time elapsed: 608.515 s  <<< ERROR!
org.apache.phoenix.exception.PhoenixIOException: java.util.concurrent.TimeoutException: The procedure 186 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.createTableAndValidate(TenantSpecificViewIndexIT.java:106)
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testMultiCFViewIndex(TenantSpecificViewIndexIT.java:124)
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testMultiCFViewIndex(TenantSpecificViewIndexIT.java:80)
Caused by: org.apache.hadoop.hbase.exceptions.TimeoutIOException: java.util.concurrent.TimeoutException: The procedure 186 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.createTableAndValidate(TenantSpecificViewIndexIT.java:106)
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testMultiCFViewIndex(TenantSpecificViewIndexIT.java:124)
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testMultiCFViewIndex(TenantSpecificViewIndexIT.java:80)
Caused by: java.util.concurrent.TimeoutException: The procedure 186 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.createTableAndValidate(TenantSpecificViewIndexIT.java:106)
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testMultiCFViewIndex(TenantSpecificViewIndexIT.java:124)
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testMultiCFViewIndex(TenantSpecificViewIndexIT.java:80)

[ERROR] testUpdatableViewLocalIndex(org.apache.phoenix.end2end.TenantSpecificViewIndexIT)  Time elapsed: 608.434 s  <<< ERROR!
org.apache.phoenix.exception.PhoenixIOException: java.util.concurrent.TimeoutException: The procedure 188 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testUpdatableViewLocalIndex(TenantSpecificViewIndexIT.java:59)
Caused by: org.apache.hadoop.hbase.exceptions.TimeoutIOException: java.util.concurrent.TimeoutException: The procedure 188 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testUpdatableViewLocalIndex(TenantSpecificViewIndexIT.java:59)
Caused by: java.util.concurrent.TimeoutException: The procedure 188 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testUpdatableViewLocalIndex(TenantSpecificViewIndexIT.java:59)

[ERROR] testOverlappingDatesFilter(org.apache.phoenix.end2end.TenantSpecificViewIndexIT)  Time elapsed: 608.389 s  <<< ERROR!
org.apache.phoenix.exception.PhoenixIOException: java.util.concurrent.TimeoutException: The procedure 190 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testOverlappingDatesFilter(TenantSpecificViewIndexIT.java:314)
Caused by: org.apache.hadoop.hbase.exceptions.TimeoutIOException: java.util.concurrent.TimeoutException: The procedure 190 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testOverlappingDatesFilter(TenantSpecificViewIndexIT.java:314)
Caused by: java.util.concurrent.TimeoutException: The procedure 190 is still running
	at org.apache.phoenix.end2end.TenantSpecificViewIndexIT.testOverlappingDatesFilter(TenantSpecificViewIndexIT.java:314)

Build timed out (after 300 minutes). Marking the build as failed.
Build was aborted
Recording test results
[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR]   AlterMultiTenantTableWithViewsIT.testAddDropColumnToBaseTablePropagatesToEntireViewHierarchy:156 » PhoenixIO
[ERROR]   AlterMultiTenantTableWithViewsIT.testCacheInvalidatedAfterAddingColumnToBaseTableWithViews:613 » PhoenixIO
[ERROR]   TenantSpecificViewIndexIT.testMultiCFViewIndex:80->testMultiCFViewIndex:124->createTableAndValidate:106 » PhoenixIO
[ERROR]   TenantSpecificViewIndexIT.testOverlappingDatesFilter:314 » PhoenixIO java.util...
[ERROR]   TenantSpecificViewIndexIT.testUpdatableView:54->BaseTenantSpecificViewIndexIT.testUpdatableView:46->BaseTenantSpecificViewIndexIT.testUpdatableView:57->BaseTenantSpecificViewIndexIT.verifyViewData:183 » PhoenixIO
[ERROR]   TenantSpecificViewIndexIT.testUpdatableViewLocalIndex:59->BaseTenantSpecificViewIndexIT.testUpdatableView:52->BaseTenantSpecificViewIndexIT.createBaseTable:116 » PhoenixIO
[ERROR]   TenantSpecificViewIndexIT.testUpdatableViewsWithSameNameDifferentTenants:69->BaseTenantSpecificViewIndexIT.testUpdatableViewsWithSameNameDifferentTenants:78->BaseTenantSpecificViewIndexIT.testUpdatableViewsWithSameNameDifferentTenants:97->BaseTenantSpecificViewIndexIT.createAndVerifyIndex:140 » PhoenixIO
[ERROR]   TenantSpecificViewIndexIT.testUpdatableViewsWithSameNameDifferentTenantsWithLocalIndex:74->BaseTenantSpecificViewIndexIT.testUpdatableViewsWithSameNameDifferentTenants:85->BaseTenantSpecificViewIndexIT.createBaseTable:116 » PhoenixIO
[ERROR]   ViewIndexIT>SplitSystemCatalogIT.doSetup:57->BaseTest.setUpTestDriver:515->BaseTest.setUpTestDriver:521->BaseTest.initAndRegisterTestDriver:661 » PhoenixIO
[INFO] 
[ERROR] Tests run: 104, Failures: 0, Errors: 9, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:verify (ParallelStatsEnabledTest) @ phoenix-core ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary for Apache Phoenix 5.1.0-HBase-2.0-SNAPSHOT:
[INFO] 
[INFO] Apache Phoenix ..................................... SUCCESS [  2.353 s]
[INFO] Phoenix Core ....................................... FAILURE [  04:59 h]
[INFO] Phoenix - Pherf .................................... SKIPPED
[INFO] Phoenix Client ..................................... SKIPPED
[INFO] Phoenix Server ..................................... SKIPPED
[INFO] Phoenix Assembly ................................... SKIPPED
[INFO] Phoenix - Tracing Web Application .................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time:  05:00 h
[INFO] Finished at: 2019-07-15T12:20:29Z
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.20:verify (ParallelStatsEnabledTest) on project phoenix-core: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] org.apache.maven.surefire.booter.SurefireBooterForkException: ExecutionException The forked VM terminated without properly saying goodbye. VM crash or System.exit called?
[ERROR] Command was /bin/sh -c cd <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core> && /usr/local/asfpackages/java/jdk1.8.0_191/jre/bin/java -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom '-Djava.library.path=${hadoop.library.path}:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib' -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -jar <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire/surefirebooter6437938817433543980.jar> <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire> 2019-07-15T07-23-17_508-jvmRun7 surefire8353953758194574716tmp surefire_538728260784947091750tmp
[ERROR] Error occurred in starting fork, check output in log
[ERROR] Process Exit Code: 143
[ERROR] Crashed tests:
[ERROR] org.apache.phoenix.end2end.ViewIT
[ERROR] ExecutionException The forked VM terminated without properly saying goodbye. VM crash or System.exit called?
[ERROR] Command was /bin/sh -c cd <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core> && /usr/local/asfpackages/java/jdk1.8.0_191/jre/bin/java -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom '-Djava.library.path=${hadoop.library.path}:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib' -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -jar <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire/surefirebooter7972000424345504967.jar> <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire> 2019-07-15T07-23-17_508-jvmRun6 surefire3585029275437508039tmp surefire_5756547695266076288591tmp
[ERROR] Process Exit Code: 0
[ERROR] ExecutionException The forked VM terminated without properly saying goodbye. VM crash or System.exit called?
[ERROR] Command was /bin/sh -c cd <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core> && /usr/local/asfpackages/java/jdk1.8.0_191/jre/bin/java -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom '-Djava.library.path=${hadoop.library.path}:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib' -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -jar <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire/surefirebooter8603228143191508694.jar> <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire> 2019-07-15T07-23-17_508-jvmRun5 surefire3038359653295725515tmp surefire_6186829252945112355259tmp
[ERROR] Error occurred in starting fork, check output in log
[ERROR] Process Exit Code: 1
[ERROR] ExecutionException The forked VM terminated without properly saying goodbye. VM crash or System.exit called?
[ERROR] Command was /bin/sh -c cd <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core> && /usr/local/asfpackages/java/jdk1.8.0_191/jre/bin/java -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom '-Djava.library.path=${hadoop.library.path}:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib' -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -jar <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire/surefirebooter8492873231783888420.jar> <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire> 2019-07-15T07-23-17_508-jvmRun5 surefire5301081427237177971tmp surefire_6197669264516114015278tmp
[ERROR] Error occurred in starting fork, check output in log
[ERROR] Process Exit Code: 1
[ERROR] ExecutionException The forked VM terminated without properly saying goodbye. VM crash or System.exit called?
[ERROR] Command was /bin/sh -c cd <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core> && /usr/local/asfpackages/java/jdk1.8.0_191/jre/bin/java -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom '-Djava.library.path=${hadoop.library.path}:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib' -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -jar <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire/surefirebooter2617853747795887092.jar> <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire> 2019-07-15T07-23-17_508-jvmRun5 surefire2506704388828007744tmp surefire_6204951134613466551968tmp
[ERROR] Error occurred in starting fork, check output in log
[ERROR] Process Exit Code: 1
[ERROR] ExecutionException The forked VM terminated without properly saying goodbye. VM crash or System.exit called?
[ERROR] Command was /bin/sh -c cd <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core> && /usr/local/asfpackages/java/jdk1.8.0_191/jre/bin/java -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom '-Djava.library.path=${hadoop.library.path}:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib' -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -jar <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire/surefirebooter2282932654658010887.jar> <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire> 2019-07-15T07-23-17_508-jvmRun5 surefire5520394643256538685tmp surefire_621349314929755841396tmp
[ERROR] Error occurred in starting fork, check output in log
[ERROR] Process Exit Code: 1
[ERROR] ExecutionException The forked VM terminated without properly saying goodbye. VM crash or System.exit called?
[ERROR] Command was /bin/sh -c cd <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core> && /usr/local/asfpackages/java/jdk1.8.0_191/jre/bin/java -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom '-Djava.library.path=${hadoop.library.path}:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib' -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -jar <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire/surefirebooter1458909760667185456.jar> <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire> 2019-07-15T07-23-17_508-jvmRun5 surefire8060320629527152026tmp surefire_6221871097949396791843tmp
[ERROR] Error occurred in starting fork, check output in log
[ERROR] Process Exit Code: 1
[ERROR] ExecutionException The forked VM terminated without properly saying goodbye. VM crash or System.exit called?
[ERROR] Command was /bin/sh -c cd <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core> && /usr/local/asfpackages/java/jdk1.8.0_191/jre/bin/java -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom '-Djava.library.path=${hadoop.library.path}:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib' -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -jar <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire/surefirebooter2003008373742854717.jar> <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire> 2019-07-15T07-23-17_508-jvmRun5 surefire4418081778592759083tmp surefire_6232069346589033648671tmp
[ERROR] Error occurred in starting fork, check output in log
[ERROR] Process Exit Code: 1
[ERROR] ExecutionException The forked VM terminated without properly saying goodbye. VM crash or System.exit called?
[ERROR] Command was /bin/sh -c cd <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core> && /usr/local/asfpackages/java/jdk1.8.0_191/jre/bin/java -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom '-Djava.library.path=${hadoop.library.path}:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib' -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -jar <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire/surefirebooter3694318991241946422.jar> <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire> 2019-07-15T07-23-17_508-jvmRun5 surefire1779506395494997424tmp surefire_6241341848572544381218tmp
[ERROR] Error occurred in starting fork, check output in log
[ERROR] Process Exit Code: 1
[ERROR] 	at org.apache.maven.plugin.surefire.booterclient.ForkStarter.awaitResultsDone(ForkStarter.java:494)
[ERROR] 	at org.apache.maven.plugin.surefire.booterclient.ForkStarter.runSuitesForkPerTestSet(ForkStarter.java:441)
[ERROR] 	at org.apache.maven.plugin.surefire.booterclient.ForkStarter.run(ForkStarter.java:292)
[ERROR] 	at org.apache.maven.plugin.surefire.booterclient.ForkStarter.run(ForkStarter.java:243)
[ERROR] 	at org.apache.maven.plugin.surefire.AbstractSurefireMojo.executeProvider(AbstractSurefireMojo.java:1077)
[ERROR] 	at org.apache.maven.plugin.surefire.AbstractSurefireMojo.executeAfterPreconditionsChecked(AbstractSurefireMojo.java:907)
[ERROR] 	at org.apache.maven.plugin.surefire.AbstractSurefireMojo.execute(AbstractSurefireMojo.java:785)
[ERROR] 	at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo(DefaultBuildPluginManager.java:137)
[ERROR] 	at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:210)
[ERROR] 	at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:156)
[ERROR] 	at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:148)
[ERROR] 	at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:117)
[ERROR] 	at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:81)
[ERROR] 	at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build(SingleThreadedBuilder.java:56)
[ERROR] 	at org.apache.maven.lifecycle.internal.LifecycleStarter.execute(LifecycleStarter.java:128)
[ERROR] 	at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:305)
[ERROR] 	at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:192)
[ERROR] 	at org.apache.maven.DefaultMaven.execute(DefaultMaven.java:105)
[ERROR] 	at org.apache.maven.cli.MavenCli.execute(MavenCli.java:956)
[ERROR] 	at org.apache.maven.cli.MavenCli.doMain(MavenCli.java:288)
[ERROR] 	at org.apache.maven.cli.MavenCli.main(MavenCli.java:192)
[ERROR] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[ERROR] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[ERROR] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[ERROR] 	at java.lang.reflect.Method.invoke(Method.java:498)
[ERROR] 	at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced(Launcher.java:289)
[ERROR] 	at org.codehaus.plexus.classworlds.launcher.Launcher.launch(Launcher.java:229)
[ERROR] 	at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode(Launcher.java:415)
[ERROR] 	at org.codehaus.plexus.classworlds.launcher.Launcher.main(Launcher.java:356)
[ERROR] Caused by: org.apache.maven.surefire.booter.SurefireBooterForkException: The forked VM terminated without properly saying goodbye. VM crash or System.exit called?
[ERROR] Command was /bin/sh -c cd <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core> && /usr/local/asfpackages/java/jdk1.8.0_191/jre/bin/java -enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom '-Djava.library.path=${hadoop.library.path}:/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib' -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ -jar <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire/surefirebooter3694318991241946422.jar> <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/surefire> 2019-07-15T07-23-17_508-jvmRun5 surefire1779506395494997424tmp surefire_6241341848572544381218tmp
[ERROR] Error occurred in starting fork, check output in log
[ERROR] Process Exit Code: 1
[ERROR] 	at org.apache.maven.plugin.surefire.booterclient.ForkStarter.fork(ForkStarter.java:679)
[ERROR] 	at org.apache.maven.plugin.surefire.booterclient.ForkStarter.fork(ForkStarter.java:533)
[ERROR] 	at org.apache.maven.plugin.surefire.booterclient.ForkStarter.access$600(ForkStarter.java:117)
[ERROR] 	at org.apache.maven.plugin.surefire.booterclient.ForkStarter$2.call(ForkStarter.java:429)
[ERROR] 	at org.apache.maven.plugin.surefire.booterclient.ForkStarter$2.call(ForkStarter.java:406)
[ERROR] 	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[ERROR] 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[ERROR] 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[ERROR] 	at java.lang.Thread.run(Thread.java:748)
[ERROR] 
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-core

Jenkins build is back to normal : Phoenix | Master #2456

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-master/2456/display/redirect?page=changes>