Posted to commits@phoenix.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2017/01/25 19:52:12 UTC

Build failed in Jenkins: Phoenix-encode-columns #46

See <https://builds.apache.org/job/Phoenix-encode-columns/46/changes>

Changes:

[tdsilva] PHOENIX-3519 Specify QualifierEncodingScheme in CREATE TABLE statement

------------------------------------------
[...truncated 1011 lines...]
	at org.apache.hadoop.hbase.client.MultiServerCallable.call(MultiServerCallable.java:51)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:193)
	at org.apache.hadoop.hbase.client.AsyncProcess$1.run(AsyncProcess.java:622)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
: 1 time, org.apache.hadoop.hbase.DoNotRetryIOException
	at org.apache.phoenix.end2end.index.MutableIndexFailureIT$FailingRegionObserver.preBatchMutate(MutableIndexFailureIT.java:401)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$35.call(RegionCoprocessorHost.java:1013)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1656)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1733)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1688)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preBatchMutate(RegionCoprocessorHost.java:1009)
	at org.apache.hadoop.hbase.regionserver.HRegion.doMiniBatchMutation(HRegion.java:2580)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2359)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2314)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2318)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doBatchOp(HRegionServer.java:4678)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doNonAtomicRegionMutation(HRegionServer.java:3835)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.multi(HRegionServer.java:3680)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32500)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2195)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
	at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
	at java.lang.Thread.run(Thread.java:745)
: 1 time, servers with issues: proserpina.apache.org,54869,1485373223522, proserpina.apache.org,40190,1485373223490, 
	at org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.makeException(AsyncProcess.java:211)
	at org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.access$500(AsyncProcess.java:195)
	at org.apache.hadoop.hbase.client.AsyncProcess.getErrors(AsyncProcess.java:1082)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.processBatchCallback(HConnectionManager.java:2479)
	at org.apache.hadoop.hbase.client.HTable.batchCallback(HTable.java:898)
	at org.apache.hadoop.hbase.client.HTable.batchCallback(HTable.java:913)
	at org.apache.hadoop.hbase.client.HTable.batch(HTable.java:888)
	at org.apache.hadoop.hbase.coprocessor.CoprocessorHost$Environment$HTableWrapper.batch(CoprocessorHost.java:595)
	at org.apache.phoenix.execute.DelegateHTable.batch(DelegateHTable.java:94)
	at org.apache.phoenix.hbase.index.write.ParallelWriterIndexCommitter$1.call(ParallelWriterIndexCommitter.java:169)
	at org.apache.phoenix.hbase.index.write.ParallelWriterIndexCommitter$1.call(ParallelWriterIndexCommitter.java:134)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	... 1 more
: 1 time, servers with issues: proserpina.apache.org,54869,1485373223522, 
	at org.apache.phoenix.end2end.index.MutableIndexFailureIT.helpTestWriteFailureDisablesIndex(MutableIndexFailureIT.java:232)
	at org.apache.phoenix.end2end.index.MutableIndexFailureIT.testWriteFailureDisablesIndex(MutableIndexFailureIT.java:134)
Caused by: org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException: 
Failed 1 action: org.apache.hadoop.hbase.DoNotRetryIOException: Failed 2 actions: org.apache.hadoop.hbase.DoNotRetryIOException
	at org.apache.phoenix.end2end.index.MutableIndexFailureIT$FailingRegionObserver.preBatchMutate(MutableIndexFailureIT.java:401)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$35.call(RegionCoprocessorHost.java:1013)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1656)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1733)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1688)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preBatchMutate(RegionCoprocessorHost.java:1009)
	at org.apache.hadoop.hbase.regionserver.HRegion.doMiniBatchMutation(HRegion.java:2580)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2359)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2314)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2318)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doBatchOp(HRegionServer.java:4678)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doNonAtomicRegionMutation(HRegionServer.java:3835)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.multi(HRegionServer.java:3680)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32500)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2195)
	at org.apache.hadoop.hbase.client.CoprocessorHConnection$1.callBlockingMethod(CoprocessorHConnection.java:144)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.multi(ClientProtos.java:32986)
	at org.apache.hadoop.hbase.client.MultiServerCallable.call(MultiServerCallable.java:113)
	at org.apache.hadoop.hbase.client.MultiServerCallable.call(MultiServerCallable.java:51)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:193)
	at org.apache.hadoop.hbase.client.AsyncProcess$1.run(AsyncProcess.java:622)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
: 1 time, org.apache.hadoop.hbase.DoNotRetryIOException
	at org.apache.phoenix.end2end.index.MutableIndexFailureIT$FailingRegionObserver.preBatchMutate(MutableIndexFailureIT.java:401)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$35.call(RegionCoprocessorHost.java:1013)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1656)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1733)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1688)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preBatchMutate(RegionCoprocessorHost.java:1009)
	at org.apache.hadoop.hbase.regionserver.HRegion.doMiniBatchMutation(HRegion.java:2580)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2359)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2314)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2318)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doBatchOp(HRegionServer.java:4678)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doNonAtomicRegionMutation(HRegionServer.java:3835)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.multi(HRegionServer.java:3680)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32500)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2195)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
	at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
	at java.lang.Thread.run(Thread.java:745)
: 1 time, servers with issues: proserpina.apache.org,54869,1485373223522, proserpina.apache.org,40190,1485373223490, 
	at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:73)
	at org.apache.phoenix.index.PhoenixIndexFailurePolicy.handleFailure(PhoenixIndexFailurePolicy.java:114)
	at org.apache.phoenix.hbase.index.write.IndexWriter.writeAndKillYourselfOnFailure(IndexWriter.java:151)
	at org.apache.phoenix.hbase.index.write.IndexWriter.writeAndKillYourselfOnFailure(IndexWriter.java:135)
	at org.apache.phoenix.hbase.index.Indexer.doPostWithExceptions(Indexer.java:453)
	at org.apache.phoenix.hbase.index.Indexer.doPost(Indexer.java:401)
	at org.apache.phoenix.hbase.index.Indexer.postBatchMutate(Indexer.java:396)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$36.call(RegionCoprocessorHost.java:1028)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1656)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1733)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1688)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.postBatchMutate(RegionCoprocessorHost.java:1024)
	at org.apache.hadoop.hbase.regionserver.HRegion.doMiniBatchMutation(HRegion.java:2687)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2359)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2314)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2318)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doBatchOp(HRegionServer.java:4678)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doNonAtomicRegionMutation(HRegionServer.java:3835)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.multi(HRegionServer.java:3680)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32500)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2195)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
	at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
	at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException: Failed 2 actions: org.apache.hadoop.hbase.DoNotRetryIOException
	at org.apache.phoenix.end2end.index.MutableIndexFailureIT$FailingRegionObserver.preBatchMutate(MutableIndexFailureIT.java:401)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$35.call(RegionCoprocessorHost.java:1013)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1656)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1733)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1688)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preBatchMutate(RegionCoprocessorHost.java:1009)
	at org.apache.hadoop.hbase.regionserver.HRegion.doMiniBatchMutation(HRegion.java:2580)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2359)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2314)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2318)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doBatchOp(HRegionServer.java:4678)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doNonAtomicRegionMutation(HRegionServer.java:3835)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.multi(HRegionServer.java:3680)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32500)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2195)
	at org.apache.hadoop.hbase.client.CoprocessorHConnection$1.callBlockingMethod(CoprocessorHConnection.java:144)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.multi(ClientProtos.java:32986)
	at org.apache.hadoop.hbase.client.MultiServerCallable.call(MultiServerCallable.java:113)
	at org.apache.hadoop.hbase.client.MultiServerCallable.call(MultiServerCallable.java:51)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:193)
	at org.apache.hadoop.hbase.client.AsyncProcess$1.run(AsyncProcess.java:622)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
: 1 time, org.apache.hadoop.hbase.DoNotRetryIOException
	at org.apache.phoenix.end2end.index.MutableIndexFailureIT$FailingRegionObserver.preBatchMutate(MutableIndexFailureIT.java:401)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$35.call(RegionCoprocessorHost.java:1013)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1656)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1733)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1688)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preBatchMutate(RegionCoprocessorHost.java:1009)
	at org.apache.hadoop.hbase.regionserver.HRegion.doMiniBatchMutation(HRegion.java:2580)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2359)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2314)
	at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2318)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doBatchOp(HRegionServer.java:4678)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.doNonAtomicRegionMutation(HRegionServer.java:3835)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.multi(HRegionServer.java:3680)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32500)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2195)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
	at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
	at java.lang.Thread.run(Thread.java:745)
: 1 time, servers with issues: proserpina.apache.org,54869,1485373223522, proserpina.apache.org,40190,1485373223490, 
	at org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.makeException(AsyncProcess.java:211)
	at org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.access$500(AsyncProcess.java:195)
	at org.apache.hadoop.hbase.client.AsyncProcess.getErrors(AsyncProcess.java:1082)
	at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.processBatchCallback(HConnectionManager.java:2479)
	at org.apache.hadoop.hbase.client.HTable.batchCallback(HTable.java:898)
	at org.apache.hadoop.hbase.client.HTable.batchCallback(HTable.java:913)
	at org.apache.hadoop.hbase.client.HTable.batch(HTable.java:888)
	at org.apache.hadoop.hbase.coprocessor.CoprocessorHost$Environment$HTableWrapper.batch(CoprocessorHost.java:595)
	at org.apache.phoenix.execute.DelegateHTable.batch(DelegateHTable.java:94)
	at org.apache.phoenix.hbase.index.write.ParallelWriterIndexCommitter$1.call(ParallelWriterIndexCommitter.java:169)
	at org.apache.phoenix.hbase.index.write.ParallelWriterIndexCommitter$1.call(ParallelWriterIndexCommitter.java:134)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	... 1 more
: 1 time, servers with issues: proserpina.apache.org,54869,1485373223522, 
	at org.apache.phoenix.end2end.index.MutableIndexFailureIT.helpTestWriteFailureDisablesIndex(MutableIndexFailureIT.java:232)
	at org.apache.phoenix.end2end.index.MutableIndexFailureIT.testWriteFailureDisablesIndex(MutableIndexFailureIT.java:134)

Running org.apache.phoenix.iterate.ScannerLeaseRenewalIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.48 sec - in org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 63.604 sec - in org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
Running org.apache.phoenix.monitoring.PhoenixMetricsIT
Running org.apache.phoenix.rpc.PhoenixClientRpcIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.753 sec - in org.apache.phoenix.rpc.PhoenixClientRpcIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 61.347 sec - in org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
Running org.apache.phoenix.rpc.PhoenixServerRpcIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.64 sec - in org.apache.phoenix.rpc.PhoenixServerRpcIT
Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 43.81 sec - in org.apache.phoenix.monitoring.PhoenixMetricsIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 162.752 sec - in org.apache.phoenix.iterate.ScannerLeaseRenewalIT

Results :

Tests in error: 
  MutableIndexFailureIT.testWriteFailureDisablesIndex:134->helpTestWriteFailureDisablesIndex:232 » Commit

Tests run: 445, Failures: 0, Errors: 1, Skipped: 65

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:verify (ParallelStatsEnabledTest) @ phoenix-core ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix ..................................... SUCCESS [  2.886 s]
[INFO] Phoenix Core ....................................... FAILURE [52:30 min]
[INFO] Phoenix - Flume .................................... SKIPPED
[INFO] Phoenix - Pig ...................................... SKIPPED
[INFO] Phoenix Query Server Client ........................ SKIPPED
[INFO] Phoenix Query Server ............................... SKIPPED
[INFO] Phoenix - Pherf .................................... SKIPPED
[INFO] Phoenix - Spark .................................... SKIPPED
[INFO] Phoenix - Hive ..................................... SKIPPED
[INFO] Phoenix Client ..................................... SKIPPED
[INFO] Phoenix Server ..................................... SKIPPED
[INFO] Phoenix Assembly ................................... SKIPPED
[INFO] Phoenix - Tracing Web Application .................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 52:35 min
[INFO] Finished at: 2017-01-25T19:47:05+00:00
[INFO] Final Memory: 60M/982M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.19.1:verify (ParallelStatsEnabledTest) on project phoenix-core: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Phoenix-encode-columns/ws/phoenix-core/target/failsafe-reports> for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-core
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
Compressed 1.78 GB of artifacts by 70.3% relative to #45
Recording test results

Jenkins build is back to normal : Phoenix-encode-columns #48

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-encode-columns/48/changes>


Build failed in Jenkins: Phoenix-encode-columns #47

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-encode-columns/47/changes>

Changes:

[tdsilva] PHOENIX-3586 Add StorageScheme table property to allow users to specify

------------------------------------------
[...truncated 744 lines...]
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.404 sec - in org.apache.phoenix.end2end.salted.SaltedTableVarLengthRowKeyIT
Running org.apache.phoenix.iterate.PhoenixQueryTimeoutIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.307 sec - in org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
Running org.apache.phoenix.iterate.RoundRobinResultIteratorIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.968 sec - in org.apache.phoenix.iterate.PhoenixQueryTimeoutIT
Running org.apache.phoenix.rpc.UpdateCacheIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.101 sec - in org.apache.phoenix.rpc.UpdateCacheIT
Running org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.845 sec - in org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
Running org.apache.phoenix.trace.PhoenixTraceReaderIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.824 sec - in org.apache.phoenix.trace.PhoenixTraceReaderIT
Running org.apache.phoenix.trace.PhoenixTracingEndToEndIT
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 38.731 sec - in org.apache.phoenix.iterate.RoundRobinResultIteratorIT
Running org.apache.phoenix.tx.FlappingTransactionIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.353 sec - in org.apache.phoenix.tx.FlappingTransactionIT
Running org.apache.phoenix.tx.TransactionIT
Tests run: 19, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 66.005 sec - in org.apache.phoenix.tx.TransactionIT
Running org.apache.phoenix.tx.TxCheckpointIT
Tests run: 67, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 335.278 sec - in org.apache.phoenix.end2end.index.IndexExpressionIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 86.98 sec - in org.apache.phoenix.trace.PhoenixTracingEndToEndIT
Tests run: 102, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 755.247 sec - in org.apache.phoenix.end2end.SortMergeJoinIT
Tests run: 20, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 107.159 sec - in org.apache.phoenix.tx.TxCheckpointIT
Tests run: 152, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 730.541 sec - in org.apache.phoenix.end2end.index.IndexIT

Results :

Tests run: 1768, Failures: 0, Errors: 0, Skipped: 13

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test (ClientManagedTimeTests) @ phoenix-core ---

-------------------------------------------------------
 T E S T S
-------------------------------------------------------
Running org.apache.phoenix.end2end.CastAndCoerceIT
Running org.apache.phoenix.end2end.AggregateQueryIT
Running org.apache.phoenix.end2end.ClientTimeArithmeticQueryIT
Running org.apache.phoenix.end2end.CaseStatementIT
Running org.apache.phoenix.end2end.ArrayIT
Tests run: 49, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 38.842 sec - in org.apache.phoenix.end2end.CastAndCoerceIT
Running org.apache.phoenix.end2end.ColumnProjectionOptimizationIT
Tests run: 63, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 42.752 sec - in org.apache.phoenix.end2end.CaseStatementIT
Running org.apache.phoenix.end2end.CreateSchemaIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.271 sec - in org.apache.phoenix.end2end.CreateSchemaIT
Running org.apache.phoenix.end2end.CreateTableIT
Tests run: 49, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 55.749 sec - in org.apache.phoenix.end2end.AggregateQueryIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.156 sec - in org.apache.phoenix.end2end.ColumnProjectionOptimizationIT
Running org.apache.phoenix.end2end.CustomEntityDataIT
Running org.apache.phoenix.end2end.DerivedTableIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.525 sec - in org.apache.phoenix.end2end.CustomEntityDataIT
Running org.apache.phoenix.end2end.DistinctCountIT
Tests run: 79, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 64.542 sec - in org.apache.phoenix.end2end.ArrayIT
Running org.apache.phoenix.end2end.DropSchemaIT
Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.159 sec - in org.apache.phoenix.end2end.DerivedTableIT
Running org.apache.phoenix.end2end.ExtendedQueryExecIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.253 sec - in org.apache.phoenix.end2end.DropSchemaIT
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.308 sec - in org.apache.phoenix.end2end.DistinctCountIT
Running org.apache.phoenix.end2end.FunkyNamesIT
Running org.apache.phoenix.end2end.GroupByIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.744 sec - in org.apache.phoenix.end2end.ExtendedQueryExecIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.974 sec - in org.apache.phoenix.end2end.FunkyNamesIT
Running org.apache.phoenix.end2end.NotQueryIT
Running org.apache.phoenix.end2end.NativeHBaseTypesIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.737 sec - in org.apache.phoenix.end2end.NativeHBaseTypesIT
Running org.apache.phoenix.end2end.PointInTimeQueryIT
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 53.541 sec - in org.apache.phoenix.end2end.CreateTableIT
Running org.apache.phoenix.end2end.ProductMetricsIT
Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.422 sec - in org.apache.phoenix.end2end.PointInTimeQueryIT
Running org.apache.phoenix.end2end.QueryDatabaseMetaDataIT
Tests run: 77, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 44.579 sec - in org.apache.phoenix.end2end.NotQueryIT
Tests run: 61, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 30.265 sec - in org.apache.phoenix.end2end.ProductMetricsIT
Running org.apache.phoenix.end2end.QueryIT
Tests run: 105, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 65.762 sec - in org.apache.phoenix.end2end.GroupByIT
Tests run: 245, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 146.497 sec - in org.apache.phoenix.end2end.ClientTimeArithmeticQueryIT
Running org.apache.phoenix.end2end.ReadIsolationLevelIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.562 sec - in org.apache.phoenix.end2end.ReadIsolationLevelIT
Running org.apache.phoenix.end2end.SequenceBulkAllocationIT
Running org.apache.phoenix.end2end.RowValueConstructorIT
Running org.apache.phoenix.end2end.ScanQueryIT
Tests run: 56, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.351 sec - in org.apache.phoenix.end2end.SequenceBulkAllocationIT
Running org.apache.phoenix.end2end.SequenceIT
Tests run: 19, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 88.935 sec - in org.apache.phoenix.end2end.QueryDatabaseMetaDataIT
Running org.apache.phoenix.end2end.ToNumberFunctionIT
Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.079 sec - in org.apache.phoenix.end2end.ToNumberFunctionIT
Running org.apache.phoenix.end2end.TopNIT
Tests run: 54, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.951 sec - in org.apache.phoenix.end2end.SequenceIT
Running org.apache.phoenix.end2end.TruncateFunctionIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.896 sec - in org.apache.phoenix.end2end.TopNIT
Running org.apache.phoenix.end2end.UpsertSelectIT
Tests run: 126, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 83.88 sec - in org.apache.phoenix.end2end.QueryIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.297 sec - in org.apache.phoenix.end2end.TruncateFunctionIT
Running org.apache.phoenix.end2end.UpsertValuesIT
Tests run: 46, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 70.649 sec - in org.apache.phoenix.end2end.RowValueConstructorIT
Running org.apache.phoenix.end2end.salted.SaltedTableIT
Tests run: 119, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 59.111 sec - in org.apache.phoenix.end2end.ScanQueryIT
Running org.apache.phoenix.rpc.UpdateCacheWithScnIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.601 sec - in org.apache.phoenix.rpc.UpdateCacheWithScnIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.653 sec - in org.apache.phoenix.end2end.salted.SaltedTableIT
Running org.apache.phoenix.end2end.VariableLengthPKIT
Tests run: 25, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 62.475 sec - in org.apache.phoenix.end2end.UpsertValuesIT
Tests run: 50, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 40.987 sec - in org.apache.phoenix.end2end.VariableLengthPKIT
Tests run: 22, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 79.756 sec - in org.apache.phoenix.end2end.UpsertSelectIT

Results :

Tests run: 1360, Failures: 0, Errors: 0, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test (HBaseManagedTimeTests) @ phoenix-core ---

-------------------------------------------------------
 T E S T S
-------------------------------------------------------

Results :

Tests run: 0, Failures: 0, Errors: 0, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test (NeedTheirOwnClusterTests) @ phoenix-core ---

-------------------------------------------------------
 T E S T S
-------------------------------------------------------
Running org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.909 sec - in org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
Running org.apache.phoenix.end2end.ConnectionUtilIT
Running org.apache.phoenix.end2end.CountDistinctCompressionIT
Running org.apache.phoenix.end2end.ContextClassloaderIT
Running org.apache.phoenix.end2end.CsvBulkLoadToolIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.513 sec - in org.apache.phoenix.end2end.ConnectionUtilIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.967 sec - in org.apache.phoenix.end2end.CountDistinctCompressionIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.86 sec - in org.apache.phoenix.end2end.ContextClassloaderIT
Running org.apache.phoenix.end2end.FlappingLocalIndexIT
Running org.apache.phoenix.end2end.IndexExtendedIT
Running org.apache.phoenix.end2end.QueryWithLimitIT
Running org.apache.phoenix.end2end.QueryTimeoutIT
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 62.743 sec - in org.apache.phoenix.end2end.CsvBulkLoadToolIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.265 sec - in org.apache.phoenix.end2end.QueryWithLimitIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.344 sec - in org.apache.phoenix.end2end.QueryTimeoutIT
Running org.apache.phoenix.end2end.RenewLeaseIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 0.003 sec - in org.apache.phoenix.end2end.RenewLeaseIT
Tests run: 40, Failures: 0, Errors: 0, Skipped: 16, Time elapsed: 44.144 sec - in org.apache.phoenix.end2end.IndexExtendedIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 64.081 sec - in org.apache.phoenix.end2end.FlappingLocalIndexIT
Running org.apache.phoenix.end2end.StatsCollectorIT
Running org.apache.phoenix.end2end.SpillableGroupByIT
Running org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.166 sec - in org.apache.phoenix.end2end.SpillableGroupByIT
Running org.apache.phoenix.end2end.UserDefinedFunctionsIT
Tests run: 1, Failures: 1, Errors: 0, Skipped: 0, Time elapsed: 0.006 sec <<< FAILURE! - in org.apache.phoenix.end2end.UserDefinedFunctionsIT
org.apache.phoenix.end2end.UserDefinedFunctionsIT  Time elapsed: 0.006 sec  <<< FAILURE!
java.lang.AssertionError: expected:<0> but was:<1>
	at org.apache.phoenix.end2end.UserDefinedFunctionsIT.compileTestClass(UserDefinedFunctionsIT.java:1080)
	at org.apache.phoenix.end2end.UserDefinedFunctionsIT.doSetup(UserDefinedFunctionsIT.java:267)

Running org.apache.phoenix.end2end.index.ImmutableIndexIT
Running org.apache.phoenix.end2end.index.LocalIndexIT
Running org.apache.phoenix.end2end.index.MutableIndexFailureIT
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 140.924 sec - in org.apache.phoenix.end2end.index.ImmutableIndexIT
Tests run: 120, Failures: 0, Errors: 0, Skipped: 24, Time elapsed: 199.4 sec - in org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 120, Failures: 0, Errors: 0, Skipped: 24, Time elapsed: 192.434 sec - in org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Tests run: 26, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 165.806 sec - in org.apache.phoenix.end2end.index.LocalIndexIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.856 sec - in org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Running org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
Running org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.138 sec - in org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Running org.apache.phoenix.execute.PartialCommitIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.398 sec - in org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.946 sec - in org.apache.phoenix.execute.PartialCommitIT
Running org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.504 sec - in org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Running org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Running org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.367 sec - in org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Running org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 51.895 sec - in org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
Running org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
Running org.apache.phoenix.iterate.ScannerLeaseRenewalIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 265.27 sec - in org.apache.phoenix.end2end.index.MutableIndexFailureIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.793 sec - in org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 61.95 sec - in org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 62.562 sec - in org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
Running org.apache.phoenix.monitoring.PhoenixMetricsIT
Running org.apache.phoenix.rpc.PhoenixClientRpcIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.901 sec - in org.apache.phoenix.rpc.PhoenixClientRpcIT
Running org.apache.phoenix.rpc.PhoenixServerRpcIT
Tests run: 2, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 13.946 sec <<< FAILURE! - in org.apache.phoenix.rpc.PhoenixServerRpcIT
org.apache.phoenix.rpc.PhoenixServerRpcIT  Time elapsed: 13.945 sec  <<< ERROR!
com.google.common.util.concurrent.UncheckedExecutionException: java.lang.Exception: Service failed while running
Caused by: java.lang.Exception: Service failed while running
Caused by: com.google.common.util.concurrent.UncheckedExecutionException: org.apache.thrift.transport.TTransportException: Could not create ServerSocket on address /0.0.0.0:57808.
Caused by: org.apache.thrift.transport.TTransportException: Could not create ServerSocket on address /0.0.0.0:57808.

Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 44.173 sec - in org.apache.phoenix.monitoring.PhoenixMetricsIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 161.906 sec - in org.apache.phoenix.iterate.ScannerLeaseRenewalIT

Results :

Failed tests: 
  UserDefinedFunctionsIT.doSetup:267->compileTestClass:1080 expected:<0> but was:<1>
Tests in error: 
  PhoenixServerRpcIT>BaseUniqueNamesOwnClusterIT.doTeardown:36->BaseTest.tearDownMiniCluster:544->BaseTest.tearDownTxManager:441 » UncheckedExecution

Tests run: 433, Failures: 1, Errors: 1, Skipped: 65

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:verify (ParallelStatsEnabledTest) @ phoenix-core ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix ..................................... SUCCESS [  2.050 s]
[INFO] Phoenix Core ....................................... FAILURE [51:31 min]
[INFO] Phoenix - Flume .................................... SKIPPED
[INFO] Phoenix - Pig ...................................... SKIPPED
[INFO] Phoenix Query Server Client ........................ SKIPPED
[INFO] Phoenix Query Server ............................... SKIPPED
[INFO] Phoenix - Pherf .................................... SKIPPED
[INFO] Phoenix - Spark .................................... SKIPPED
[INFO] Phoenix - Hive ..................................... SKIPPED
[INFO] Phoenix Client ..................................... SKIPPED
[INFO] Phoenix Server ..................................... SKIPPED
[INFO] Phoenix Assembly ................................... SKIPPED
[INFO] Phoenix - Tracing Web Application .................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 51:35 min
[INFO] Finished at: 2017-01-25T23:01:05+00:00
[INFO] Final Memory: 59M/681M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.19.1:verify (ParallelStatsEnabledTest) on project phoenix-core: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Phoenix-encode-columns/ws/phoenix-core/target/failsafe-reports> for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-core
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
Compressed 1.76 GB of artifacts by 70.1% relative to #45
Recording test results