Posted to commits@phoenix.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2014/10/07 22:10:12 UTC

Build failed in Jenkins: Phoenix | Master #409

See <https://builds.apache.org/job/Phoenix-master/409/changes>

Changes:

[jtaylor] Attempting to make unit tests more resilient

------------------------------------------
[...truncated 1981 lines...]

	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:285)
	at org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:316)
	at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:164)
	at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:59)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:114)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:90)
	at org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:282)
	at org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:187)
	at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:182)
	at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:109)
	at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:738)
	at org.apache.phoenix.iterate.TableResultIterator.<init>(TableResultIterator.java:54)
	at org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:583)
	at org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:578)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
	at java.util.concurrent.FutureTask.run(FutureTask.java:166)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:724)
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException: org.apache.hadoop.hbase.DoNotRetryIOException: Join.OrderTable,,1412705779992.29a4717875aaec774613b15a83ee0850.: Requested memory of 21196 bytes could not be allocated from remaining memory of 21196 bytes from global pool of 40000 bytes after waiting for 0ms.
	at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:83)
	at org.apache.phoenix.util.ServerUtil.throwIOException(ServerUtil.java:51)
	at org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:158)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.postScannerOpen(RegionCoprocessorHost.java:1845)
	at org.apache.hadoop.hbase.regionserver.HRegionServer.scan(HRegionServer.java:3092)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:29497)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2027)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:98)
	at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:114)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:94)
	at java.lang.Thread.run(Thread.java:724)
Caused by: org.apache.phoenix.memory.InsufficientMemoryException: Requested memory of 21196 bytes could not be allocated from remaining memory of 21196 bytes from global pool of 40000 bytes after waiting for 0ms.
	at org.apache.phoenix.memory.GlobalMemoryManager.allocateBytes(GlobalMemoryManager.java:81)
	at org.apache.phoenix.memory.GlobalMemoryManager.allocate(GlobalMemoryManager.java:100)
	at org.apache.phoenix.memory.GlobalMemoryManager.allocate(GlobalMemoryManager.java:106)
	at org.apache.phoenix.cache.aggcache.SpillableGroupByCache.<init>(SpillableGroupByCache.java:150)
	at org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver$GroupByCacheFactory.newCache(GroupedAggregateRegionObserver.java:365)
	at org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.scanUnordered(GroupedAggregateRegionObserver.java:400)
	at org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.doPostScannerOpen(GroupedAggregateRegionObserver.java:161)
	at org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:140)
	... 8 more

	at org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1452)
	at org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1656)
	at org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1714)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:29900)
	at org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:308)
	at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:164)
	at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:59)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:114)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:90)
	at org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:282)
	at org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:187)
	at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:182)
	at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:109)
	at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:738)
	at org.apache.phoenix.iterate.TableResultIterator.<init>(TableResultIterator.java:54)
	at org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:583)
	at org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:578)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
	at java.util.concurrent.FutureTask.run(FutureTask.java:166)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:724)
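
The root cause above is Phoenix's GlobalMemoryManager refusing an allocation against a
deliberately tiny 40000-byte global pool (the test setup presumably shrinks the pool;
phoenix.query.maxGlobalMemorySize is the usual knob, though the exact test configuration
is not shown in this log). Below is a minimal sketch of the allocate-or-fail-after-waiting
pattern the error message describes. It is a simplified stand-in for illustration, not
Phoenix's actual GlobalMemoryManager:

    import java.util.concurrent.TimeUnit;

    // Simplified bounded pool: allocate() waits up to maxWaitMs for space to free up,
    // then gives up, mirroring "could not be allocated ... after waiting for 0ms".
    class SimpleGlobalPool {
        private final long maxBytes;
        private long usedBytes;

        SimpleGlobalPool(long maxBytes) { this.maxBytes = maxBytes; }

        synchronized void allocate(long requested, long maxWaitMs) throws InterruptedException {
            long deadline = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(maxWaitMs);
            while (maxBytes - usedBytes < requested) {
                long remainingNs = deadline - System.nanoTime();
                if (remainingNs <= 0) {
                    throw new IllegalStateException("Requested memory of " + requested
                        + " bytes could not be allocated from remaining memory of "
                        + (maxBytes - usedBytes) + " bytes from global pool of "
                        + maxBytes + " bytes after waiting for " + maxWaitMs + "ms");
                }
                TimeUnit.NANOSECONDS.timedWait(this, remainingNs); // woken by free()
            }
            usedBytes += requested;
        }

        synchronized void free(long bytes) {
            usedBytes -= bytes;
            notifyAll(); // wake any waiting allocators
        }
    }

With a 40000-byte pool and a 0ms wait, concurrent consumers like the SpillableGroupByCache
in the trace above fail almost immediately, which is presumably part of what the
"Attempting to make unit tests more resilient" change at the top of this build is chasing.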

Running org.apache.phoenix.end2end.UpsertBigValuesIT
Running org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.023 sec - in org.apache.phoenix.end2end.StatementHintsIT
Running org.apache.phoenix.end2end.salted.SaltedTableVarLengthRowKeyIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.349 sec - in org.apache.phoenix.end2end.salted.SaltedTableVarLengthRowKeyIT
Running org.apache.phoenix.end2end.SortOrderFIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.168 sec - in org.apache.phoenix.end2end.UpsertBigValuesIT
Running org.apache.phoenix.end2end.QueryMoreIT
Tests run: 30, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.02 sec - in org.apache.phoenix.end2end.SortOrderFIT
Running org.apache.phoenix.end2end.ReverseScanIT
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.162 sec - in org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
Running org.apache.phoenix.end2end.RegexpSubstrFunctionIT
Tests run: 25, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 89.007 sec - in org.apache.phoenix.end2end.InListIT
Running org.apache.phoenix.end2end.ServerExceptionIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.223 sec - in org.apache.phoenix.end2end.ReverseScanIT
Running org.apache.phoenix.end2end.AutoCommitIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.412 sec - in org.apache.phoenix.end2end.RegexpSubstrFunctionIT
Running org.apache.phoenix.end2end.LastValueFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.722 sec - in org.apache.phoenix.end2end.AutoCommitIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.222 sec - in org.apache.phoenix.end2end.ServerExceptionIT
Running org.apache.phoenix.end2end.RoundFloorCeilFunctionsEnd2EndIT
Running org.apache.phoenix.end2end.LpadFunctionIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.048 sec - in org.apache.phoenix.end2end.LastValueFunctionIT
Tests run: 30, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.432 sec - in org.apache.phoenix.end2end.RoundFloorCeilFunctionsEnd2EndIT
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.335 sec - in org.apache.phoenix.end2end.LpadFunctionIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 30.973 sec - in org.apache.phoenix.end2end.QueryMoreIT
Tests run: 96, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 95.172 sec - in org.apache.phoenix.end2end.HashJoinIT

Results :

Tests in error: 
  LocalIndexIT.testLocalIndexScanJoinColumnsFromDataTable:423 » PhoenixIO org.ap...
  SubqueryIT.testInSubquery:665 » SQL Encountered exception in sub plan [0] exec...
  SubqueryIT.testExistsSubquery:768 » SQL Encountered exception in sub plan [0] ...
  SubqueryIT.testInSubquery:665 » SQL Encountered exception in sub plan [0] exec...
  SubqueryIT.testExistsSubquery:768 » SQL Encountered exception in sub plan [0] ...
  SubqueryIT.testInSubquery:665 » SQL Encountered exception in sub plan [0] exec...
  SubqueryIT.testExistsSubquery:768 » SQL Encountered exception in sub plan [0] ...

Tests run: 508, Failures: 0, Errors: 7, Skipped: 1
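
The SubqueryIT errors look like downstream casualties of the same exhausted pool: Phoenix
compiles an IN or EXISTS subquery into a server-side "sub plan" whose cache is charged
against the global memory manager, so once the pool is gone the sub plan execution fails.
A minimal JDBC sketch of the kind of query such a test issues (the localhost URL and the
table and column names are hypothetical, not taken from SubqueryIT itself):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class InSubqueryExample {
        public static void main(String[] args) throws Exception {
            // Standard Phoenix thick-driver JDBC URL; needs the Phoenix client
            // jar on the classpath and a reachable ZooKeeper quorum host.
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
                 Statement stmt = conn.createStatement();
                 // The IN subquery is executed first as a separate sub plan and its
                 // result cached server-side for the outer query to join against.
                 ResultSet rs = stmt.executeQuery(
                     "SELECT item_id, name FROM ITEM "
                     + "WHERE item_id IN (SELECT item_id FROM ORDERS WHERE quantity > 100)")) {
                while (rs.next()) {
                    System.out.println(rs.getString(1) + "\t" + rs.getString(2));
                }
            }
        }
    }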

[INFO] 
[INFO] --- maven-failsafe-plugin:2.17:integration-test (NeedTheirOwnClusterTests) @ phoenix-core ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/failsafe-reports>
[INFO] parallel='none', perCoreThreadCount=true, threadCount=0, useUnlimitedThreads=false, threadCountSuites=0, threadCountClasses=0, threadCountMethods=0, parallelOptimized=true

-------------------------------------------------------
 T E S T S
-------------------------------------------------------

Running org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Running org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Running org.apache.phoenix.hbase.index.balancer.IndexLoadBalancerIT
Running org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
Running org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.522 sec - in org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.581 sec - in org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Running org.apache.phoenix.end2end.KeyOnlyIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.094 sec - in org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.912 sec - in org.apache.phoenix.end2end.KeyOnlyIT
Running org.apache.phoenix.end2end.ParallelIteratorsIT
Running org.apache.phoenix.end2end.TenantSpecificTablesDDLIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.451 sec - in org.apache.phoenix.end2end.ParallelIteratorsIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 48.228 sec - in org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
Running org.apache.phoenix.end2end.index.MutableIndexFailureIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 57.745 sec - in org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
Running org.apache.phoenix.end2end.index.DropIndexDuringUpsertIT
Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.506 sec - in org.apache.phoenix.end2end.TenantSpecificTablesDDLIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.3 sec - in org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Running org.apache.phoenix.end2end.ContextClassloaderIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.118 sec - in org.apache.phoenix.end2end.ContextClassloaderIT
Running org.apache.phoenix.end2end.StatsCollectorIT
Running org.apache.phoenix.end2end.TenantSpecificTablesDMLIT
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 101.652 sec - in org.apache.phoenix.hbase.index.balancer.IndexLoadBalancerIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.919 sec - in org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.807 sec - in org.apache.phoenix.end2end.TenantSpecificTablesDMLIT
Running org.apache.phoenix.mapreduce.CsvBulkLoadToolIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 73.384 sec - in org.apache.phoenix.end2end.index.DropIndexDuringUpsertIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 137.563 sec - in org.apache.phoenix.end2end.index.MutableIndexFailureIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 176.048 sec - in org.apache.phoenix.mapreduce.CsvBulkLoadToolIT
Build timed out (after 120 minutes). Marking the build as failed.
Build was aborted
Archiving artifacts

Results :

Tests run: 86, Failures: 0, Errors: 0, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.17:verify (ClientManagedTimeTests) @ phoenix-core ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix .................................... SUCCESS [1.873s]
[INFO] Phoenix Hadoop Compatibility ...................... SUCCESS [2.948s]
[INFO] Phoenix Hadoop2 Compatibility ..................... SUCCESS [2.776s]
[INFO] Phoenix Core ...................................... FAILURE [1:59:51.805s]
[INFO] Phoenix - Flume ................................... SKIPPED
[INFO] Phoenix - Pig ..................................... SKIPPED
[INFO] Phoenix Assembly .................................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 1:59:59.867s
[INFO] Finished at: Tue Oct 07 20:08:58 UTC 2014
[INFO] Final Memory: 41M/813M
[INFO] ------------------------------------------------------------------------
Sending artifact delta relative to Phoenix | Master #408
ERROR: Failed to archive artifacts: **/surefire-reports/*,**/failsafe-reports/*.xml,**/*.txt,**/*.jar,**/bin/*.py,**/bin/*.xml,**/bin/*.properties
java.io.IOException: Pipe is already closed
	at hudson.remoting.PipeWindow.checkDeath(PipeWindow.java:108)
	at hudson.remoting.PipeWindow$Real.get(PipeWindow.java:203)
	at hudson.remoting.ProxyOutputStream._write(ProxyOutputStream.java:141)
	at hudson.remoting.ProxyOutputStream.write(ProxyOutputStream.java:109)
	at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82)
	at java.io.BufferedOutputStream.write(BufferedOutputStream.java:126)
	at java.util.zip.GZIPOutputStream.finish(GZIPOutputStream.java:169)
	at java.util.zip.DeflaterOutputStream.close(DeflaterOutputStream.java:238)
	at jsync.protocol.BaseWriter.close(BaseWriter.java:14)
	at jsync.protocol.BlockIterableWriter.close(BlockIterableWriter.java:26)
	at com.cloudbees.jenkins.plugins.jsync.archiver.JSyncArtifactManager.remoteSync(JSyncArtifactManager.java:127)
	at com.cloudbees.jenkins.plugins.jsync.archiver.JSyncArtifactManager.archive(JSyncArtifactManager.java:67)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:140)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:804)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:776)
	at hudson.model.Build$BuildExecution.post2(Build.java:183)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:725)
	at hudson.model.Run.execute(Run.java:1701)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:88)
	at hudson.model.Executor.run(Executor.java:231)
Caused by: java.io.IOException: Reader side has already been abandoned
	at hudson.remoting.FastPipedOutputStream.sink(FastPipedOutputStream.java:80)
	at hudson.remoting.FastPipedOutputStream.write(FastPipedOutputStream.java:150)
	at hudson.remoting.FastPipedOutputStream.write(FastPipedOutputStream.java:137)
	at hudson.remoting.ProxyOutputStream$Chunk$1.run(ProxyOutputStream.java:264)
	at hudson.remoting.PipeWriter$1.run(PipeWriter.java:158)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at hudson.remoting.SingleLaneExecutorService$1.run(SingleLaneExecutorService.java:111)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.Throwable
	at hudson.remoting.FastPipedOutputStream.<init>(FastPipedOutputStream.java:49)
	at hudson.remoting.Pipe.readObject(Pipe.java:188)
	at sun.reflect.GeneratedMethodAccessor274.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
	at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
	at hudson.remoting.UserRequest.deserialize(UserRequest.java:182)
	at hudson.remoting.UserRequest.perform(UserRequest.java:98)
	at hudson.remoting.UserRequest.perform(UserRequest.java:48)
	at hudson.remoting.Request$2.run(Request.java:328)
	... 5 more
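
This archiving failure is a secondary symptom: the build was aborted on timeout, so the
reader side of the Jenkins master/slave pipe was torn down while the archiver was still
streaming artifacts through it. The same failure mode is easy to reproduce with the JDK's
own piped streams (this uses java.io's classes, not Jenkins' FastPipedOutputStream, but
the semantics are analogous):

    import java.io.IOException;
    import java.io.PipedInputStream;
    import java.io.PipedOutputStream;

    public class ClosedPipeDemo {
        public static void main(String[] args) throws IOException {
            PipedInputStream readerSide = new PipedInputStream();
            PipedOutputStream writerSide = new PipedOutputStream(readerSide);

            // Abandon the reader first, as the aborted build did to the
            // archiver's stream.
            readerSide.close();

            try {
                writerSide.write(42); // fails: the reader is already gone
            } catch (IOException expected) {
                // Prints "Pipe closed", the JDK analogue of the
                // "Pipe is already closed" error above.
                System.out.println("Expected failure: " + expected.getMessage());
            }
        }
    }
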
Recording test results

Posted by Apache Jenkins Server <je...@builds.apache.org>:

Jenkins build is back to normal : Phoenix | Master #410

See <https://builds.apache.org/job/Phoenix-master/410/changes>