Posted to commits@phoenix.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2015/09/19 21:20:30 UTC

Build failed in Jenkins: Phoenix | Master #898

See <https://builds.apache.org/job/Phoenix-master/898/changes>

Changes:

[greid] PHOENIX-2239 Improve strict mode in psql CSV load

[greid] PHOENIX-2238 Support non-printable delimiters

------------------------------------------
[...truncated 120502 lines...]
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:61)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
	... 19 more
Caused by: java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:693)
	at java.util.concurrent.ThreadPoolExecutor.addWorker(ThreadPoolExecutor.java:949)
	at java.util.concurrent.ThreadPoolExecutor.execute(ThreadPoolExecutor.java:1360)
	at org.apache.hadoop.hbase.client.ResultBoundedCompletionService.submit(ResultBoundedCompletionService.java:142)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.addCallsForCurrentReplica(ScannerCallableWithReplicas.java:290)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:169)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:61)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
	... 28 more
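
The root failure in the trace above is the JVM being unable to create another native thread. Despite the OutOfMemoryError name, this is independent of heap headroom: it surfaces when the operating system refuses the thread, commonly because the per-user process limit (ulimit -u) on a shared build slave is exhausted. A minimal, self-contained repro of the same error class, unrelated to this build's code:

    // Illustrative sketch only: exhaust native threads until Thread.start()
    // fails. Running this will stress the machine; it is not a test.
    public class NativeThreadExhaustion {
        public static void main(String[] args) {
            long started = 0;
            try {
                while (true) {
                    Thread t = new Thread(() -> {
                        try {
                            Thread.sleep(Long.MAX_VALUE); // park forever, pinning a native thread
                        } catch (InterruptedException ignored) {
                        }
                    });
                    t.setDaemon(true);
                    t.start();
                    started++;
                }
            } catch (OutOfMemoryError e) {
                // Same error class as in the build log above.
                System.err.println("Failed after " + started + " threads: " + e.getMessage());
            }
        }
    }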

	at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1196)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:287)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32675)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1615)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:92)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:89)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:95)
	at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.dropTable(MetaDataProtos.java:11829)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$8.call(ConnectionQueryServicesImpl.java:1333)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$8.call(ConnectionQueryServicesImpl.java:1319)
	at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1727)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
	at java.util.concurrent.FutureTask.run(FutureTask.java:166)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:724)

testSelfJoin[0](org.apache.phoenix.end2end.HashJoinIT)  Time elapsed: 1.671 sec  <<< ERROR!
org.apache.phoenix.schema.SequenceAlreadyExistsException: ERROR 1200 (42Z00): Sequence already exists. tableName=MY.SEQ
	at org.apache.phoenix.exception.SQLExceptionCode$11.newException(SQLExceptionCode.java:259)
	at org.apache.phoenix.exception.SQLExceptionInfo.buildException(SQLExceptionInfo.java:145)
	at org.apache.phoenix.schema.Sequence.createSequence(Sequence.java:577)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl.createSequence(ConnectionQueryServicesImpl.java:2259)
	at org.apache.phoenix.schema.MetaDataClient.createSequence(MetaDataClient.java:1347)
	at org.apache.phoenix.schema.MetaDataClient.createSequence(MetaDataClient.java:1338)
	at org.apache.phoenix.compile.CreateSequenceCompiler$1.execute(CreateSequenceCompiler.java:238)
	at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:320)
	at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:312)
	at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
	at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:310)
	at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1433)
	at org.apache.phoenix.query.BaseTest.initJoinTableValues(BaseTest.java:1395)
	at org.apache.phoenix.end2end.HashJoinIT.initTable(HashJoinIT.java:93)
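
The SequenceAlreadyExistsException above means an earlier run (or the failed run above it) left the MY.SEQ sequence behind, so the test's CREATE SEQUENCE collides with it. Phoenix's sequence DDL accepts IF NOT EXISTS and IF EXISTS, so setup of this kind can be made idempotent. A minimal sketch, assuming a local Phoenix connection (the URL is a placeholder; this is not the test harness's actual code):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class IdempotentSequenceSetup {
        public static void main(String[] args) throws Exception {
            // jdbc:phoenix:localhost is a placeholder connection string.
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
                 Statement stmt = conn.createStatement()) {
                // Safe to run repeatedly: neither statement fails if the
                // sequence is already present or already gone.
                stmt.execute("DROP SEQUENCE IF EXISTS MY.SEQ");
                stmt.execute("CREATE SEQUENCE IF NOT EXISTS MY.SEQ START WITH 1 INCREMENT BY 1");
            }
        }
    }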

testJoinWithSubqueryAndAggregation[1](org.apache.phoenix.end2end.HashJoinIT)  Time elapsed: 32.951 sec  <<< ERROR!
org.apache.phoenix.exception.PhoenixIOException: java.util.concurrent.ExecutionException: java.io.IOException: Unable to close file because the last block does not have enough number of replicas.
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
	at org.apache.hadoop.hbase.util.ForeignExceptionUtil.toIOException(ForeignExceptionUtil.java:45)
	at org.apache.hadoop.hbase.client.HBaseAdmin$ProcedureFuture.convertResult(HBaseAdmin.java:4206)
	at org.apache.hadoop.hbase.client.HBaseAdmin$ProcedureFuture.waitProcedureResult(HBaseAdmin.java:4164)
	at org.apache.hadoop.hbase.client.HBaseAdmin$ProcedureFuture.get(HBaseAdmin.java:4098)
	at org.apache.hadoop.hbase.client.HBaseAdmin.createTable(HBaseAdmin.java:560)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl.ensureTableCreated(ConnectionQueryServicesImpl.java:850)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl.createTable(ConnectionQueryServicesImpl.java:1223)
	at org.apache.phoenix.schema.MetaDataClient.createTableInternal(MetaDataClient.java:1937)
	at org.apache.phoenix.schema.MetaDataClient.createIndex(MetaDataClient.java:1288)
	at org.apache.phoenix.compile.CreateIndexCompiler$1.execute(CreateIndexCompiler.java:95)
	at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:320)
	at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:312)
	at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
	at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:310)
	at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1433)
	at org.apache.phoenix.end2end.HashJoinIT.initTable(HashJoinIT.java:99)
Caused by: org.apache.hadoop.ipc.RemoteException: java.util.concurrent.ExecutionException: java.io.IOException: Unable to close file because the last block does not have enough number of replicas.
	at org.apache.hadoop.hbase.util.ModifyRegionUtils.createRegions(ModifyRegionUtils.java:186)
	at org.apache.hadoop.hbase.util.ModifyRegionUtils.createRegions(ModifyRegionUtils.java:141)
	at org.apache.hadoop.hbase.util.ModifyRegionUtils.createRegions(ModifyRegionUtils.java:118)
	at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure$3.createHdfsRegions(CreateTableProcedure.java:349)
	at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.createFsLayout(CreateTableProcedure.java:368)
	at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.createFsLayout(CreateTableProcedure.java:342)
	at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.executeFromState(CreateTableProcedure.java:118)
	at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.executeFromState(CreateTableProcedure.java:58)
	at org.apache.hadoop.hbase.procedure2.StateMachineProcedure.execute(StateMachineProcedure.java:107)
	at org.apache.hadoop.hbase.procedure2.Procedure.doExecute(Procedure.java:400)
	at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execProcedure(ProcedureExecutor.java:851)
	at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execLoop(ProcedureExecutor.java:661)
	at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execLoop(ProcedureExecutor.java:614)
	at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.access$200(ProcedureExecutor.java:70)
	at org.apache.hadoop.hbase.procedure2.ProcedureExecutor$1.run(ProcedureExecutor.java:405)
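
"Unable to close file because the last block does not have enough number of replicas" is raised by the HDFS client when the DataNode pipeline cannot acknowledge the final block of a file; on a single-node mini-cluster that is already starved of threads (see the OutOfMemoryError earlier in this log) it is usually environmental flakiness rather than data loss. One common mitigation in test harnesses is to run the mini-cluster with a replication factor of 1. A sketch under that assumption, using the standard HBase test utility (this is not Phoenix's actual test setup):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HBaseTestingUtility;

    public class SingleReplicaMiniCluster {
        public static void main(String[] args) throws Exception {
            Configuration conf = HBaseConfiguration.create();
            // One replica suffices on a single-node test cluster and avoids
            // "not enough replicas" errors when a DataNode thread stalls.
            conf.setInt("dfs.replication", 1);
            HBaseTestingUtility util = new HBaseTestingUtility(conf);
            util.startMiniCluster(); // in-process HDFS, ZooKeeper and HBase
            try {
                // ... run tests against util.getConfiguration() ...
            } finally {
                util.shutdownMiniCluster();
            }
        }
    }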

Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 93.839 sec - in org.apache.phoenix.iterate.RoundRobinResultIteratorIT
Tests run: 108, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 181.316 sec - in org.apache.phoenix.end2end.SortMergeJoinIT

Results :


Tests in error: 
  ArrayToStringFunctionIT>BaseHBaseManagedTimeIT.cleanUpAfterTest:64->BaseTest.deletePriorTables:788->BaseTest.deletePriorTables:799->BaseTest.deletePriorTables:833 » PhoenixIO
org.apache.phoenix.end2end.ArrayToStringFunctionIT.testArrayToStringFunctionWithUpsertSelect2(org.apache.phoenix.end2end.ArrayToStringFunctionIT)
  Run 1: ArrayToStringFunctionIT.testArrayToStringFunctionWithUpsertSelect2:381 » PhoenixIO
  Run 2: ArrayToStringFunctionIT>BaseHBaseManagedTimeIT.cleanUpAfterTest:64->BaseTest.deletePriorTables:788->BaseTest.deletePriorTables:799->BaseTest.deletePriorTables:833 » PhoenixIO

  ArrayToStringFunctionIT.testArrayToStringFunctionWithUpsertSelect3:415 » PhoenixIO
org.apache.phoenix.end2end.HashJoinIT.testJoinPlanWithIndex[0](org.apache.phoenix.end2end.HashJoinIT)
  Run 1: HashJoinIT.initTable:93->BaseTest.initJoinTableValues:1629 » PhoenixIO org.apa...
  Run 2: HashJoinIT>BaseHBaseManagedTimeIT.cleanUpAfterTest:64->BaseTest.deletePriorTables:788->BaseTest.deletePriorTables:799->BaseTest.deletePriorTables:833 » PhoenixIO

  HashJoinIT.initTable:99 » PhoenixIO java.util.concurrent.ExecutionException: j...
  HashJoinIT.initTable:93->BaseTest.initJoinTableValues:1395 » SequenceAlreadyExists


Tests run: 1166, Failures: 0, Errors: 6, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.18:integration-test (NeedTheirOwnClusterTests) @ phoenix-core ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/failsafe-reports>
[INFO] parallel='none', perCoreThreadCount=true, threadCount=0, useUnlimitedThreads=false, threadCountSuites=0, threadCountClasses=0, threadCountMethods=0, parallelOptimized=true

-------------------------------------------------------
 T E S T S
-------------------------------------------------------
Running org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
Running org.apache.phoenix.hbase.index.balancer.IndexLoadBalancerIT
Running org.apache.phoenix.rpc.PhoenixServerRpcIT
Running org.apache.phoenix.rpc.PhoenixClientRpcIT
Running org.apache.phoenix.monitoring.PhoenixMetricsIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.704 sec - in org.apache.phoenix.rpc.PhoenixClientRpcIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.493 sec - in org.apache.phoenix.rpc.PhoenixServerRpcIT
Running org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Running org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.675 sec - in org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Running org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.1 sec - in org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Tests run: 17, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 66.125 sec - in org.apache.phoenix.monitoring.PhoenixMetricsIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.886 sec - in org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Running org.apache.phoenix.end2end.SpillableGroupByIT
Running org.apache.phoenix.end2end.KeyOnlyIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.543 sec - in org.apache.phoenix.end2end.SpillableGroupByIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 118.758 sec - in org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.895 sec - in org.apache.phoenix.end2end.KeyOnlyIT
Running org.apache.phoenix.end2end.ParallelIteratorsIT
Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Running org.apache.phoenix.end2end.TenantSpecificTablesDDLIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.394 sec - in org.apache.phoenix.end2end.ParallelIteratorsIT
Running org.apache.phoenix.end2end.index.MutableIndexFailureIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 118.675 sec - in org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.868 sec - in org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Running org.apache.phoenix.end2end.index.ImmutableIndexWithStatsIT
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 172.511 sec - in org.apache.phoenix.hbase.index.balancer.IndexLoadBalancerIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.852 sec - in org.apache.phoenix.end2end.index.ImmutableIndexWithStatsIT
Tests run: 17, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 44.25 sec - in org.apache.phoenix.end2end.TenantSpecificTablesDDLIT
Running org.apache.phoenix.end2end.ContextClassloaderIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.003 sec - in org.apache.phoenix.end2end.ContextClassloaderIT
Running org.apache.phoenix.end2end.ViewIT
Running org.apache.phoenix.end2end.AlterTableIT
Running org.apache.phoenix.end2end.QueryWithLimitIT
Running org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.591 sec - in org.apache.phoenix.end2end.QueryWithLimitIT
Running org.apache.phoenix.end2end.StatsCollectorWithSplitsAndMultiCFIT
Tests run: 22, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 68.082 sec - in org.apache.phoenix.end2end.ViewIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 58.11 sec - in org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.695 sec - in org.apache.phoenix.end2end.StatsCollectorWithSplitsAndMultiCFIT
Running org.apache.phoenix.end2end.CountDistinctCompressionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.203 sec - in org.apache.phoenix.end2end.CountDistinctCompressionIT
Running org.apache.phoenix.end2end.TenantSpecificTablesDMLIT
Running org.apache.phoenix.end2end.UserDefinedFunctionsIT
Running org.apache.phoenix.end2end.SaltedViewIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 163.115 sec - in org.apache.phoenix.end2end.index.MutableIndexFailureIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.166 sec - in org.apache.phoenix.end2end.SaltedViewIT
Tests run: 13, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 30.457 sec - in org.apache.phoenix.end2end.UserDefinedFunctionsIT
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 36.164 sec - in org.apache.phoenix.end2end.TenantSpecificTablesDMLIT
Running org.apache.phoenix.end2end.UnionAllIT
Running org.apache.phoenix.end2end.QueryTimeoutIT
Running org.apache.phoenix.end2end.MultiCfQueryExecIT
Running org.apache.phoenix.execute.PartialCommitIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.706 sec - in org.apache.phoenix.end2end.QueryTimeoutIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.594 sec - in org.apache.phoenix.execute.PartialCommitIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.802 sec - in org.apache.phoenix.end2end.MultiCfQueryExecIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.297 sec - in org.apache.phoenix.end2end.UnionAllIT
Running org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
Running org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.346 sec - in org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
Running org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndUncompressedWALInHBase_094_9_IT
Running org.apache.phoenix.mapreduce.IndexToolIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.998 sec - in org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
Running org.apache.phoenix.mapreduce.CsvBulkLoadToolIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.075 sec - in org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndUncompressedWALInHBase_094_9_IT
Tests run: 51, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 395.113 sec - in org.apache.phoenix.end2end.AlterTableIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 299.622 sec - in org.apache.phoenix.mapreduce.CsvBulkLoadToolIT
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 352.849 sec - in org.apache.phoenix.mapreduce.IndexToolIT

Results :




Tests run: 244, Failures: 0, Errors: 0, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.18:verify (ClientManagedTimeTests) @ phoenix-core ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix .................................... SUCCESS [4.564s]
[INFO] Phoenix Core ...................................... FAILURE [37:20.208s]
[INFO] Phoenix - Flume ................................... SKIPPED
[INFO] Phoenix - Pig ..................................... SKIPPED
[INFO] Phoenix Query Server Client ....................... SKIPPED
[INFO] Phoenix Query Server .............................. SKIPPED
[INFO] Phoenix - Pherf ................................... SKIPPED
[INFO] Phoenix - Spark ................................... SKIPPED
[INFO] Phoenix Assembly .................................. SKIPPED
[INFO] Phoenix - Tracing Web Application ................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 37:25.337s
[INFO] Finished at: Sat Sep 19 19:16:09 UTC 2015
[INFO] Final Memory: 58M/1128M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.18:verify (ClientManagedTimeTests) on project phoenix-core: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/failsafe-reports> for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-core
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
Sending artifact delta relative to Phoenix | Master #896
Archived 1342 artifacts
Archive block size is 32768
Received 23260 blocks and 586890110 bytes
Compression is 56.5%
Took 4 min 27 sec
Updating PHOENIX-2239
Updating PHOENIX-2238
Recording test results

Jenkins build is back to normal : Phoenix | Master #900

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-master/900/changes>


Build failed in Jenkins: Phoenix | Master #899

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-master/899/changes>

Changes:

[dumindukarunathilaka] PHOENIX-2205 Group by a divided value (e.g., time/10) returns NULL
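
For context on the PHOENIX-2205 fix above: the reported bug was that grouping by a derived expression such as time/10 returned NULL for the grouped value. The affected query shape looks roughly like the following (table and column names here are hypothetical):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class GroupByDividedValue {
        public static void main(String[] args) throws Exception {
            // Hypothetical schema EVENTS(T BIGINT NOT NULL PRIMARY KEY, ...);
            // the connection URL is a placeholder.
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery(
                         "SELECT T/10 AS BUCKET, COUNT(*) FROM EVENTS GROUP BY T/10")) {
                while (rs.next()) {
                    // Before the fix, BUCKET could come back NULL.
                    System.out.println(rs.getLong("BUCKET") + " -> " + rs.getLong(2));
                }
            }
        }
    }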

------------------------------------------
[...truncated 123148 lines...]
[INFO] Copying 5 resources
[INFO] skip non existing resourceDirectory <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/src/it/resources>
[INFO] Copying 3 resources
[INFO] 
[INFO] --- maven-compiler-plugin:3.0:testCompile (default-testCompile) @ phoenix-pherf ---
[INFO] Changes detected - recompiling the module!
[INFO] Compiling 13 source files to <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/target/test-classes>
[WARNING] <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/RowCalculatorTest.java>: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/RowCalculatorTest.java> uses or overrides a deprecated API.
[WARNING] <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/RowCalculatorTest.java>: Recompile with -Xlint:deprecation for details.
[WARNING] <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java>: Some input files use unchecked or unsafe operations.
[WARNING] <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java>: Recompile with -Xlint:unchecked for details.
[INFO] 
[INFO] --- maven-surefire-plugin:2.18:test (default-test) @ phoenix-pherf ---
[INFO] Surefire report directory: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/target/surefire-reports>

-------------------------------------------------------
 T E S T S
-------------------------------------------------------
Running org.apache.phoenix.pherf.ResourceTest
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.612 sec - in org.apache.phoenix.pherf.ResourceTest
Running org.apache.phoenix.pherf.ConfigurationParserTest
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.556 sec - in org.apache.phoenix.pherf.ConfigurationParserTest
Running org.apache.phoenix.pherf.RuleGeneratorTest
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.483 sec - in org.apache.phoenix.pherf.RuleGeneratorTest
Running org.apache.phoenix.pherf.ResultTest
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.854 sec - in org.apache.phoenix.pherf.ResultTest
Running org.apache.phoenix.pherf.RowCalculatorTest
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0 sec - in org.apache.phoenix.pherf.RowCalculatorTest
Running org.apache.phoenix.pherf.PherfTest
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.444 sec - in org.apache.phoenix.pherf.PherfTest
Running org.apache.phoenix.pherf.TestHBaseProps
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.103 sec - in org.apache.phoenix.pherf.TestHBaseProps
Running org.apache.phoenix.pherf.ColumnTest
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0 sec - in org.apache.phoenix.pherf.ColumnTest

Results :




Tests run: 21, Failures: 0, Errors: 0, Skipped: 0

[INFO] 
[INFO] --- maven-source-plugin:2.2.1:jar-no-fork (attach-sources) @ phoenix-pherf ---
[INFO] Building jar: <https://builds.apache.org/job/Phoenix-master/899/artifact/phoenix-pherf/target/phoenix-pherf-4.6.0-HBase-1.1-SNAPSHOT-sources.jar>
[INFO] 
[INFO] --- maven-jar-plugin:2.4:test-jar (default) @ phoenix-pherf ---
[INFO] Building jar: <https://builds.apache.org/job/Phoenix-master/899/artifact/phoenix-pherf/target/phoenix-pherf-4.6.0-HBase-1.1-SNAPSHOT-tests.jar>
[INFO] 
[INFO] --- maven-jar-plugin:2.4:jar (default-jar) @ phoenix-pherf ---
[INFO] Building jar: <https://builds.apache.org/job/Phoenix-master/899/artifact/phoenix-pherf/target/phoenix-pherf-4.6.0-HBase-1.1-SNAPSHOT.jar>
[INFO] 
[INFO] --- maven-site-plugin:3.2:attach-descriptor (attach-descriptor) @ phoenix-pherf ---
[INFO] 
[INFO] --- maven-assembly-plugin:2.5.2:single (make-dependency-jar) @ phoenix-pherf ---
[INFO] artifact junit:junit-dep: checking for updates from apache release
[INFO] artifact junit:junit-dep: checking for updates from conjars.org
[INFO] artifact junit:junit-dep: checking for updates from apache snapshot
[INFO] artifact junit:junit-dep: checking for updates from sonatype-nexus-snapshots
[INFO] artifact junit:junit-dep: checking for updates from central
[INFO] artifact junit:junit: checking for updates from apache release
[INFO] artifact junit:junit: checking for updates from conjars.org
[INFO] artifact junit:junit: checking for updates from apache snapshot
[INFO] artifact junit:junit: checking for updates from sonatype-nexus-snapshots
[INFO] artifact junit:junit: checking for updates from central
[INFO] artifact commons-io:commons-io: checking for updates from apache release
[INFO] artifact commons-io:commons-io: checking for updates from conjars.org
[INFO] artifact commons-io:commons-io: checking for updates from apache snapshot
[INFO] artifact commons-io:commons-io: checking for updates from sonatype-nexus-snapshots
[INFO] artifact commons-io:commons-io: checking for updates from central
[INFO] Building jar: <https://builds.apache.org/job/Phoenix-master/899/artifact/phoenix-pherf/target/phoenix-pherf-4.6.0-HBase-1.1-SNAPSHOT-jar-with-dependencies.jar>
[INFO] 
[INFO] --- maven-assembly-plugin:2.5.2:single (make-assembly) @ phoenix-pherf ---
[INFO] Reading assembly descriptor: src/main/assembly/minimal.xml
[INFO] Reading assembly descriptor: src/main/assembly/standalone.xml
[INFO] Reading assembly descriptor: src/main/assembly/cluster.xml
[INFO] artifact junit:junit-dep: checking for updates from apache release
[INFO] artifact junit:junit-dep: checking for updates from conjars.org
[INFO] artifact junit:junit-dep: checking for updates from apache snapshot
[INFO] artifact junit:junit-dep: checking for updates from sonatype-nexus-snapshots
[INFO] artifact junit:junit-dep: checking for updates from central
[INFO] artifact junit:junit: checking for updates from apache release
[INFO] artifact junit:junit: checking for updates from conjars.org
[INFO] artifact junit:junit: checking for updates from apache snapshot
[INFO] artifact junit:junit: checking for updates from sonatype-nexus-snapshots
[INFO] artifact junit:junit: checking for updates from central
[INFO] artifact commons-io:commons-io: checking for updates from apache release
[INFO] artifact commons-io:commons-io: checking for updates from conjars.org
[INFO] artifact commons-io:commons-io: checking for updates from apache snapshot
[INFO] artifact commons-io:commons-io: checking for updates from sonatype-nexus-snapshots
[INFO] artifact commons-io:commons-io: checking for updates from central
[INFO] Building jar: <https://builds.apache.org/job/Phoenix-master/899/artifact/phoenix-pherf/target/phoenix-pherf-4.6.0-HBase-1.1-SNAPSHOT-minimal.jar>
[INFO] Building zip: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/target/phoenix-pherf-4.6.0-HBase-1.1-SNAPSHOT-standalone.zip>
[INFO] Building zip: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/target/phoenix-pherf-4.6.0-HBase-1.1-SNAPSHOT-cluster.zip>
[INFO] 
[INFO] --- maven-failsafe-plugin:2.18:integration-test (ClientManagedTimeTests) @ phoenix-pherf ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/target/failsafe-reports>
[INFO] parallel='none', perCoreThreadCount=true, threadCount=0, useUnlimitedThreads=false, threadCountSuites=0, threadCountClasses=0, threadCountMethods=0, parallelOptimized=true

-------------------------------------------------------
 T E S T S
-------------------------------------------------------

Results :




Tests run: 0, Failures: 0, Errors: 0, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.18:integration-test (HBaseManagedTimeTests) @ phoenix-pherf ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/target/failsafe-reports>
[INFO] parallel='none', perCoreThreadCount=true, threadCount=0, useUnlimitedThreads=false, threadCountSuites=0, threadCountClasses=0, threadCountMethods=0, parallelOptimized=true

-------------------------------------------------------
 T E S T S
-------------------------------------------------------
Running org.apache.phoenix.pherf.SchemaReaderIT
Running org.apache.phoenix.pherf.PherfMainIT
Running org.apache.phoenix.pherf.DataIngestIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 37.548 sec - in org.apache.phoenix.pherf.SchemaReaderIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 58.89 sec - in org.apache.phoenix.pherf.PherfMainIT
Tests run: 4, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 59.106 sec <<< FAILURE! - in org.apache.phoenix.pherf.DataIngestIT
testColumnRulesApplied(org.apache.phoenix.pherf.DataIngestIT)  Time elapsed: 4.024 sec  <<< ERROR!
java.sql.SQLException: ERROR 1010 (42M01): Not allowed to mutate table. tableName=PHERF.TEST_MULTI_TENANT_TABLE
	at org.apache.phoenix.exception.SQLExceptionCode$Factory$1.newException(SQLExceptionCode.java:389)
	at org.apache.phoenix.exception.SQLExceptionInfo.buildException(SQLExceptionInfo.java:145)
	at org.apache.phoenix.schema.MetaDataClient.dropTable(MetaDataClient.java:2130)
	at org.apache.phoenix.schema.MetaDataClient.dropTable(MetaDataClient.java:2043)
	at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableDropTableStatement$1.execute(PhoenixStatement.java:878)
	at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:320)
	at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:312)
	at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
	at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:310)
	at org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1422)
	at org.apache.phoenix.query.BaseTest.deletePriorTables(BaseTest.java:833)
	at org.apache.phoenix.query.BaseTest.deletePriorTables(BaseTest.java:799)
	at org.apache.phoenix.query.BaseTest.deletePriorTables(BaseTest.java:788)
	at org.apache.phoenix.end2end.BaseHBaseManagedTimeIT.cleanUpAfterTest(BaseHBaseManagedTimeIT.java:64)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
	at org.junit.runners.Suite.runChild(Suite.java:128)
	at org.junit.runners.Suite.runChild(Suite.java:27)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
	at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
	at org.junit.runner.JUnitCore.run(JUnitCore.java:115)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.createRequestAndRun(JUnitCoreWrapper.java:107)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.executeLazy(JUnitCoreWrapper.java:88)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.execute(JUnitCoreWrapper.java:57)
	at org.apache.maven.surefire.junitcore.JUnitCoreProvider.invoke(JUnitCoreProvider.java:144)
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:203)
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:155)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)
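
ERROR 1010 (42M01) is Phoenix's "cannot mutate table" code; here the cleanup's plain DROP TABLE on PHERF.TEST_MULTI_TENANT_TABLE is rejected, which can happen, for example, while tenant-specific views still reference the multi-tenant base table. Phoenix's DROP TABLE accepts IF EXISTS and CASCADE (the latter drops dependent views together with the table). A hedged sketch of an idempotent drop, not the actual BaseTest.deletePriorTables code:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class DropMultiTenantTable {
        public static void main(String[] args) throws Exception {
            // Placeholder URL; a global (non-tenant) connection is needed
            // to drop the base table.
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
                 Statement stmt = conn.createStatement()) {
                // CASCADE removes any views defined over the table as well.
                stmt.execute("DROP TABLE IF EXISTS PHERF.TEST_MULTI_TENANT_TABLE CASCADE");
            }
        }
    }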


Results :


Tests in error: 
  DataIngestIT>BaseHBaseManagedTimeIT.cleanUpAfterTest:64->BaseTest.deletePriorTables:788->BaseTest.deletePriorTables:799->BaseTest.deletePriorTables:833 » SQL


Tests run: 6, Failures: 0, Errors: 1, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.18:integration-test (NeedTheirOwnClusterTests) @ phoenix-pherf ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/target/failsafe-reports>
[INFO] parallel='none', perCoreThreadCount=true, threadCount=0, useUnlimitedThreads=false, threadCountSuites=0, threadCountClasses=0, threadCountMethods=0, parallelOptimized=true

-------------------------------------------------------
 T E S T S
-------------------------------------------------------

Results :




Tests run: 0, Failures: 0, Errors: 0, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.18:verify (ClientManagedTimeTests) @ phoenix-pherf ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix .................................... SUCCESS [3.497s]
[INFO] Phoenix Core ...................................... SUCCESS [53:16.891s]
[INFO] Phoenix - Flume ................................... SUCCESS [1:12.205s]
[INFO] Phoenix - Pig ..................................... SUCCESS [2:36.225s]
[INFO] Phoenix Query Server Client ....................... SUCCESS [1.192s]
[INFO] Phoenix Query Server .............................. SUCCESS [1:58.422s]
[INFO] Phoenix - Pherf ................................... FAILURE [2:07.780s]
[INFO] Phoenix - Spark ................................... SKIPPED
[INFO] Phoenix Assembly .................................. SKIPPED
[INFO] Phoenix - Tracing Web Application ................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 1:01:16.831s
[INFO] Finished at: Mon Sep 21 12:54:53 UTC 2015
[INFO] Final Memory: 68M/1141M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.18:verify (ClientManagedTimeTests) on project phoenix-pherf: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Phoenix-master/ws/phoenix-pherf/target/failsafe-reports> for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-pherf
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
Sending artifact delta relative to Phoenix | Master #896
Archived 1306 artifacts
Archive block size is 32768
Received 21628 blocks and 576454625 bytes
Compression is 55.1%
Took 4 min 1 sec
Updating PHOENIX-2205
Recording test results