Posted to commits@phoenix.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2014/10/29 00:35:21 UTC

Build failed in Jenkins: Phoenix | 3.0 | Hadoop1 #292

See <https://builds.apache.org/job/Phoenix-3.0-hadoop1/292/changes>

Changes:

[mujtaba] Update CHANGES

------------------------------------------
[...truncated 16300 lines...]
	at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:520)
	at org.apache.phoenix.end2end.ParallelIteratorsIT.doSetup(ParallelIteratorsIT.java:70)

Tests run: 2, Failures: 0, Errors: 2, Skipped: 0, Time elapsed: 2.547 sec <<< FAILURE! - in org.apache.phoenix.end2end.index.MutableIndexFailureIT
testWriteFailureWithRegionServerDown(org.apache.phoenix.end2end.index.MutableIndexFailureIT)  Time elapsed: 1.491 sec  <<< ERROR!
java.lang.NullPointerException: null
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:422)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:280)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:452)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:620)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:576)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:563)
	at org.apache.phoenix.end2end.index.MutableIndexFailureIT.doSetup(MutableIndexFailureIT.java:99)

testWriteFailureDisablesIndex(org.apache.phoenix.end2end.index.MutableIndexFailureIT)  Time elapsed: 1.055 sec  <<< ERROR!
java.lang.NullPointerException: null
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:422)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:280)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:452)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:620)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:576)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:563)
	at org.apache.phoenix.end2end.index.MutableIndexFailureIT.doSetup(MutableIndexFailureIT.java:99)

Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 1,494,930.942 sec <<< FAILURE! - in org.apache.phoenix.end2end.TenantSpecificTablesDDLIT
org.apache.phoenix.end2end.TenantSpecificTablesDDLIT  Time elapsed: 1,494,930.942 sec  <<< ERROR!
java.lang.RuntimeException: java.lang.NullPointerException
	at org.apache.phoenix.query.BaseTest.initMiniCluster(BaseTest.java:563)
	at org.apache.phoenix.query.BaseTest.setUpTestCluster(BaseTest.java:480)
	at org.apache.phoenix.query.BaseTest.checkClusterInitialized(BaseTest.java:466)
	at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:520)
	at org.apache.phoenix.end2end.BaseTenantSpecificTablesIT.doSetup(BaseTenantSpecificTablesIT.java:80)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:24)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
	at org.junit.runners.Suite.runChild(Suite.java:127)
	at org.junit.runners.Suite.runChild(Suite.java:26)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
	at org.junit.runner.JUnitCore.run(JUnitCore.java:160)
	at org.junit.runner.JUnitCore.run(JUnitCore.java:138)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.createRequestAndRun(JUnitCoreWrapper.java:113)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.executeLazy(JUnitCoreWrapper.java:94)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.execute(JUnitCoreWrapper.java:58)
	at org.apache.maven.surefire.junitcore.JUnitCoreProvider.invoke(JUnitCoreProvider.java:134)
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)
Caused by: java.lang.NullPointerException: null
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:422)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:280)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:452)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:620)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:576)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:545)
	at org.apache.phoenix.query.BaseTest.initMiniCluster(BaseTest.java:546)
	at org.apache.phoenix.query.BaseTest.setUpTestCluster(BaseTest.java:480)
	at org.apache.phoenix.query.BaseTest.checkClusterInitialized(BaseTest.java:466)
	at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:520)
	at org.apache.phoenix.end2end.BaseTenantSpecificTablesIT.doSetup(BaseTenantSpecificTablesIT.java:80)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:24)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
	at org.junit.runners.Suite.runChild(Suite.java:127)
	at org.junit.runners.Suite.runChild(Suite.java:26)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
	at org.junit.runner.JUnitCore.run(JUnitCore.java:160)
	at org.junit.runner.JUnitCore.run(JUnitCore.java:138)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.createRequestAndRun(JUnitCoreWrapper.java:113)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.executeLazy(JUnitCoreWrapper.java:94)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.execute(JUnitCoreWrapper.java:58)
	at org.apache.maven.surefire.junitcore.JUnitCoreProvider.invoke(JUnitCoreProvider.java:134)
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)

Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 1,494,931.378 sec <<< FAILURE! - in org.apache.phoenix.end2end.ContextClassloaderIT
org.apache.phoenix.end2end.ContextClassloaderIT  Time elapsed: 1,494,931.377 sec  <<< ERROR!
java.lang.NullPointerException: null
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:422)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:280)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:452)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:620)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:576)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:545)
	at org.apache.phoenix.end2end.ContextClassloaderIT.setUpBeforeClass(ContextClassloaderIT.java:62)

Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 8.386 sec <<< FAILURE! - in org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT  Time elapsed: 6.894 sec  <<< ERROR!
java.lang.NullPointerException: null
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:422)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:280)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:452)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:620)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:576)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:545)
	at org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT.setupCluster(EndToEndCoveredColumnsIndexBuilderIT.java:105)

Tests run: 2, Failures: 0, Errors: 2, Skipped: 0, Time elapsed: 1,494,934.309 sec <<< FAILURE! - in org.apache.phoenix.mapreduce.CsvBulkLoadToolIT
org.apache.phoenix.mapreduce.CsvBulkLoadToolIT  Time elapsed: 1,494,934.309 sec  <<< ERROR!
java.lang.NullPointerException: null
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:422)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:280)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:452)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:620)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:576)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:545)
	at org.apache.phoenix.mapreduce.CsvBulkLoadToolIT.setUp(CsvBulkLoadToolIT.java:58)

org.apache.phoenix.mapreduce.CsvBulkLoadToolIT  Time elapsed: 1,494,934.309 sec  <<< ERROR!
java.lang.NullPointerException: null
	at org.apache.phoenix.mapreduce.CsvBulkLoadToolIT.tearDownAfterClass(CsvBulkLoadToolIT.java:70)
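
The tearDownAfterClass error above is most likely a follow-on failure: setUp() at CsvBulkLoadToolIT.java:58 died before the mini cluster was ever assigned, so the class-level teardown dereferences a field that is still null. A minimal, hypothetical sketch of a null-guarded teardown (the class and field names here are illustrative, not the actual CsvBulkLoadToolIT code):

    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.junit.AfterClass;

    // Illustrative only: shows how a class-level teardown can avoid throwing a
    // second, misleading NullPointerException when setup never completed.
    public class GuardedTeardownExample {

        // remains null if the @BeforeClass setup failed part-way through
        private static HBaseTestingUtility hbaseTestUtil;

        @AfterClass
        public static void tearDownAfterClass() throws Exception {
            if (hbaseTestUtil != null) {
                hbaseTestUtil.shutdownMiniCluster();
            }
        }
    }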

Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 1,494,934.933 sec <<< FAILURE! - in org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT  Time elapsed: 1,494,934.933 sec  <<< ERROR!
java.lang.NullPointerException: null
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:422)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:280)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:452)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:620)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:576)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:545)
	at org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT.setupCluster(EndtoEndIndexingWithCompressionIT.java:49)

Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 1,494,934.958 sec <<< FAILURE! - in org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT  Time elapsed: 1,494,934.958 sec  <<< ERROR!
java.lang.NullPointerException: null
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:422)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:280)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:452)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:620)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:576)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:545)
	at org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT.setupCluster(FailWithoutRetriesIT.java:94)

Tests run: 2, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 1.155 sec <<< FAILURE! - in org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
testDoesNotStartRegionServerForUnsupportedCompressionAndVersion(org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT)  Time elapsed: 1.112 sec  <<< ERROR!
java.lang.NullPointerException: null
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:422)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:280)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:452)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:620)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:576)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:545)
	at org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT.testDoesNotStartRegionServerForUnsupportedCompressionAndVersion(FailForUnsupportedHBaseVersionsIT.java:121)

Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 11.974 sec <<< FAILURE! - in org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT  Time elapsed: 10.482 sec  <<< ERROR!
java.lang.NullPointerException: null
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:422)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:280)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:452)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:620)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:576)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:545)
	at org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT.setupCluster(EndToEndCoveredIndexingIT.java:112)


Results :

Tests in error: 
  KeyOnlyIT.doSetup:55->BaseTest.setUpTestDriver:520->BaseTest.checkClusterInitialized:466->BaseTest.setUpTestCluster:480->BaseTest.initMiniCluster:563 » Runtime
  StatsCollectorIT.doSetup:59->BaseTest.setUpTestDriver:520->BaseTest.checkClusterInitialized:466->BaseTest.setUpTestCluster:480->BaseTest.initMiniCluster:563 » Runtime
  SpillableGroupByIT.doSetup:73->BaseTest.setUpTestDriver:520->BaseTest.checkClusterInitialized:466->BaseTest.setUpTestCluster:480->BaseTest.initMiniCluster:563 » Runtime
  MultiCfQueryExecIT.doSetup:56->BaseTest.setUpTestDriver:520->BaseTest.checkClusterInitialized:466->BaseTest.setUpTestCluster:480->BaseTest.initMiniCluster:563 » Runtime
  MutableIndexFailureIT.doSetup:99 » NullPointer
  SaltedViewIT>BaseViewIT.doSetup:51->BaseTest.setUpTestDriver:520->BaseTest.checkClusterInitialized:466->BaseTest.setUpTestCluster:480->BaseTest.initMiniCluster:563 » Runtime
  ViewIT>BaseViewIT.doSetup:51->BaseTest.setUpTestDriver:520->BaseTest.checkClusterInitialized:466->BaseTest.setUpTestCluster:480->BaseTest.initMiniCluster:563 » Runtime
  TenantSpecificTablesDMLIT>BaseTenantSpecificTablesIT.doSetup:80->BaseTest.setUpTestDriver:520->BaseTest.checkClusterInitialized:466->BaseTest.setUpTestCluster:480->BaseTest.initMiniCluster:563 » Runtime
  MutableIndexFailureIT.doSetup:99 » NullPointer
  ParallelIteratorsIT.doSetup:70->BaseTest.setUpTestDriver:520->BaseTest.checkClusterInitialized:466->BaseTest.setUpTestCluster:480->BaseTest.initMiniCluster:563 » Runtime
  TenantSpecificTablesDDLIT>BaseTenantSpecificTablesIT.doSetup:80->BaseTest.setUpTestDriver:520->BaseTest.checkClusterInitialized:466->BaseTest.setUpTestCluster:480->BaseTest.initMiniCluster:563 » Runtime
  ContextClassloaderIT.setUpBeforeClass:62 » NullPointer
  EndToEndCoveredColumnsIndexBuilderIT.setupCluster:105 » NullPointer
  CsvBulkLoadToolIT.setUp:58 » NullPointer
  CsvBulkLoadToolIT.tearDownAfterClass:70 NullPointer
  EndtoEndIndexingWithCompressionIT.setupCluster:49 » NullPointer
  FailWithoutRetriesIT.setupCluster:94 » NullPointer
  FailForUnsupportedHBaseVersionsIT.testDoesNotStartRegionServerForUnsupportedCompressionAndVersion:121 » NullPointer
  EndToEndCoveredIndexingIT.setupCluster:112 » NullPointer

Tests run: 20, Failures: 0, Errors: 19, Skipped: 0
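
All 19 errors in this run trace back to the same root cause: each failing IT's class-level setup funnels into HBaseTestingUtility.startMiniCluster(), which throws a NullPointerException from MiniDFSCluster.startDataNodes (MiniDFSCluster.java:422) while bringing up the shared mini DFS, so every suite aborts before running a single test. A rough sketch of that shared bootstrap pattern, with hypothetical class names standing in for Phoenix's BaseTest/doSetup chain (only the HBase and JUnit APIs shown are real):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.junit.AfterClass;
    import org.junit.BeforeClass;

    // Hypothetical stand-in for the doSetup -> setUpTestDriver ->
    // setUpTestCluster -> initMiniCluster path seen in the traces above.
    public abstract class SharedMiniClusterIT {

        protected static HBaseTestingUtility util;

        @BeforeClass
        public static void doSetup() throws Exception {
            Configuration conf = HBaseConfiguration.create();
            util = new HBaseTestingUtility(conf);
            // startMiniCluster() starts a MiniDFSCluster first; the NPE thrown
            // from MiniDFSCluster.startDataNodes aborts this call, which is why
            // every IT sharing this bootstrap fails in @BeforeClass.
            util.startMiniCluster();
        }

        @AfterClass
        public static void doTeardown() throws Exception {
            if (util != null) {
                util.shutdownMiniCluster();
            }
        }
    }

Because the failure happens during cluster bootstrap rather than inside any individual test, fixing the environment (or whatever the MiniDFSCluster setup trips over) should clear this whole block of errors at once.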

[INFO] 
[INFO] --- maven-failsafe-plugin:2.17:verify (HBaseManagedTimeTests) @ phoenix-core ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/Phoenix-3.0-hadoop1/ws/phoenix-core/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix .................................... SUCCESS [2.778s]
[INFO] Phoenix Hadoop Compatibility ...................... SUCCESS [0.470s]
[INFO] Phoenix Core ...................................... FAILURE [2:44.542s]
[INFO] Phoenix - Flume ................................... SKIPPED
[INFO] Phoenix - Pig ..................................... SKIPPED
[INFO] Phoenix Hadoop1 Compatibility ..................... SKIPPED
[INFO] Phoenix Assembly .................................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 2:49.302s
[INFO] Finished at: Tue Oct 28 23:33:00 UTC 2014
[INFO] Final Memory: 36M/410M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.17:verify (HBaseManagedTimeTests) on project phoenix-core: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Phoenix-3.0-hadoop1/ws/phoenix-core/target/failsafe-reports> for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-core
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
Sending artifact delta relative to Phoenix | 3.0 | Hadoop1 #291
Archived 437 artifacts
Archive block size is 32768
Received 6 blocks and 47653881 bytes
Compression is 0.4%
Took 23 sec
Recording test results

Build failed in Jenkins: Phoenix | 3.0 | Hadoop1 #293

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-3.0-hadoop1/293/>

------------------------------------------
Started by user mujtaba
Building remotely on jenkins-ubuntu-1404-4gb-b85 (jenkins-cloud-4GB Ubuntu ubuntu) in workspace <https://builds.apache.org/job/Phoenix-3.0-hadoop1/ws/>
 > git rev-parse --is-inside-work-tree
Fetching changes from the remote Git repository
 > git config remote.origin.url https://git-wip-us.apache.org/repos/asf/phoenix.git
Fetching upstream changes from https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version
 > git fetch --tags --progress https://git-wip-us.apache.org/repos/asf/phoenix.git +refs/heads/*:refs/remotes/origin/*
 > git rev-parse origin/3.0^{commit}
Checking out Revision 8b460b5c086f46b3e636133df90932c80c1a643a (origin/3.0)
 > git config core.sparsecheckout
 > git checkout -f 8b460b5c086f46b3e636133df90932c80c1a643a
 > git rev-list 8b460b5c086f46b3e636133df90932c80c1a643a
No emails were triggered.
FATAL: null
java.io.IOException
	at hudson.remoting.FastPipedInputStream.read(FastPipedInputStream.java:177)
	at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:283)
	at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:325)
	at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:177)
	at java.io.InputStreamReader.read(InputStreamReader.java:184)
	at java.io.Reader.read(Reader.java:140)
	at org.apache.commons.io.IOUtils.copyLarge(IOUtils.java:2001)
	at org.apache.commons.io.IOUtils.copyLarge(IOUtils.java:1980)
	at org.apache.commons.io.IOUtils.copy(IOUtils.java:1957)
	at org.apache.commons.io.IOUtils.copy(IOUtils.java:1907)
	at org.apache.commons.io.IOUtils.toString(IOUtils.java:778)
	at org.apache.commons.io.IOUtils.toString(IOUtils.java:759)
	at hudson.FilePath.readToString(FilePath.java:1658)
	at hudson.tools.DownloadFromUrlInstaller.isUpToDate(DownloadFromUrlInstaller.java:43)
	at hudson.tools.DownloadFromUrlInstaller.performInstallation(DownloadFromUrlInstaller.java:67)
	at hudson.tools.InstallerTranslator.getToolHome(InstallerTranslator.java:61)
	at hudson.tools.ToolLocationNodeProperty.getToolHome(ToolLocationNodeProperty.java:107)
	at hudson.tools.ToolInstallation.translateFor(ToolInstallation.java:204)
	at hudson.tasks.Maven$MavenInstallation.forNode(Maven.java:610)
	at hudson.tasks.Maven.perform(Maven.java:289)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:804)
	at hudson.model.Build$BuildExecution.build(Build.java:199)
	at hudson.model.Build$BuildExecution.doRun(Build.java:160)
	at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:585)
	at hudson.model.Run.execute(Run.java:1676)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:88)
	at hudson.model.Executor.run(Executor.java:231)

Jenkins build is back to normal : Phoenix | 3.0 | Hadoop1 #296

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-3.0-hadoop1/296/>