You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2017/09/01 04:35:37 UTC

Build failed in Jenkins: Phoenix-4.x-HBase-1.1 #556

See <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/556/display/redirect?page=changes>

Changes:

[samarth] PHOENIX-4143 ConcurrentMutationsIT flaps

------------------------------------------
[...truncated 313.95 KB...]
	at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:495)
	at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:490)
	at org.apache.phoenix.end2end.BaseHBaseManagedTimeIT.doSetup(BaseHBaseManagedTimeIT.java:57)
	at org.apache.phoenix.spark.PhoenixSparkITHelper$.doSetup(AbstractPhoenixSparkIT.scala:33)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:88)
	at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:253)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.run(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Exception encountered when invoking run on a nested suite - java.io.IOException: Cannot create directory <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/phoenix-spark/target/test-data/0d64f549-02fa-47ab-9bb8-64923df8194e/dfscluster_ae6ad09a-fdf4-4930-9bab-f28c8aa9a304/dfs/name1/current> *** ABORTED ***
  java.lang.RuntimeException: java.io.IOException: Cannot create directory <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/phoenix-spark/target/test-data/0d64f549-02fa-47ab-9bb8-64923df8194e/dfscluster_ae6ad09a-fdf4-4930-9bab-f28c8aa9a304/dfs/name1/current>
  at org.apache.phoenix.query.BaseTest.initMiniCluster(BaseTest.java:525)
  at org.apache.phoenix.query.BaseTest.setUpTestCluster(BaseTest.java:442)
  at org.apache.phoenix.query.BaseTest.checkClusterInitialized(BaseTest.java:424)
  at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:495)
  at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:490)
  at org.apache.phoenix.end2end.BaseHBaseManagedTimeIT.doSetup(BaseHBaseManagedTimeIT.java:57)
  at org.apache.phoenix.spark.PhoenixSparkITHelper$.doSetup(AbstractPhoenixSparkIT.scala:33)
  at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:88)
  at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
  at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:44)
  ...
  Cause: java.io.IOException: Cannot create directory <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/phoenix-spark/target/test-data/0d64f549-02fa-47ab-9bb8-64923df8194e/dfscluster_ae6ad09a-fdf4-4930-9bab-f28c8aa9a304/dfs/name1/current>
  at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.clearDirectory(Storage.java:337)
  at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:548)
  at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:569)
  at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:161)
  at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:991)
  at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:342)
  at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:176)
  at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:973)
  at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:811)
  at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:742)
  ...
4743 [RpcServer.reader=1,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42955 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
5688 [RpcServer.reader=1,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42416 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
5920 [RpcServer.reader=2,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42420 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
7075 [RpcServer.reader=3,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42434 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
7532 [RpcServer.reader=4,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42452 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
7549 [RpcServer.reader=2,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42498 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
54381 [ScalaTest-3] INFO  org.spark_project.jetty.util.log  - Logging initialized @57415ms
54566 [ScalaTest-3] INFO  org.spark_project.jetty.server.Server  - jetty-9.2.z-SNAPSHOT
54591 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@7d65a6e5{/jobs,null,AVAILABLE}
54591 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@1cb044b4{/jobs/json,null,AVAILABLE}
54591 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@786e9422{/jobs/job,null,AVAILABLE}
54592 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@23bf81f3{/jobs/job/json,null,AVAILABLE}
54592 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@71ec8ada{/stages,null,AVAILABLE}
54592 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@609028a4{/stages/json,null,AVAILABLE}
54592 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@185c5883{/stages/stage,null,AVAILABLE}
54593 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@56b70cf3{/stages/stage/json,null,AVAILABLE}
54593 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@bb351ff{/stages/pool,null,AVAILABLE}
54593 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@29cc4259{/stages/pool/json,null,AVAILABLE}
54593 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@2117f67f{/storage,null,AVAILABLE}
54594 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@2c381cd2{/storage/json,null,AVAILABLE}
54594 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@17ec35ac{/storage/rdd,null,AVAILABLE}
54594 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@14b7bb79{/storage/rdd/json,null,AVAILABLE}
54594 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@2a750f8f{/environment,null,AVAILABLE}
54594 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@6b508b0f{/environment/json,null,AVAILABLE}
54595 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@74d9bcd3{/executors,null,AVAILABLE}
54595 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@2fbfc94{/executors/json,null,AVAILABLE}
54595 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@6d835a13{/executors/threadDump,null,AVAILABLE}
54595 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4a8ecd90{/executors/threadDump/json,null,AVAILABLE}
54602 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@6559cf2d{/static,null,AVAILABLE}
54602 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@6b30c756{/,null,AVAILABLE}
54603 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4f6f1614{/api,null,AVAILABLE}
54603 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@612a3ca{/stages/stage/kill,null,AVAILABLE}
54609 [ScalaTest-3] INFO  org.spark_project.jetty.server.ServerConnector  - Started ServerConnector@2bb4c939{HTTP/1.1}{0.0.0.0:4040}
54609 [ScalaTest-3] INFO  org.spark_project.jetty.server.Server  - Started @57645ms
54922 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@cafef7c{/metrics/json,null,AVAILABLE}
57076 [ScalaTest-3-running-PhoenixSparkIT] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5e3ef3bf{/SQL,null,AVAILABLE}
57076 [ScalaTest-3-running-PhoenixSparkIT] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@3a8c57ec{/SQL/json,null,AVAILABLE}
57077 [ScalaTest-3-running-PhoenixSparkIT] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@317ba637{/SQL/execution,null,AVAILABLE}
57078 [ScalaTest-3-running-PhoenixSparkIT] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@18c3611{/SQL/execution/json,null,AVAILABLE}
57079 [ScalaTest-3-running-PhoenixSparkIT] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@8b2b015{/static/sql,null,AVAILABLE}
59022 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42636 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
59031 [RpcServer.reader=3,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42682 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can persist data with case sensitive columns (like in avro schema) using 'DataFrame.saveToPhoenix'
- Can convert Phoenix schema
61043 [RpcServer.reader=6,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42642 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
61051 [RpcServer.reader=4,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42688 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
61181 [RpcServer.reader=7,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42648 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
61189 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42694 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can create schema RDD and execute query
63230 [RpcServer.reader=8,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42654 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
63241 [RpcServer.reader=6,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42700 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can create schema RDD and execute query on case sensitive table (no config)
63640 [RpcServer.reader=9,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42662 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
63658 [RpcServer.reader=7,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42708 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
63729 [RpcServer.reader=0,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42668 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
63742 [RpcServer.reader=8,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42714 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can create schema RDD and execute constrained query
64650 [RpcServer.reader=1,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42674 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
64661 [RpcServer.reader=9,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42720 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can create schema RDD with predicate that will never match
64910 [RpcServer.reader=2,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42692 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
64925 [RpcServer.reader=0,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42738 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can create schema RDD with complex predicate
65171 [RpcServer.reader=3,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42698 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
65184 [RpcServer.reader=1,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42744 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can query an array table
65470 [RpcServer.reader=4,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42704 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
65486 [RpcServer.reader=2,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42750 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can read a table as an RDD
- Can save to phoenix table
- Can save Java and Joda dates to Phoenix (no config)
- Can infer schema without defining columns
66414 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42710 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
66426 [RpcServer.reader=3,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42756 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Spark SQL can use Phoenix as a data source with no schema specified
66830 [RpcServer.reader=6,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42716 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
66838 [RpcServer.reader=4,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42762 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Spark SQL can use Phoenix as a data source with PrunedFilteredScan
67170 [RpcServer.reader=7,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42722 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
67180 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42768 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can persist a dataframe using 'DataFrame.saveToPhoenix'
67647 [RpcServer.reader=8,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42728 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
67657 [RpcServer.reader=6,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42774 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can persist a dataframe using 'DataFrame.save()'
- Can save arrays back to phoenix
68030 [RpcServer.reader=9,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42734 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
68041 [RpcServer.reader=7,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42780 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
68158 [RpcServer.reader=0,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42740 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
68176 [RpcServer.reader=8,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42786 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can read from table with schema and escaped table name
- Ensure DataFrame field normalization (PHOENIX-2196)
68752 [RpcServer.reader=1,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42748 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
68764 [RpcServer.reader=9,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42794 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
69048 [RpcServer.reader=2,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42754 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
69055 [RpcServer.reader=0,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42800 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
69256 [RpcServer.reader=3,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42760 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
69271 [RpcServer.reader=1,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42806 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
69501 [RpcServer.reader=4,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42766 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
69510 [RpcServer.reader=2,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42812 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
69744 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42774 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
69757 [RpcServer.reader=3,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42820 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
69984 [RpcServer.reader=6,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42780 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
69995 [RpcServer.reader=4,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42826 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
70225 [RpcServer.reader=7,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42788 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
70236 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42834 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
70457 [RpcServer.reader=8,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42794 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
70468 [RpcServer.reader=6,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42840 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
70726 [RpcServer.reader=9,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42800 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
70735 [RpcServer.reader=7,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42846 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
70924 [RpcServer.reader=0,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42806 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
70933 [RpcServer.reader=8,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42852 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
71154 [RpcServer.reader=1,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42812 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
71163 [RpcServer.reader=9,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42858 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Ensure Dataframe supports LIKE and IN filters (PHOENIX-2328)
71446 [RpcServer.reader=2,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42818 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
71456 [RpcServer.reader=0,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42864 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can load decimal types with accurate precision and scale (PHOENIX-2288)
71739 [RpcServer.reader=3,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42824 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
71746 [RpcServer.reader=1,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42870 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
71912 [RpcServer.reader=4,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42830 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
71922 [RpcServer.reader=2,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42876 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can load small and tiny integeger types (PHOENIX-2426)
- Can save arrays from custom dataframes back to phoenix
- Can save arrays of AnyVal type back to phoenix
- Can save arrays of Byte type back to phoenix
- Can save binary types back to phoenix
72956 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42836 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
72968 [RpcServer.reader=3,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42882 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can load Phoenix DATE columns through DataFrame API
73290 [RpcServer.reader=6,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42842 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
73301 [RpcServer.reader=4,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42888 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can coerce Phoenix DATE columns to TIMESTAMP through DataFrame API
73657 [RpcServer.reader=7,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42850 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
73668 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42896 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can load Phoenix Time columns through DataFrame API
74149 [RpcServer.reader=8,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42856 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
74159 [RpcServer.reader=6,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42902 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
74680 [RpcServer.reader=9,bindAddress=asf927.gq1.ygridcore.net,port=43617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42862 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
74690 [RpcServer.reader=7,bindAddress=asf927.gq1.ygridcore.net,port=43567] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 42908 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- can read all Phoenix data types
74777 [ScalaTest-3] INFO  org.spark_project.jetty.server.ServerConnector  - Stopped ServerConnector@2bb4c939{HTTP/1.1}{0.0.0.0:4040}
74779 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@612a3ca{/stages/stage/kill,null,UNAVAILABLE}
74779 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4f6f1614{/api,null,UNAVAILABLE}
74779 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@6b30c756{/,null,UNAVAILABLE}
74779 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@6559cf2d{/static,null,UNAVAILABLE}
74780 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4a8ecd90{/executors/threadDump/json,null,UNAVAILABLE}
74780 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@6d835a13{/executors/threadDump,null,UNAVAILABLE}
74780 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2fbfc94{/executors/json,null,UNAVAILABLE}
74780 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@74d9bcd3{/executors,null,UNAVAILABLE}
74780 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@6b508b0f{/environment/json,null,UNAVAILABLE}
74780 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2a750f8f{/environment,null,UNAVAILABLE}
74780 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@14b7bb79{/storage/rdd/json,null,UNAVAILABLE}
74780 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@17ec35ac{/storage/rdd,null,UNAVAILABLE}
74781 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2c381cd2{/storage/json,null,UNAVAILABLE}
74781 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2117f67f{/storage,null,UNAVAILABLE}
74781 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@29cc4259{/stages/pool/json,null,UNAVAILABLE}
74781 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@bb351ff{/stages/pool,null,UNAVAILABLE}
74781 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@56b70cf3{/stages/stage/json,null,UNAVAILABLE}
74781 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@185c5883{/stages/stage,null,UNAVAILABLE}
74781 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@609028a4{/stages/json,null,UNAVAILABLE}
74781 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@71ec8ada{/stages,null,UNAVAILABLE}
74782 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@23bf81f3{/jobs/job/json,null,UNAVAILABLE}
74782 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@786e9422{/jobs/job,null,UNAVAILABLE}
74782 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@1cb044b4{/jobs/json,null,UNAVAILABLE}
74782 [ScalaTest-3] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7d65a6e5{/jobs,null,UNAVAILABLE}
Run completed in 2 minutes, 36 seconds.
Total number of tests run: 30
Suites: completed 3, aborted 1
Tests: succeeded 30, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix ..................................... SUCCESS [  1.762 s]
[INFO] Phoenix Core ....................................... SUCCESS [  01:50 h]
[INFO] Phoenix - Flume .................................... SUCCESS [01:35 min]
[INFO] Phoenix - Kafka .................................... SUCCESS [02:15 min]
[INFO] Phoenix - Pig ...................................... SUCCESS [03:40 min]
[INFO] Phoenix Query Server Client ........................ SUCCESS [ 12.954 s]
[INFO] Phoenix Query Server ............................... SUCCESS [02:09 min]
[INFO] Phoenix - Pherf .................................... SUCCESS [01:49 min]
[INFO] Phoenix - Spark .................................... FAILURE [03:04 min]
[INFO] Phoenix - Hive ..................................... SKIPPED
[INFO] Phoenix Client ..................................... SKIPPED
[INFO] Phoenix Server ..................................... SKIPPED
[INFO] Phoenix Assembly ................................... SKIPPED
[INFO] Phoenix - Tracing Web Application .................. SKIPPED
[INFO] Phoenix Load Balancer .............................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 02:05 h
[INFO] Finished at: 2017-09-01T04:29:01Z
[INFO] Final Memory: 112M/1408M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0:test (integration-test) on project phoenix-spark: There are test failures -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-spark
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
[Fast Archiver] Compressed 1.14 GB of artifacts by 30.3% relative to #550
Recording test results

Build failed in Jenkins: Phoenix-4.x-HBase-1.1 #561

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/561/display/redirect?page=changes>

Changes:

[samarth] PHOENIX-4155 Convert CreateTableIT to extend ParallelStatsDisabledIT

------------------------------------------
[...truncated 98.34 KB...]
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexIT
[INFO] Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 215.017 s - in org.apache.phoenix.end2end.UpgradeIT
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexSplitForwardScanIT
[INFO] Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 319.569 s - in org.apache.phoenix.end2end.index.DropColumnIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 153.704 s - in org.apache.phoenix.end2end.index.MutableIndexSplitForwardScanIT
[INFO] Running org.apache.phoenix.end2end.index.SaltedIndexIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.487 s - in org.apache.phoenix.end2end.index.SaltedIndexIT
[INFO] Running org.apache.phoenix.end2end.index.ViewIndexIT
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexSplitReverseScanIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 52.127 s - in org.apache.phoenix.end2end.index.ViewIndexIT
[INFO] Running org.apache.phoenix.end2end.index.txn.MutableRollbackIT
[INFO] Tests run: 67, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 466.094 s - in org.apache.phoenix.end2end.index.IndexExpressionIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 149.365 s - in org.apache.phoenix.end2end.index.MutableIndexSplitReverseScanIT
[INFO] Running org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
[INFO] Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 70.451 s - in org.apache.phoenix.end2end.index.txn.MutableRollbackIT
[INFO] Running org.apache.phoenix.end2end.salted.SaltedTableVarLengthRowKeyIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.27 s - in org.apache.phoenix.end2end.salted.SaltedTableVarLengthRowKeyIT
[INFO] Running org.apache.phoenix.iterate.PhoenixQueryTimeoutIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.071 s - in org.apache.phoenix.iterate.PhoenixQueryTimeoutIT
[INFO] Running org.apache.phoenix.iterate.RoundRobinResultIteratorIT
[INFO] Running org.apache.phoenix.end2end.index.txn.RollbackIT
[INFO] Tests run: 102, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1,124.463 s - in org.apache.phoenix.end2end.SortMergeJoinIT
[INFO] Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.515 s - in org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
[INFO] Running org.apache.phoenix.rpc.UpdateCacheIT
[INFO] Running org.apache.phoenix.replication.SystemCatalogWALEntryFilterIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.065 s - in org.apache.phoenix.replication.SystemCatalogWALEntryFilterIT
[INFO] Running org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 28.13 s - in org.apache.phoenix.rpc.UpdateCacheIT
[INFO] Running org.apache.phoenix.trace.PhoenixTracingEndToEndIT
[INFO] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 49.866 s - in org.apache.phoenix.iterate.RoundRobinResultIteratorIT
[INFO] Running org.apache.phoenix.tx.FlappingTransactionIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.308 s - in org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
[INFO] Running org.apache.phoenix.tx.ParameterizedTransactionIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.377 s - in org.apache.phoenix.tx.FlappingTransactionIT
[INFO] Running org.apache.phoenix.tx.TransactionIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 54.67 s - in org.apache.phoenix.end2end.index.txn.RollbackIT
[INFO] Running org.apache.phoenix.tx.TxCheckpointIT
[INFO] Tests run: 64, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 491.996 s - in org.apache.phoenix.end2end.index.MutableIndexIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 56.738 s - in org.apache.phoenix.tx.TransactionIT
[INFO] Running org.apache.phoenix.util.IndexScrutinyIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 88.257 s - in org.apache.phoenix.trace.PhoenixTracingEndToEndIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.541 s - in org.apache.phoenix.util.IndexScrutinyIT
[WARNING] Tests run: 52, Failures: 0, Errors: 0, Skipped: 4, Time elapsed: 228.298 s - in org.apache.phoenix.tx.ParameterizedTransactionIT
[INFO] Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 293.075 s - in org.apache.phoenix.tx.TxCheckpointIT
[INFO] Tests run: 304, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2,065.826 s - in org.apache.phoenix.end2end.index.IndexIT
[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Failures: 
[ERROR]   TableSnapshotReadsMapReduceIT.testMapReduceSnapshotWithLimit:117->configureJob:130
[ERROR]   TableSnapshotReadsMapReduceIT.testMapReduceSnapshots:78->configureJob:130
[ERROR]   TableSnapshotReadsMapReduceIT.testMapReduceSnapshotsWithCondition:96->configureJob:130
[INFO] 
[ERROR] Tests run: 3059, Failures: 3, Errors: 0, Skipped: 5
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:integration-test (ClientManagedTimeTests) @ phoenix-core ---
[INFO] 
[INFO] -------------------------------------------------------
[INFO]  T E S T S
[INFO] -------------------------------------------------------
[INFO] Running org.apache.phoenix.end2end.CustomEntityDataIT
[INFO] Running org.apache.phoenix.end2end.DerivedTableIT
[INFO] Running org.apache.phoenix.end2end.ExtendedQueryExecIT
[INFO] Running org.apache.phoenix.end2end.DropSchemaIT
[INFO] Running org.apache.phoenix.end2end.DistinctCountIT
[INFO] Running org.apache.phoenix.end2end.CreateSchemaIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.517 s - in org.apache.phoenix.end2end.CreateSchemaIT
[INFO] Running org.apache.phoenix.end2end.FunkyNamesIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.391 s - in org.apache.phoenix.end2end.CustomEntityDataIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.532 s - in org.apache.phoenix.end2end.ExtendedQueryExecIT
[INFO] Running org.apache.phoenix.end2end.ProductMetricsIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.757 s - in org.apache.phoenix.end2end.FunkyNamesIT
[INFO] Running org.apache.phoenix.end2end.QueryDatabaseMetaDataIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.379 s - in org.apache.phoenix.end2end.DropSchemaIT
[INFO] Running org.apache.phoenix.end2end.ReadIsolationLevelIT
[INFO] Running org.apache.phoenix.end2end.NativeHBaseTypesIT
[INFO] Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.442 s - in org.apache.phoenix.end2end.DerivedTableIT
[INFO] Running org.apache.phoenix.end2end.RowValueConstructorIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.356 s - in org.apache.phoenix.end2end.ReadIsolationLevelIT
[INFO] Running org.apache.phoenix.end2end.SequenceBulkAllocationIT
[INFO] Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.47 s - in org.apache.phoenix.end2end.DistinctCountIT
[INFO] Running org.apache.phoenix.end2end.SequenceIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.514 s - in org.apache.phoenix.end2end.NativeHBaseTypesIT
[INFO] Running org.apache.phoenix.end2end.ToNumberFunctionIT
[INFO] Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.399 s - in org.apache.phoenix.end2end.ToNumberFunctionIT
[INFO] Running org.apache.phoenix.end2end.TopNIT
[INFO] Tests run: 61, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 23.251 s - in org.apache.phoenix.end2end.ProductMetricsIT
[INFO] Running org.apache.phoenix.end2end.TruncateFunctionIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.826 s - in org.apache.phoenix.end2end.TruncateFunctionIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.68 s - in org.apache.phoenix.end2end.TopNIT
[INFO] Running org.apache.phoenix.end2end.UpsertValuesIT
[INFO] Running org.apache.phoenix.end2end.VariableLengthPKIT
[INFO] Tests run: 56, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.502 s - in org.apache.phoenix.end2end.SequenceBulkAllocationIT
[INFO] Running org.apache.phoenix.end2end.salted.SaltedTableIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.781 s - in org.apache.phoenix.end2end.salted.SaltedTableIT
[INFO] Running org.apache.phoenix.rpc.UpdateCacheWithScnIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.159 s - in org.apache.phoenix.rpc.UpdateCacheWithScnIT
[INFO] Tests run: 54, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 52.195 s - in org.apache.phoenix.end2end.SequenceIT
[INFO] Tests run: 50, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 46.472 s - in org.apache.phoenix.end2end.VariableLengthPKIT
[INFO] Tests run: 46, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 122.404 s - in org.apache.phoenix.end2end.RowValueConstructorIT
[INFO] Tests run: 19, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 138.324 s - in org.apache.phoenix.end2end.QueryDatabaseMetaDataIT
[INFO] Tests run: 25, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 121.721 s - in org.apache.phoenix.end2end.UpsertValuesIT
[INFO] 
[INFO] Results:
[INFO] 
[INFO] Tests run: 393, Failures: 0, Errors: 0, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:integration-test (HBaseManagedTimeTests) @ phoenix-core ---
[INFO] 
[INFO] -------------------------------------------------------
[INFO]  T E S T S
[INFO] -------------------------------------------------------
[INFO] 
[INFO] Results:
[INFO] 
[INFO] Tests run: 0, Failures: 0, Errors: 0, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:integration-test (NeedTheirOwnClusterTests) @ phoenix-core ---
[INFO] 
[INFO] -------------------------------------------------------
[INFO]  T E S T S
[INFO] -------------------------------------------------------
[INFO] Running org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
[INFO] Running org.apache.phoenix.end2end.ConnectionUtilIT
[INFO] Running org.apache.hadoop.hbase.regionserver.wal.WALRecoveryRegionPostOpenIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 28.43 s - in org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.732 s - in org.apache.hadoop.hbase.regionserver.wal.WALRecoveryRegionPostOpenIT
[INFO] Running org.apache.phoenix.end2end.ArrayIT
[INFO] Running org.apache.phoenix.end2end.CountDistinctCompressionIT
[INFO] Running org.apache.phoenix.end2end.ContextClassloaderIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.665 s - in org.apache.phoenix.end2end.CountDistinctCompressionIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.13 s - in org.apache.phoenix.end2end.ConnectionUtilIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.539 s - in org.apache.phoenix.end2end.ContextClassloaderIT
[INFO] Running org.apache.phoenix.end2end.CsvBulkLoadToolIT
[INFO] Running org.apache.phoenix.end2end.FlappingLocalIndexIT
[INFO] Running org.apache.phoenix.end2end.IndexExtendedIT
[INFO] Running org.apache.phoenix.end2end.IndexToolForPartialBuildIT
[INFO] Running org.apache.phoenix.end2end.IndexToolForPartialBuildWithNamespaceEnabledIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.45 s - in org.apache.phoenix.end2end.IndexToolForPartialBuildIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.1 s - in org.apache.phoenix.end2end.IndexToolForPartialBuildWithNamespaceEnabledIT
[INFO] Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 74.619 s - in org.apache.phoenix.end2end.CsvBulkLoadToolIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 76.157 s - in org.apache.phoenix.end2end.FlappingLocalIndexIT
[INFO] Running org.apache.phoenix.end2end.QueryWithLimitIT
[INFO] Running org.apache.phoenix.end2end.QueryTimeoutIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.024 s - in org.apache.phoenix.end2end.QueryWithLimitIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.373 s - in org.apache.phoenix.end2end.QueryTimeoutIT
[INFO] Running org.apache.phoenix.end2end.RebuildIndexConnectionPropsIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.53 s - in org.apache.phoenix.end2end.RebuildIndexConnectionPropsIT
[INFO] Running org.apache.phoenix.end2end.RegexBulkLoadToolIT
[INFO] Running org.apache.phoenix.end2end.RenewLeaseIT
[INFO] Running org.apache.phoenix.end2end.SpillableGroupByIT
[INFO] Tests run: 80, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 195.545 s - in org.apache.phoenix.end2end.ArrayIT
[INFO] Running org.apache.phoenix.end2end.StatsCollectorIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 25.417 s - in org.apache.phoenix.end2end.RenewLeaseIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.645 s - in org.apache.phoenix.end2end.SpillableGroupByIT
[INFO] Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 71.674 s - in org.apache.phoenix.end2end.RegexBulkLoadToolIT
[INFO] Running org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
[INFO] Running org.apache.phoenix.end2end.UpdateCacheAcrossDifferentClientsIT
[INFO] Running org.apache.phoenix.end2end.UserDefinedFunctionsIT
[INFO] Running org.apache.phoenix.end2end.index.ImmutableIndexIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 34.928 s - in org.apache.phoenix.end2end.UpdateCacheAcrossDifferentClientsIT
[INFO] Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 54.99 s - in org.apache.phoenix.end2end.UserDefinedFunctionsIT
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexFailureIT
[WARNING] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 0.002 s - in org.apache.phoenix.end2end.index.MutableIndexFailureIT
[INFO] Running org.apache.phoenix.end2end.index.LocalIndexIT
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.459 s - in org.apache.phoenix.end2end.index.MutableIndexReplicationIT
[WARNING] Tests run: 24, Failures: 0, Errors: 0, Skipped: 16, Time elapsed: 115.71 s - in org.apache.phoenix.end2end.index.ImmutableIndexIT
[INFO] Running org.apache.phoenix.end2end.index.PartialIndexRebuilderIT
[INFO] Running org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 58.918 s - in org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
[INFO] Tests run: 140, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 489.119 s - in org.apache.phoenix.end2end.IndexExtendedIT
[INFO] Running org.apache.phoenix.execute.PartialCommitIT
[INFO] Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.207 s - in org.apache.phoenix.execute.PartialCommitIT
[WARNING] Tests run: 132, Failures: 0, Errors: 0, Skipped: 24, Time elapsed: 375.522 s - in org.apache.phoenix.end2end.StatsCollectorIT
[INFO] Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
[INFO] Running org.apache.phoenix.execute.UpsertSelectOverlappingBatchesIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.045 s - in org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
[INFO] Running org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 23.478 s - in org.apache.phoenix.execute.UpsertSelectOverlappingBatchesIT
[INFO] Running org.apache.phoenix.hbase.index.covered.FailWithoutRetriesIT
[WARNING] Tests run: 132, Failures: 0, Errors: 0, Skipped: 24, Time elapsed: 375.401 s - in org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.274 s - in org.apache.phoenix.hbase.index.covered.FailWithoutRetriesIT
[INFO] Tests run: 32, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 293.391 s - in org.apache.phoenix.end2end.index.LocalIndexIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.507 s - in org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
[INFO] Running org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
[INFO] Running org.apache.phoenix.iterate.ScannerLeaseRenewalIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.728 s - in org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
[INFO] Running org.apache.phoenix.monitoring.PhoenixMetricsIT
[INFO] Running org.apache.phoenix.rpc.PhoenixClientRpcIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.078 s - in org.apache.phoenix.rpc.PhoenixClientRpcIT
[INFO] Running org.apache.phoenix.rpc.PhoenixServerRpcIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.36 s - in org.apache.phoenix.rpc.PhoenixServerRpcIT
[INFO] Tests run: 22, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 63.014 s - in org.apache.phoenix.monitoring.PhoenixMetricsIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 161.98 s - in org.apache.phoenix.iterate.ScannerLeaseRenewalIT
[INFO] Tests run: 25, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 519.039 s - in org.apache.phoenix.end2end.index.PartialIndexRebuilderIT
[INFO] 
[INFO] Results:
[INFO] 
[WARNING] Tests run: 691, Failures: 0, Errors: 0, Skipped: 65
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:verify (ParallelStatsEnabledTest) @ phoenix-core ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix ..................................... SUCCESS [  2.516 s]
[INFO] Phoenix Core ....................................... FAILURE [  01:44 h]
[INFO] Phoenix - Flume .................................... SKIPPED
[INFO] Phoenix - Kafka .................................... SKIPPED
[INFO] Phoenix - Pig ...................................... SKIPPED
[INFO] Phoenix Query Server Client ........................ SKIPPED
[INFO] Phoenix Query Server ............................... SKIPPED
[INFO] Phoenix - Pherf .................................... SKIPPED
[INFO] Phoenix - Spark .................................... SKIPPED
[INFO] Phoenix - Hive ..................................... SKIPPED
[INFO] Phoenix Client ..................................... SKIPPED
[INFO] Phoenix Server ..................................... SKIPPED
[INFO] Phoenix Assembly ................................... SKIPPED
[INFO] Phoenix - Tracing Web Application .................. SKIPPED
[INFO] Phoenix Load Balancer .............................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:44 h
[INFO] Finished at: 2017-09-03T09:49:22Z
[INFO] Final Memory: 62M/1533M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.20:verify (ParallelStatsEnabledTest) on project phoenix-core: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/phoenix-core/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-core
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
[Fast Archiver] Compressed 1.13 GB of artifacts by 30.6% relative to #550
Recording test results

Build failed in Jenkins: Phoenix-4.x-HBase-1.1 #560

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/560/display/redirect?page=changes>

Changes:

[samarth] Revert "PHOENIX-4141 Fix flapping TableSnapshotReadsMapReduceIT"

[samarth] Revert "PHOENIX-4141 Addendum to fix test failure"

[samarth] Revert "PHOENIX-4141 Fix flapping TableSnapshotReadsMapReduceIT"

------------------------------------------
[...truncated 98.41 KB...]
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexSplitForwardScanIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 153.968 s - in org.apache.phoenix.end2end.index.MutableIndexSplitForwardScanIT
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexSplitReverseScanIT
[INFO] Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 320.102 s - in org.apache.phoenix.end2end.index.DropColumnIT
[INFO] Running org.apache.phoenix.end2end.index.SaltedIndexIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.463 s - in org.apache.phoenix.end2end.index.SaltedIndexIT
[INFO] Running org.apache.phoenix.end2end.index.ViewIndexIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 51.918 s - in org.apache.phoenix.end2end.index.ViewIndexIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 154.951 s - in org.apache.phoenix.end2end.index.MutableIndexSplitReverseScanIT
[INFO] Running org.apache.phoenix.end2end.index.txn.RollbackIT
[INFO] Running org.apache.phoenix.end2end.index.txn.MutableRollbackIT
[INFO] Tests run: 67, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 460.961 s - in org.apache.phoenix.end2end.index.IndexExpressionIT
[INFO] Running org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
[INFO] Tests run: 102, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1,126.641 s - in org.apache.phoenix.end2end.SortMergeJoinIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 56.33 s - in org.apache.phoenix.end2end.index.txn.RollbackIT
[INFO] Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.305 s - in org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
[INFO] Running org.apache.phoenix.iterate.RoundRobinResultIteratorIT
[INFO] Running org.apache.phoenix.end2end.salted.SaltedTableVarLengthRowKeyIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.262 s - in org.apache.phoenix.end2end.salted.SaltedTableVarLengthRowKeyIT
[INFO] Running org.apache.phoenix.replication.SystemCatalogWALEntryFilterIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.063 s - in org.apache.phoenix.replication.SystemCatalogWALEntryFilterIT
[INFO] Running org.apache.phoenix.rpc.UpdateCacheIT
[INFO] Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 70.662 s - in org.apache.phoenix.end2end.index.txn.MutableRollbackIT
[INFO] Running org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.351 s - in org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
[INFO] Running org.apache.phoenix.trace.PhoenixTracingEndToEndIT
[INFO] Running org.apache.phoenix.iterate.PhoenixQueryTimeoutIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.419 s - in org.apache.phoenix.iterate.PhoenixQueryTimeoutIT
[INFO] Running org.apache.phoenix.tx.FlappingTransactionIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.875 s - in org.apache.phoenix.rpc.UpdateCacheIT
[INFO] Running org.apache.phoenix.tx.ParameterizedTransactionIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.387 s - in org.apache.phoenix.tx.FlappingTransactionIT
[INFO] Running org.apache.phoenix.tx.TransactionIT
[INFO] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 50.299 s - in org.apache.phoenix.iterate.RoundRobinResultIteratorIT
[INFO] Running org.apache.phoenix.tx.TxCheckpointIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 54.311 s - in org.apache.phoenix.tx.TransactionIT
[INFO] Running org.apache.phoenix.util.IndexScrutinyIT
[INFO] Tests run: 64, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 493.133 s - in org.apache.phoenix.end2end.index.MutableIndexIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.734 s - in org.apache.phoenix.util.IndexScrutinyIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 90.624 s - in org.apache.phoenix.trace.PhoenixTracingEndToEndIT
[WARNING] Tests run: 52, Failures: 0, Errors: 0, Skipped: 4, Time elapsed: 225.011 s - in org.apache.phoenix.tx.ParameterizedTransactionIT
[INFO] Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 296.101 s - in org.apache.phoenix.tx.TxCheckpointIT
[INFO] Tests run: 304, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2,103.643 s - in org.apache.phoenix.end2end.index.IndexIT
[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Failures: 
[ERROR]   TableSnapshotReadsMapReduceIT.testMapReduceSnapshotWithLimit:117->configureJob:130
[ERROR]   TableSnapshotReadsMapReduceIT.testMapReduceSnapshots:78->configureJob:130
[ERROR]   TableSnapshotReadsMapReduceIT.testMapReduceSnapshotsWithCondition:96->configureJob:130
[INFO] 
[ERROR] Tests run: 3039, Failures: 3, Errors: 0, Skipped: 5
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:integration-test (ClientManagedTimeTests) @ phoenix-core ---
[INFO] 
[INFO] -------------------------------------------------------
[INFO]  T E S T S
[INFO] -------------------------------------------------------
[INFO] Running org.apache.phoenix.end2end.DerivedTableIT
[INFO] Running org.apache.phoenix.end2end.DropSchemaIT
[INFO] Running org.apache.phoenix.end2end.CustomEntityDataIT
[INFO] Running org.apache.phoenix.end2end.CreateTableIT
[INFO] Running org.apache.phoenix.end2end.DistinctCountIT
[INFO] Running org.apache.phoenix.end2end.CreateSchemaIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.45 s - in org.apache.phoenix.end2end.CreateSchemaIT
[INFO] Running org.apache.phoenix.end2end.ExtendedQueryExecIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.353 s - in org.apache.phoenix.end2end.CustomEntityDataIT
[INFO] Running org.apache.phoenix.end2end.FunkyNamesIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.672 s - in org.apache.phoenix.end2end.ExtendedQueryExecIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.625 s - in org.apache.phoenix.end2end.DropSchemaIT
[INFO] Running org.apache.phoenix.end2end.ProductMetricsIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.658 s - in org.apache.phoenix.end2end.FunkyNamesIT
[INFO] Running org.apache.phoenix.end2end.QueryDatabaseMetaDataIT
[INFO] Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.063 s - in org.apache.phoenix.end2end.DerivedTableIT
[INFO] Running org.apache.phoenix.end2end.ReadIsolationLevelIT
[INFO] Running org.apache.phoenix.end2end.NativeHBaseTypesIT
[INFO] Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.836 s - in org.apache.phoenix.end2end.DistinctCountIT
[INFO] Running org.apache.phoenix.end2end.RowValueConstructorIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.292 s - in org.apache.phoenix.end2end.NativeHBaseTypesIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.242 s - in org.apache.phoenix.end2end.ReadIsolationLevelIT
[INFO] Running org.apache.phoenix.end2end.SequenceBulkAllocationIT
[INFO] Running org.apache.phoenix.end2end.SequenceIT
[INFO] Tests run: 61, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.889 s - in org.apache.phoenix.end2end.ProductMetricsIT
[INFO] Running org.apache.phoenix.end2end.ToNumberFunctionIT
[INFO] Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.524 s - in org.apache.phoenix.end2end.ToNumberFunctionIT
[INFO] Running org.apache.phoenix.end2end.TopNIT
[INFO] Tests run: 56, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 29.704 s - in org.apache.phoenix.end2end.SequenceBulkAllocationIT
[INFO] Running org.apache.phoenix.end2end.TruncateFunctionIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.422 s - in org.apache.phoenix.end2end.TopNIT
[INFO] Running org.apache.phoenix.end2end.UpsertValuesIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.858 s - in org.apache.phoenix.end2end.TruncateFunctionIT
[INFO] Running org.apache.phoenix.end2end.VariableLengthPKIT
[INFO] Tests run: 54, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 51.946 s - in org.apache.phoenix.end2end.SequenceIT
[INFO] Running org.apache.phoenix.end2end.salted.SaltedTableIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.319 s - in org.apache.phoenix.end2end.salted.SaltedTableIT
[INFO] Running org.apache.phoenix.rpc.UpdateCacheWithScnIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.229 s - in org.apache.phoenix.rpc.UpdateCacheWithScnIT
[INFO] Tests run: 50, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 43.407 s - in org.apache.phoenix.end2end.VariableLengthPKIT
[INFO] Tests run: 46, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 124.962 s - in org.apache.phoenix.end2end.RowValueConstructorIT
[INFO] Tests run: 19, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 143.057 s - in org.apache.phoenix.end2end.QueryDatabaseMetaDataIT
[INFO] Tests run: 25, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 123.311 s - in org.apache.phoenix.end2end.UpsertValuesIT
[INFO] Tests run: 21, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 193.234 s - in org.apache.phoenix.end2end.CreateTableIT
[INFO] 
[INFO] Results:
[INFO] 
[INFO] Tests run: 414, Failures: 0, Errors: 0, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:integration-test (HBaseManagedTimeTests) @ phoenix-core ---
[INFO] 
[INFO] -------------------------------------------------------
[INFO]  T E S T S
[INFO] -------------------------------------------------------
[INFO] 
[INFO] Results:
[INFO] 
[INFO] Tests run: 0, Failures: 0, Errors: 0, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:integration-test (NeedTheirOwnClusterTests) @ phoenix-core ---
[INFO] 
[INFO] -------------------------------------------------------
[INFO]  T E S T S
[INFO] -------------------------------------------------------
[INFO] Running org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
[INFO] Running org.apache.phoenix.end2end.ConnectionUtilIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 29.827 s - in org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
[INFO] Running org.apache.hadoop.hbase.regionserver.wal.WALRecoveryRegionPostOpenIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.964 s - in org.apache.hadoop.hbase.regionserver.wal.WALRecoveryRegionPostOpenIT
[INFO] Running org.apache.phoenix.end2end.ArrayIT
[INFO] Running org.apache.phoenix.end2end.CountDistinctCompressionIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.502 s - in org.apache.phoenix.end2end.CountDistinctCompressionIT
[INFO] Running org.apache.phoenix.end2end.ContextClassloaderIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 31.19 s - in org.apache.phoenix.end2end.ConnectionUtilIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.347 s - in org.apache.phoenix.end2end.ContextClassloaderIT
[INFO] Running org.apache.phoenix.end2end.CsvBulkLoadToolIT
[INFO] Running org.apache.phoenix.end2end.FlappingLocalIndexIT
[INFO] Running org.apache.phoenix.end2end.IndexExtendedIT
[INFO] Running org.apache.phoenix.end2end.IndexToolForPartialBuildIT
[INFO] Running org.apache.phoenix.end2end.IndexToolForPartialBuildWithNamespaceEnabledIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.495 s - in org.apache.phoenix.end2end.IndexToolForPartialBuildIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.342 s - in org.apache.phoenix.end2end.IndexToolForPartialBuildWithNamespaceEnabledIT
[INFO] Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 83.546 s - in org.apache.phoenix.end2end.CsvBulkLoadToolIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 76.39 s - in org.apache.phoenix.end2end.FlappingLocalIndexIT
[INFO] Running org.apache.phoenix.end2end.QueryWithLimitIT
[INFO] Running org.apache.phoenix.end2end.QueryTimeoutIT
[INFO] Running org.apache.phoenix.end2end.RebuildIndexConnectionPropsIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.523 s - in org.apache.phoenix.end2end.RebuildIndexConnectionPropsIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.234 s - in org.apache.phoenix.end2end.QueryWithLimitIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.59 s - in org.apache.phoenix.end2end.QueryTimeoutIT
[INFO] Running org.apache.phoenix.end2end.RegexBulkLoadToolIT
[INFO] Running org.apache.phoenix.end2end.RenewLeaseIT
[INFO] Running org.apache.phoenix.end2end.SpillableGroupByIT
[INFO] Running org.apache.phoenix.end2end.StatsCollectorIT
[INFO] Tests run: 80, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 232.282 s - in org.apache.phoenix.end2end.ArrayIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.035 s - in org.apache.phoenix.end2end.SpillableGroupByIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.396 s - in org.apache.phoenix.end2end.RenewLeaseIT
[INFO] Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 66.769 s - in org.apache.phoenix.end2end.RegexBulkLoadToolIT
[INFO] Running org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
[INFO] Running org.apache.phoenix.end2end.UpdateCacheAcrossDifferentClientsIT
[INFO] Running org.apache.phoenix.end2end.UserDefinedFunctionsIT
[INFO] Running org.apache.phoenix.end2end.index.ImmutableIndexIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 34.154 s - in org.apache.phoenix.end2end.UpdateCacheAcrossDifferentClientsIT
[INFO] Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 53.213 s - in org.apache.phoenix.end2end.UserDefinedFunctionsIT
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexFailureIT
[WARNING] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 0.002 s - in org.apache.phoenix.end2end.index.MutableIndexFailureIT
[INFO] Running org.apache.phoenix.end2end.index.LocalIndexIT
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
[WARNING] Tests run: 24, Failures: 0, Errors: 0, Skipped: 16, Time elapsed: 117.475 s - in org.apache.phoenix.end2end.index.ImmutableIndexIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.501 s - in org.apache.phoenix.end2end.index.MutableIndexReplicationIT
[INFO] Running org.apache.phoenix.end2end.index.PartialIndexRebuilderIT
[INFO] Running org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 63.795 s - in org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
[INFO] Tests run: 140, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 495.825 s - in org.apache.phoenix.end2end.IndexExtendedIT
[INFO] Running org.apache.phoenix.execute.PartialCommitIT
[INFO] Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.951 s - in org.apache.phoenix.execute.PartialCommitIT
[INFO] Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
[INFO] Running org.apache.phoenix.execute.UpsertSelectOverlappingBatchesIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.877 s - in org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
[WARNING] Tests run: 132, Failures: 0, Errors: 0, Skipped: 24, Time elapsed: 398.43 s - in org.apache.phoenix.end2end.StatsCollectorIT
[INFO] Running org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.816 s - in org.apache.phoenix.execute.UpsertSelectOverlappingBatchesIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.554 s - in org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
[INFO] Tests run: 32, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 293.461 s - in org.apache.phoenix.end2end.index.LocalIndexIT
[INFO] Running org.apache.phoenix.hbase.index.covered.FailWithoutRetriesIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.347 s - in org.apache.phoenix.hbase.index.covered.FailWithoutRetriesIT
[WARNING] Tests run: 132, Failures: 0, Errors: 0, Skipped: 24, Time elapsed: 395.7 s - in org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
[INFO] Running org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
[INFO] Running org.apache.phoenix.iterate.ScannerLeaseRenewalIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.831 s - in org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
[INFO] Running org.apache.phoenix.monitoring.PhoenixMetricsIT
[INFO] Running org.apache.phoenix.rpc.PhoenixClientRpcIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.112 s - in org.apache.phoenix.rpc.PhoenixClientRpcIT
[INFO] Running org.apache.phoenix.rpc.PhoenixServerRpcIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.313 s - in org.apache.phoenix.rpc.PhoenixServerRpcIT
[INFO] Tests run: 22, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 63.006 s - in org.apache.phoenix.monitoring.PhoenixMetricsIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 163.022 s - in org.apache.phoenix.iterate.ScannerLeaseRenewalIT
[INFO] Tests run: 25, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 568.986 s - in org.apache.phoenix.end2end.index.PartialIndexRebuilderIT
[INFO] 
[INFO] Results:
[INFO] 
[WARNING] Tests run: 691, Failures: 0, Errors: 0, Skipped: 65
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:verify (ParallelStatsEnabledTest) @ phoenix-core ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix ..................................... SUCCESS [  1.845 s]
[INFO] Phoenix Core ....................................... FAILURE [  01:47 h]
[INFO] Phoenix - Flume .................................... SKIPPED
[INFO] Phoenix - Kafka .................................... SKIPPED
[INFO] Phoenix - Pig ...................................... SKIPPED
[INFO] Phoenix Query Server Client ........................ SKIPPED
[INFO] Phoenix Query Server ............................... SKIPPED
[INFO] Phoenix - Pherf .................................... SKIPPED
[INFO] Phoenix - Spark .................................... SKIPPED
[INFO] Phoenix - Hive ..................................... SKIPPED
[INFO] Phoenix Client ..................................... SKIPPED
[INFO] Phoenix Server ..................................... SKIPPED
[INFO] Phoenix Assembly ................................... SKIPPED
[INFO] Phoenix - Tracing Web Application .................. SKIPPED
[INFO] Phoenix Load Balancer .............................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:47 h
[INFO] Finished at: 2017-09-03T00:35:12Z
[INFO] Final Memory: 60M/1068M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.20:verify (ParallelStatsEnabledTest) on project phoenix-core: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/phoenix-core/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-core
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
[Fast Archiver] Compressed 1.14 GB of artifacts by 30.4% relative to #550
Recording test results

Build failed in Jenkins: Phoenix-4.x-HBase-1.1 #559

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/559/display/redirect?page=changes>

Changes:

[samarth] PHOENIX-4110 Shutdown mini cluster when number of tables grows beyond a

------------------------------------------
[...truncated 98.52 KB...]
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 153.557 s - in org.apache.phoenix.end2end.index.MutableIndexSplitForwardScanIT
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexSplitReverseScanIT
[INFO] Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 321.902 s - in org.apache.phoenix.end2end.index.DropColumnIT
[INFO] Running org.apache.phoenix.end2end.index.SaltedIndexIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.711 s - in org.apache.phoenix.end2end.index.SaltedIndexIT
[INFO] Running org.apache.phoenix.end2end.index.ViewIndexIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 51.665 s - in org.apache.phoenix.end2end.index.ViewIndexIT
[INFO] Running org.apache.phoenix.end2end.index.txn.MutableRollbackIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 153.811 s - in org.apache.phoenix.end2end.index.MutableIndexSplitReverseScanIT
[INFO] Running org.apache.phoenix.end2end.index.txn.RollbackIT
[INFO] Tests run: 67, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 467.955 s - in org.apache.phoenix.end2end.index.IndexExpressionIT
[INFO] Tests run: 102, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1,123.704 s - in org.apache.phoenix.end2end.SortMergeJoinIT
[INFO] Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 69.824 s - in org.apache.phoenix.end2end.index.txn.MutableRollbackIT
[INFO] Running org.apache.phoenix.iterate.PhoenixQueryTimeoutIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.072 s - in org.apache.phoenix.iterate.PhoenixQueryTimeoutIT
[INFO] Running org.apache.phoenix.iterate.RoundRobinResultIteratorIT
[INFO] Running org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 55.211 s - in org.apache.phoenix.end2end.index.txn.RollbackIT
[INFO] Running org.apache.phoenix.replication.SystemCatalogWALEntryFilterIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.06 s - in org.apache.phoenix.replication.SystemCatalogWALEntryFilterIT
[INFO] Running org.apache.phoenix.rpc.UpdateCacheIT
[INFO] Running org.apache.phoenix.end2end.salted.SaltedTableVarLengthRowKeyIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.267 s - in org.apache.phoenix.end2end.salted.SaltedTableVarLengthRowKeyIT
[INFO] Running org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.308 s - in org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
[INFO] Running org.apache.phoenix.trace.PhoenixTracingEndToEndIT
[INFO] Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.329 s - in org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
[INFO] Running org.apache.phoenix.tx.FlappingTransactionIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.672 s - in org.apache.phoenix.rpc.UpdateCacheIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.442 s - in org.apache.phoenix.tx.FlappingTransactionIT
[INFO] Running org.apache.phoenix.tx.TransactionIT
[INFO] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 50.251 s - in org.apache.phoenix.iterate.RoundRobinResultIteratorIT
[INFO] Running org.apache.phoenix.tx.TxCheckpointIT
[INFO] Running org.apache.phoenix.tx.ParameterizedTransactionIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 54.098 s - in org.apache.phoenix.tx.TransactionIT
[INFO] Running org.apache.phoenix.util.IndexScrutinyIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.653 s - in org.apache.phoenix.util.IndexScrutinyIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 90.422 s - in org.apache.phoenix.trace.PhoenixTracingEndToEndIT
[INFO] Tests run: 64, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 493.073 s - in org.apache.phoenix.end2end.index.MutableIndexIT
[WARNING] Tests run: 52, Failures: 0, Errors: 0, Skipped: 4, Time elapsed: 227.63 s - in org.apache.phoenix.tx.ParameterizedTransactionIT
[INFO] Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 288.416 s - in org.apache.phoenix.tx.TxCheckpointIT
[INFO] Tests run: 304, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2,071.53 s - in org.apache.phoenix.end2end.index.IndexIT
[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Failures: 
[ERROR]   TableSnapshotReadsMapReduceIT.testMapReduceSnapshotWithLimit:139->configureJob:153
[ERROR]   TableSnapshotReadsMapReduceIT.testMapReduceSnapshots:98->configureJob:153
[ERROR]   TableSnapshotReadsMapReduceIT.testMapReduceSnapshotsWithCondition:117->configureJob:153
[INFO] 
[ERROR] Tests run: 3039, Failures: 3, Errors: 0, Skipped: 5
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:integration-test (ClientManagedTimeTests) @ phoenix-core ---
[INFO] 
[INFO] -------------------------------------------------------
[INFO]  T E S T S
[INFO] -------------------------------------------------------
[INFO] Running org.apache.phoenix.end2end.CreateSchemaIT
[INFO] Running org.apache.phoenix.end2end.DistinctCountIT
[INFO] Running org.apache.phoenix.end2end.DerivedTableIT
[INFO] Running org.apache.phoenix.end2end.DropSchemaIT
[INFO] Running org.apache.phoenix.end2end.CustomEntityDataIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.46 s - in org.apache.phoenix.end2end.CreateSchemaIT
[INFO] Running org.apache.phoenix.end2end.CreateTableIT
[INFO] Running org.apache.phoenix.end2end.ExtendedQueryExecIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.391 s - in org.apache.phoenix.end2end.ExtendedQueryExecIT
[INFO] Running org.apache.phoenix.end2end.FunkyNamesIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.326 s - in org.apache.phoenix.end2end.CustomEntityDataIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.537 s - in org.apache.phoenix.end2end.DropSchemaIT
[INFO] Running org.apache.phoenix.end2end.ProductMetricsIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.298 s - in org.apache.phoenix.end2end.FunkyNamesIT
[INFO] Running org.apache.phoenix.end2end.QueryDatabaseMetaDataIT
[INFO] Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.06 s - in org.apache.phoenix.end2end.DerivedTableIT
[INFO] Running org.apache.phoenix.end2end.ReadIsolationLevelIT
[INFO] Running org.apache.phoenix.end2end.NativeHBaseTypesIT
[INFO] Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.751 s - in org.apache.phoenix.end2end.DistinctCountIT
[INFO] Running org.apache.phoenix.end2end.RowValueConstructorIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.472 s - in org.apache.phoenix.end2end.NativeHBaseTypesIT
[INFO] Running org.apache.phoenix.end2end.SequenceBulkAllocationIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.074 s - in org.apache.phoenix.end2end.ReadIsolationLevelIT
[INFO] Running org.apache.phoenix.end2end.SequenceIT
[INFO] Tests run: 61, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 23.998 s - in org.apache.phoenix.end2end.ProductMetricsIT
[INFO] Running org.apache.phoenix.end2end.ToNumberFunctionIT
[INFO] Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.147 s - in org.apache.phoenix.end2end.ToNumberFunctionIT
[INFO] Running org.apache.phoenix.end2end.TopNIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.321 s - in org.apache.phoenix.end2end.TopNIT
[INFO] Tests run: 56, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 28.434 s - in org.apache.phoenix.end2end.SequenceBulkAllocationIT
[INFO] Running org.apache.phoenix.end2end.TruncateFunctionIT
[INFO] Running org.apache.phoenix.end2end.UpsertValuesIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.934 s - in org.apache.phoenix.end2end.TruncateFunctionIT
[INFO] Running org.apache.phoenix.end2end.VariableLengthPKIT
[INFO] Tests run: 54, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 51.546 s - in org.apache.phoenix.end2end.SequenceIT
[INFO] Running org.apache.phoenix.end2end.salted.SaltedTableIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.756 s - in org.apache.phoenix.end2end.salted.SaltedTableIT
[INFO] Running org.apache.phoenix.rpc.UpdateCacheWithScnIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.498 s - in org.apache.phoenix.rpc.UpdateCacheWithScnIT
[INFO] Tests run: 50, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 43.785 s - in org.apache.phoenix.end2end.VariableLengthPKIT
[INFO] Tests run: 46, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 122.681 s - in org.apache.phoenix.end2end.RowValueConstructorIT
[INFO] Tests run: 19, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 139.709 s - in org.apache.phoenix.end2end.QueryDatabaseMetaDataIT
[INFO] Tests run: 25, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 121.555 s - in org.apache.phoenix.end2end.UpsertValuesIT
[INFO] Tests run: 21, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 195.189 s - in org.apache.phoenix.end2end.CreateTableIT
[INFO] 
[INFO] Results:
[INFO] 
[INFO] Tests run: 414, Failures: 0, Errors: 0, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:integration-test (HBaseManagedTimeTests) @ phoenix-core ---
[INFO] 
[INFO] -------------------------------------------------------
[INFO]  T E S T S
[INFO] -------------------------------------------------------
[INFO] 
[INFO] Results:
[INFO] 
[INFO] Tests run: 0, Failures: 0, Errors: 0, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:integration-test (NeedTheirOwnClusterTests) @ phoenix-core ---
[INFO] 
[INFO] -------------------------------------------------------
[INFO]  T E S T S
[INFO] -------------------------------------------------------
[INFO] Running org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
[INFO] Running org.apache.phoenix.end2end.ConnectionUtilIT
[INFO] Running org.apache.hadoop.hbase.regionserver.wal.WALRecoveryRegionPostOpenIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 31.118 s - in org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.036 s - in org.apache.hadoop.hbase.regionserver.wal.WALRecoveryRegionPostOpenIT
[INFO] Running org.apache.phoenix.end2end.ArrayIT
[INFO] Running org.apache.phoenix.end2end.ContextClassloaderIT
[INFO] Running org.apache.phoenix.end2end.CountDistinctCompressionIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.571 s - in org.apache.phoenix.end2end.ContextClassloaderIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.538 s - in org.apache.phoenix.end2end.CountDistinctCompressionIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.069 s - in org.apache.phoenix.end2end.ConnectionUtilIT
[INFO] Running org.apache.phoenix.end2end.CsvBulkLoadToolIT
[INFO] Running org.apache.phoenix.end2end.FlappingLocalIndexIT
[INFO] Running org.apache.phoenix.end2end.IndexExtendedIT
[INFO] Running org.apache.phoenix.end2end.IndexToolForPartialBuildIT
[INFO] Running org.apache.phoenix.end2end.IndexToolForPartialBuildWithNamespaceEnabledIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.804 s - in org.apache.phoenix.end2end.IndexToolForPartialBuildIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.615 s - in org.apache.phoenix.end2end.IndexToolForPartialBuildWithNamespaceEnabledIT
[INFO] Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 76.597 s - in org.apache.phoenix.end2end.CsvBulkLoadToolIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 77.012 s - in org.apache.phoenix.end2end.FlappingLocalIndexIT
[INFO] Running org.apache.phoenix.end2end.QueryTimeoutIT
[INFO] Running org.apache.phoenix.end2end.QueryWithLimitIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.079 s - in org.apache.phoenix.end2end.QueryWithLimitIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.178 s - in org.apache.phoenix.end2end.QueryTimeoutIT
[INFO] Running org.apache.phoenix.end2end.RebuildIndexConnectionPropsIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.488 s - in org.apache.phoenix.end2end.RebuildIndexConnectionPropsIT
[INFO] Running org.apache.phoenix.end2end.RegexBulkLoadToolIT
[INFO] Running org.apache.phoenix.end2end.RenewLeaseIT
[INFO] Tests run: 80, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 189.788 s - in org.apache.phoenix.end2end.ArrayIT
[INFO] Running org.apache.phoenix.end2end.SpillableGroupByIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.401 s - in org.apache.phoenix.end2end.RenewLeaseIT
[INFO] Running org.apache.phoenix.end2end.StatsCollectorIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.944 s - in org.apache.phoenix.end2end.SpillableGroupByIT
[INFO] Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 71.829 s - in org.apache.phoenix.end2end.RegexBulkLoadToolIT
[INFO] Running org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
[INFO] Running org.apache.phoenix.end2end.UpdateCacheAcrossDifferentClientsIT
[INFO] Running org.apache.phoenix.end2end.UserDefinedFunctionsIT
[INFO] Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 34.818 s - in org.apache.phoenix.end2end.UpdateCacheAcrossDifferentClientsIT
[INFO] Running org.apache.phoenix.end2end.index.ImmutableIndexIT
[INFO] Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 53.736 s - in org.apache.phoenix.end2end.UserDefinedFunctionsIT
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexFailureIT
[WARNING] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 0.002 s - in org.apache.phoenix.end2end.index.MutableIndexFailureIT
[INFO] Running org.apache.phoenix.end2end.index.LocalIndexIT
[INFO] Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.214 s - in org.apache.phoenix.end2end.index.MutableIndexReplicationIT
[WARNING] Tests run: 24, Failures: 0, Errors: 0, Skipped: 16, Time elapsed: 119.515 s - in org.apache.phoenix.end2end.index.ImmutableIndexIT
[INFO] Running org.apache.phoenix.end2end.index.PartialIndexRebuilderIT
[INFO] Running org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
[INFO] Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 57.698 s - in org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
[INFO] Tests run: 140, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 493.162 s - in org.apache.phoenix.end2end.IndexExtendedIT
[WARNING] Tests run: 132, Failures: 0, Errors: 0, Skipped: 24, Time elapsed: 372.919 s - in org.apache.phoenix.end2end.StatsCollectorIT
[INFO] Running org.apache.phoenix.execute.PartialCommitIT
[INFO] Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.7 s - in org.apache.phoenix.execute.PartialCommitIT
[INFO] Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
[INFO] Running org.apache.phoenix.execute.UpsertSelectOverlappingBatchesIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.509 s - in org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
[WARNING] Tests run: 132, Failures: 0, Errors: 0, Skipped: 24, Time elapsed: 372.137 s - in org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 23.536 s - in org.apache.phoenix.execute.UpsertSelectOverlappingBatchesIT
[INFO] Tests run: 32, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 291.372 s - in org.apache.phoenix.end2end.index.LocalIndexIT
[INFO] Running org.apache.phoenix.hbase.index.covered.FailWithoutRetriesIT
[INFO] Running org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.276 s - in org.apache.phoenix.hbase.index.covered.FailWithoutRetriesIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.499 s - in org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
[INFO] Running org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
[INFO] Running org.apache.phoenix.iterate.ScannerLeaseRenewalIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.57 s - in org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
[INFO] Running org.apache.phoenix.rpc.PhoenixClientRpcIT
[INFO] Running org.apache.phoenix.monitoring.PhoenixMetricsIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.072 s - in org.apache.phoenix.rpc.PhoenixClientRpcIT
[INFO] Running org.apache.phoenix.rpc.PhoenixServerRpcIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.04 s - in org.apache.phoenix.rpc.PhoenixServerRpcIT
[INFO] Tests run: 22, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 64.079 s - in org.apache.phoenix.monitoring.PhoenixMetricsIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 163.019 s - in org.apache.phoenix.iterate.ScannerLeaseRenewalIT
[INFO] Tests run: 25, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 524.672 s - in org.apache.phoenix.end2end.index.PartialIndexRebuilderIT
[INFO] 
[INFO] Results:
[INFO] 
[WARNING] Tests run: 691, Failures: 0, Errors: 0, Skipped: 65
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.20:verify (ParallelStatsEnabledTest) @ phoenix-core ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix ..................................... SUCCESS [  1.986 s]
[INFO] Phoenix Core ....................................... FAILURE [  01:45 h]
[INFO] Phoenix - Flume .................................... SKIPPED
[INFO] Phoenix - Kafka .................................... SKIPPED
[INFO] Phoenix - Pig ...................................... SKIPPED
[INFO] Phoenix Query Server Client ........................ SKIPPED
[INFO] Phoenix Query Server ............................... SKIPPED
[INFO] Phoenix - Pherf .................................... SKIPPED
[INFO] Phoenix - Spark .................................... SKIPPED
[INFO] Phoenix - Hive ..................................... SKIPPED
[INFO] Phoenix Client ..................................... SKIPPED
[INFO] Phoenix Server ..................................... SKIPPED
[INFO] Phoenix Assembly ................................... SKIPPED
[INFO] Phoenix - Tracing Web Application .................. SKIPPED
[INFO] Phoenix Load Balancer .............................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:45 h
[INFO] Finished at: 2017-09-02T12:00:32Z
[INFO] Final Memory: 62M/1527M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.20:verify (ParallelStatsEnabledTest) on project phoenix-core: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/phoenix-core/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-core
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
[Fast Archiver] Compressed 1.14 GB of artifacts by 30.5% relative to #550
Recording test results

Build failed in Jenkins: Phoenix-4.x-HBase-1.1 #558

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/558/display/redirect?page=changes>

Changes:

[samarth] PHOENIX-4141 Fix flapping TableSnapshotReadsMapReduceIT

[samarth] PHOENIX-4152 Don't swallow or wrap exception in BaseQueryIT constructor

------------------------------------------
[...truncated 320.49 KB...]
62502 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5a0cffdb{/jobs/job,null,AVAILABLE}
62502 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@b71b10d{/jobs/job/json,null,AVAILABLE}
62503 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@551ed259{/stages,null,AVAILABLE}
62503 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5a83fcb7{/stages/json,null,AVAILABLE}
62503 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@f4480ce{/stages/stage,null,AVAILABLE}
62504 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5a645c36{/stages/stage/json,null,AVAILABLE}
62504 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@72e20b96{/stages/pool,null,AVAILABLE}
62504 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5666dfc6{/stages/pool/json,null,AVAILABLE}
62505 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@7bed6c7a{/storage,null,AVAILABLE}
62505 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@206cfd26{/storage/json,null,AVAILABLE}
62505 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@535fda6b{/storage/rdd,null,AVAILABLE}
62505 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@39fcd190{/storage/rdd/json,null,AVAILABLE}
62506 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@52262ae{/environment,null,AVAILABLE}
62506 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@147c9024{/environment/json,null,AVAILABLE}
62506 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@7ddb35fd{/executors,null,AVAILABLE}
62507 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@42b5b598{/executors/json,null,AVAILABLE}
62507 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@27a5305f{/executors/threadDump,null,AVAILABLE}
62507 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4e0ac14e{/executors/threadDump/json,null,AVAILABLE}
62517 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@24190fe9{/static,null,AVAILABLE}
62518 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@69639c82{/,null,AVAILABLE}
62519 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@ab850dc{/api,null,AVAILABLE}
62519 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4ac4a103{/stages/stage/kill,null,AVAILABLE}
62520 [ScalaTest-4] WARN  org.spark_project.jetty.util.component.AbstractLifeCycle  - FAILED ServerConnector@7014893c{HTTP/1.1}{0.0.0.0:4040}: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:463)
	at sun.nio.ch.Net.bind(Net.java:455)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:223)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark_project.jetty.server.ServerConnector.open(ServerConnector.java:321)
	at org.spark_project.jetty.server.AbstractNetworkConnector.doStart(AbstractNetworkConnector.java:80)
	at org.spark_project.jetty.server.ServerConnector.doStart(ServerConnector.java:236)
	at org.spark_project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
	at org.spark_project.jetty.server.Server.doStart(Server.java:366)
	at org.spark_project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:306)
	at org.apache.spark.ui.JettyUtils$$anonfun$5.apply(JettyUtils.scala:316)
	at org.apache.spark.ui.JettyUtils$$anonfun$5.apply(JettyUtils.scala:316)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:2171)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:160)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:2162)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:316)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:139)
	at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:448)
	at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:448)
	at scala.Option.foreach(Option.scala:257)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:448)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:100)
	at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:253)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.run(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
62521 [ScalaTest-4] WARN  org.spark_project.jetty.util.component.AbstractLifeCycle  - FAILED org.spark_project.jetty.server.Server@3152f095: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:463)
	at sun.nio.ch.Net.bind(Net.java:455)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:223)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark_project.jetty.server.ServerConnector.open(ServerConnector.java:321)
	at org.spark_project.jetty.server.AbstractNetworkConnector.doStart(AbstractNetworkConnector.java:80)
	at org.spark_project.jetty.server.ServerConnector.doStart(ServerConnector.java:236)
	at org.spark_project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
	at org.spark_project.jetty.server.Server.doStart(Server.java:366)
	at org.spark_project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:306)
	at org.apache.spark.ui.JettyUtils$$anonfun$5.apply(JettyUtils.scala:316)
	at org.apache.spark.ui.JettyUtils$$anonfun$5.apply(JettyUtils.scala:316)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:2171)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:160)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:2162)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:316)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:139)
	at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:448)
	at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:448)
	at scala.Option.foreach(Option.scala:257)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:448)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:100)
	at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:253)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.run(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
62524 [ScalaTest-4] INFO  org.spark_project.jetty.server.ServerConnector  - Stopped ServerConnector@7014893c{HTTP/1.1}{0.0.0.0:4040}
62527 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4ac4a103{/stages/stage/kill,null,UNAVAILABLE}
62527 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@ab850dc{/api,null,UNAVAILABLE}
62527 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@69639c82{/,null,UNAVAILABLE}
62527 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@24190fe9{/static,null,UNAVAILABLE}
62527 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4e0ac14e{/executors/threadDump/json,null,UNAVAILABLE}
62528 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@27a5305f{/executors/threadDump,null,UNAVAILABLE}
62528 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@42b5b598{/executors/json,null,UNAVAILABLE}
62528 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7ddb35fd{/executors,null,UNAVAILABLE}
62528 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@147c9024{/environment/json,null,UNAVAILABLE}
62528 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@52262ae{/environment,null,UNAVAILABLE}
62528 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@39fcd190{/storage/rdd/json,null,UNAVAILABLE}
62528 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@535fda6b{/storage/rdd,null,UNAVAILABLE}
62528 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@206cfd26{/storage/json,null,UNAVAILABLE}
62529 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7bed6c7a{/storage,null,UNAVAILABLE}
62529 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5666dfc6{/stages/pool/json,null,UNAVAILABLE}
62529 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@72e20b96{/stages/pool,null,UNAVAILABLE}
62529 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5a645c36{/stages/stage/json,null,UNAVAILABLE}
62530 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@f4480ce{/stages/stage,null,UNAVAILABLE}
62530 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5a83fcb7{/stages/json,null,UNAVAILABLE}
62530 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@551ed259{/stages,null,UNAVAILABLE}
62530 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@b71b10d{/jobs/job/json,null,UNAVAILABLE}
62530 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5a0cffdb{/jobs/job,null,UNAVAILABLE}
62531 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@383bbd5c{/jobs/json,null,UNAVAILABLE}
62531 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4200a4ae{/jobs,null,UNAVAILABLE}
62536 [ScalaTest-4] INFO  org.spark_project.jetty.server.Server  - jetty-9.2.z-SNAPSHOT
62546 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4200a4ae{/jobs,null,AVAILABLE}
62546 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@383bbd5c{/jobs/json,null,AVAILABLE}
62546 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5a0cffdb{/jobs/job,null,AVAILABLE}
62547 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@b71b10d{/jobs/job/json,null,AVAILABLE}
62547 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@551ed259{/stages,null,AVAILABLE}
62547 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5a83fcb7{/stages/json,null,AVAILABLE}
62547 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@f4480ce{/stages/stage,null,AVAILABLE}
62548 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5a645c36{/stages/stage/json,null,AVAILABLE}
62548 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@72e20b96{/stages/pool,null,AVAILABLE}
62548 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5666dfc6{/stages/pool/json,null,AVAILABLE}
62549 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@7bed6c7a{/storage,null,AVAILABLE}
62549 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@206cfd26{/storage/json,null,AVAILABLE}
62549 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@535fda6b{/storage/rdd,null,AVAILABLE}
62549 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@39fcd190{/storage/rdd/json,null,AVAILABLE}
62550 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@52262ae{/environment,null,AVAILABLE}
62550 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@147c9024{/environment/json,null,AVAILABLE}
62550 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@7ddb35fd{/executors,null,AVAILABLE}
62551 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@42b5b598{/executors/json,null,AVAILABLE}
62551 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@27a5305f{/executors/threadDump,null,AVAILABLE}
62551 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4e0ac14e{/executors/threadDump/json,null,AVAILABLE}
62552 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@24190fe9{/static,null,AVAILABLE}
62552 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@69639c82{/,null,AVAILABLE}
62552 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@ab850dc{/api,null,AVAILABLE}
62553 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4ac4a103{/stages/stage/kill,null,AVAILABLE}
62560 [ScalaTest-4] INFO  org.spark_project.jetty.server.ServerConnector  - Started ServerConnector@3a77b407{HTTP/1.1}{0.0.0.0:4041}
62560 [ScalaTest-4] INFO  org.spark_project.jetty.server.Server  - Started @66098ms
63115 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@e3a2097{/metrics/json,null,AVAILABLE}
64016 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=43283] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 41920 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
64033 [RpcServer.reader=3,bindAddress=asf927.gq1.ygridcore.net,port=37736] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 52954 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
66558 [ScalaTest-4-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@34014e32{/SQL,null,AVAILABLE}
66559 [ScalaTest-4-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@459524f9{/SQL/json,null,AVAILABLE}
66560 [ScalaTest-4-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4fe607f0{/SQL/execution,null,AVAILABLE}
66561 [ScalaTest-4-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@27c534e{/SQL/execution/json,null,AVAILABLE}
66563 [ScalaTest-4-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@3bf7d59{/static/sql,null,AVAILABLE}
68207 [RpcServer.reader=6,bindAddress=asf927.gq1.ygridcore.net,port=43283] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 41928 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
68223 [RpcServer.reader=4,bindAddress=asf927.gq1.ygridcore.net,port=37736] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 52962 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can read from tenant-specific table as DataFrame
69085 [RpcServer.reader=7,bindAddress=asf927.gq1.ygridcore.net,port=43283] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 41934 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
69102 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=37736] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 52968 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can read from tenant-specific table as RDD
- Can write a DataFrame using 'DataFrame.saveToPhoenix' to tenant-specific view
- Can write a DataFrame using 'DataFrame.write' to tenant-specific view
- Can write an RDD to Phoenix tenant-specific view
71592 [ScalaTest-4] INFO  org.spark_project.jetty.server.ServerConnector  - Stopped ServerConnector@3a77b407{HTTP/1.1}{0.0.0.0:4041}
71592 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4ac4a103{/stages/stage/kill,null,UNAVAILABLE}
71592 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@ab850dc{/api,null,UNAVAILABLE}
71592 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@69639c82{/,null,UNAVAILABLE}
71592 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@24190fe9{/static,null,UNAVAILABLE}
71592 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4e0ac14e{/executors/threadDump/json,null,UNAVAILABLE}
71593 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@27a5305f{/executors/threadDump,null,UNAVAILABLE}
71593 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@42b5b598{/executors/json,null,UNAVAILABLE}
71593 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7ddb35fd{/executors,null,UNAVAILABLE}
71593 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@147c9024{/environment/json,null,UNAVAILABLE}
71593 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@52262ae{/environment,null,UNAVAILABLE}
71593 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@39fcd190{/storage/rdd/json,null,UNAVAILABLE}
71593 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@535fda6b{/storage/rdd,null,UNAVAILABLE}
71594 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@206cfd26{/storage/json,null,UNAVAILABLE}
71594 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7bed6c7a{/storage,null,UNAVAILABLE}
71594 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5666dfc6{/stages/pool/json,null,UNAVAILABLE}
71594 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@72e20b96{/stages/pool,null,UNAVAILABLE}
71594 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5a645c36{/stages/stage/json,null,UNAVAILABLE}
71594 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@f4480ce{/stages/stage,null,UNAVAILABLE}
71594 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5a83fcb7{/stages/json,null,UNAVAILABLE}
71595 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@551ed259{/stages,null,UNAVAILABLE}
71595 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@b71b10d{/jobs/job/json,null,UNAVAILABLE}
71595 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5a0cffdb{/jobs/job,null,UNAVAILABLE}
71595 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@383bbd5c{/jobs/json,null,UNAVAILABLE}
71595 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4200a4ae{/jobs,null,UNAVAILABLE}
Run completed in 2 minutes, 33 seconds.
Total number of tests run: 5
Suites: completed 3, aborted 1
Tests: succeeded 5, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
158060 [MASTER_META_SERVER_OPERATIONS-asf927:37736-0] ERROR org.apache.hadoop.hbase.master.handler.MetaServerShutdownHandler  - Caught M_META_SERVER_SHUTDOWN, count=1
java.io.IOException: failed log splitting for asf927.gq1.ygridcore.net,43283,1504346707629, will retry
	at org.apache.hadoop.hbase.master.handler.MetaServerShutdownHandler.process(MetaServerShutdownHandler.java:84)
	at org.apache.hadoop.hbase.executor.EventHandler.run(EventHandler.java:129)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Caused by: java.io.IOException: Filesystem closed
	at org.apache.hadoop.hdfs.DFSClient.checkOpen(DFSClient.java:808)
	at org.apache.hadoop.hdfs.DFSClient.rename(DFSClient.java:1956)
	at org.apache.hadoop.hdfs.DistributedFileSystem.rename(DistributedFileSystem.java:626)
	at org.apache.hadoop.hbase.master.MasterFileSystem.getLogDirs(MasterFileSystem.java:327)
	at org.apache.hadoop.hbase.master.MasterFileSystem.splitLog(MasterFileSystem.java:387)
	at org.apache.hadoop.hbase.master.MasterFileSystem.splitMetaLog(MasterFileSystem.java:306)
	at org.apache.hadoop.hbase.master.MasterFileSystem.splitMetaLog(MasterFileSystem.java:297)
	at org.apache.hadoop.hbase.master.handler.MetaServerShutdownHandler.process(MetaServerShutdownHandler.java:77)
	... 4 more
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix ..................................... SUCCESS [  1.887 s]
[INFO] Phoenix Core ....................................... SUCCESS [  01:53 h]
[INFO] Phoenix - Flume .................................... SUCCESS [01:27 min]
[INFO] Phoenix - Kafka .................................... SUCCESS [02:14 min]
[INFO] Phoenix - Pig ...................................... SUCCESS [03:43 min]
[INFO] Phoenix Query Server Client ........................ SUCCESS [ 12.509 s]
[INFO] Phoenix Query Server ............................... SUCCESS [02:11 min]
[INFO] Phoenix - Pherf .................................... SUCCESS [01:51 min]
[INFO] Phoenix - Spark .................................... FAILURE [03:09 min]
[INFO] Phoenix - Hive ..................................... SKIPPED
[INFO] Phoenix Client ..................................... SKIPPED
[INFO] Phoenix Server ..................................... SKIPPED
[INFO] Phoenix Assembly ................................... SKIPPED
[INFO] Phoenix - Tracing Web Application .................. SKIPPED
[INFO] Phoenix Load Balancer .............................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 02:07 h
[INFO] Finished at: 2017-09-02T10:07:40Z
[INFO] Final Memory: 108M/1317M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0:test (integration-test) on project phoenix-spark: There are test failures -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-spark
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
[Fast Archiver] Compressed 1.14 GB of artifacts by 30.4% relative to #550
Recording test results

Build failed in Jenkins: Phoenix-4.x-HBase-1.1 #557

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/557/display/redirect?page=changes>

Changes:

[samarth] PHOENIX-4153 Temporarily disable java doc warning check in QA builds

------------------------------------------
[...truncated 316.93 KB...]
  at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:991)
  at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:342)
  at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:176)
  at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:973)
  at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:811)
  at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:742)
  ...
6333 [RpcServer.reader=1,bindAddress=asf927.gq1.ygridcore.net,port=40181] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 40999 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
7240 [RpcServer.reader=1,bindAddress=asf927.gq1.ygridcore.net,port=45871] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 48152 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
7470 [RpcServer.reader=2,bindAddress=asf927.gq1.ygridcore.net,port=45871] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 48156 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
8985 [RpcServer.reader=3,bindAddress=asf927.gq1.ygridcore.net,port=45871] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 48168 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
9470 [RpcServer.reader=4,bindAddress=asf927.gq1.ygridcore.net,port=45871] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 48180 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
9492 [RpcServer.reader=2,bindAddress=asf927.gq1.ygridcore.net,port=40181] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 35410 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
59755 [ScalaTest-4] INFO  org.spark_project.jetty.util.log  - Logging initialized @62597ms
59876 [ScalaTest-4] INFO  org.spark_project.jetty.server.Server  - jetty-9.2.z-SNAPSHOT
59906 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@1ba548e1{/jobs,null,AVAILABLE}
59906 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@53b9f5{/jobs/json,null,AVAILABLE}
59907 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@78d583fd{/jobs/job,null,AVAILABLE}
59907 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@9cc9505{/jobs/job/json,null,AVAILABLE}
59907 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@564b8049{/stages,null,AVAILABLE}
59907 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@76d364e1{/stages/json,null,AVAILABLE}
59908 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@30842cb9{/stages/stage,null,AVAILABLE}
59908 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@35943e91{/stages/stage/json,null,AVAILABLE}
59908 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@1797bd12{/stages/pool,null,AVAILABLE}
59908 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@6afdf4d7{/stages/pool/json,null,AVAILABLE}
59909 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@46d58621{/storage,null,AVAILABLE}
59909 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@6c5904db{/storage/json,null,AVAILABLE}
59909 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@484dfd6f{/storage/rdd,null,AVAILABLE}
59909 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@7839a77e{/storage/rdd/json,null,AVAILABLE}
59909 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@11237edc{/environment,null,AVAILABLE}
59910 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@315da64e{/environment/json,null,AVAILABLE}
59910 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@7b516433{/executors,null,AVAILABLE}
59910 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@1c898f85{/executors/json,null,AVAILABLE}
59910 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@b2d7366{/executors/threadDump,null,AVAILABLE}
59910 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@54f33345{/executors/threadDump/json,null,AVAILABLE}
59917 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@2aab2295{/static,null,AVAILABLE}
59917 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@496b7a15{/,null,AVAILABLE}
59917 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@2f1c1e5b{/api,null,AVAILABLE}
59918 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5f0d0086{/stages/stage/kill,null,AVAILABLE}
59918 [ScalaTest-4] WARN  org.spark_project.jetty.util.component.AbstractLifeCycle  - FAILED ServerConnector@42f160b2{HTTP/1.1}{0.0.0.0:4040}: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:463)
	at sun.nio.ch.Net.bind(Net.java:455)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:223)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark_project.jetty.server.ServerConnector.open(ServerConnector.java:321)
	at org.spark_project.jetty.server.AbstractNetworkConnector.doStart(AbstractNetworkConnector.java:80)
	at org.spark_project.jetty.server.ServerConnector.doStart(ServerConnector.java:236)
	at org.spark_project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
	at org.spark_project.jetty.server.Server.doStart(Server.java:366)
	at org.spark_project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:306)
	at org.apache.spark.ui.JettyUtils$$anonfun$5.apply(JettyUtils.scala:316)
	at org.apache.spark.ui.JettyUtils$$anonfun$5.apply(JettyUtils.scala:316)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:2171)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:160)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:2162)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:316)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:139)
	at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:448)
	at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:448)
	at scala.Option.foreach(Option.scala:257)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:448)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:100)
	at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:253)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.run(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
59919 [ScalaTest-4] WARN  org.spark_project.jetty.util.component.AbstractLifeCycle  - FAILED org.spark_project.jetty.server.Server@32be9209: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:463)
	at sun.nio.ch.Net.bind(Net.java:455)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:223)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark_project.jetty.server.ServerConnector.open(ServerConnector.java:321)
	at org.spark_project.jetty.server.AbstractNetworkConnector.doStart(AbstractNetworkConnector.java:80)
	at org.spark_project.jetty.server.ServerConnector.doStart(ServerConnector.java:236)
	at org.spark_project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
	at org.spark_project.jetty.server.Server.doStart(Server.java:366)
	at org.spark_project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:306)
	at org.apache.spark.ui.JettyUtils$$anonfun$5.apply(JettyUtils.scala:316)
	at org.apache.spark.ui.JettyUtils$$anonfun$5.apply(JettyUtils.scala:316)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:2171)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:160)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:2162)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:316)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:139)
	at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:448)
	at org.apache.spark.SparkContext$$anonfun$10.apply(SparkContext.scala:448)
	at scala.Option.foreach(Option.scala:257)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:448)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:100)
	at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:253)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.run(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
59921 [ScalaTest-4] INFO  org.spark_project.jetty.server.ServerConnector  - Stopped ServerConnector@42f160b2{HTTP/1.1}{0.0.0.0:4040}
59923 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5f0d0086{/stages/stage/kill,null,UNAVAILABLE}
59923 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2f1c1e5b{/api,null,UNAVAILABLE}
59923 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@496b7a15{/,null,UNAVAILABLE}
59923 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2aab2295{/static,null,UNAVAILABLE}
59923 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@54f33345{/executors/threadDump/json,null,UNAVAILABLE}
59924 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@b2d7366{/executors/threadDump,null,UNAVAILABLE}
59924 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@1c898f85{/executors/json,null,UNAVAILABLE}
59924 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7b516433{/executors,null,UNAVAILABLE}
59924 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@315da64e{/environment/json,null,UNAVAILABLE}
59924 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@11237edc{/environment,null,UNAVAILABLE}
59924 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7839a77e{/storage/rdd/json,null,UNAVAILABLE}
59924 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@484dfd6f{/storage/rdd,null,UNAVAILABLE}
59925 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@6c5904db{/storage/json,null,UNAVAILABLE}
59925 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@46d58621{/storage,null,UNAVAILABLE}
59925 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@6afdf4d7{/stages/pool/json,null,UNAVAILABLE}
59925 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@1797bd12{/stages/pool,null,UNAVAILABLE}
59925 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@35943e91{/stages/stage/json,null,UNAVAILABLE}
59925 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@30842cb9{/stages/stage,null,UNAVAILABLE}
59925 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@76d364e1{/stages/json,null,UNAVAILABLE}
59926 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@564b8049{/stages,null,UNAVAILABLE}
59926 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@9cc9505{/jobs/job/json,null,UNAVAILABLE}
59926 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@78d583fd{/jobs/job,null,UNAVAILABLE}
59926 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@53b9f5{/jobs/json,null,UNAVAILABLE}
59926 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@1ba548e1{/jobs,null,UNAVAILABLE}
59930 [ScalaTest-4] INFO  org.spark_project.jetty.server.Server  - jetty-9.2.z-SNAPSHOT
59942 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@1ba548e1{/jobs,null,AVAILABLE}
59942 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@53b9f5{/jobs/json,null,AVAILABLE}
59943 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@78d583fd{/jobs/job,null,AVAILABLE}
59943 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@9cc9505{/jobs/job/json,null,AVAILABLE}
59943 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@564b8049{/stages,null,AVAILABLE}
59943 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@76d364e1{/stages/json,null,AVAILABLE}
59944 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@30842cb9{/stages/stage,null,AVAILABLE}
59944 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@35943e91{/stages/stage/json,null,AVAILABLE}
59944 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@1797bd12{/stages/pool,null,AVAILABLE}
59944 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@6afdf4d7{/stages/pool/json,null,AVAILABLE}
59944 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@46d58621{/storage,null,AVAILABLE}
59945 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@6c5904db{/storage/json,null,AVAILABLE}
59945 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@484dfd6f{/storage/rdd,null,AVAILABLE}
59945 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@7839a77e{/storage/rdd/json,null,AVAILABLE}
59945 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@11237edc{/environment,null,AVAILABLE}
59945 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@315da64e{/environment/json,null,AVAILABLE}
59945 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@7b516433{/executors,null,AVAILABLE}
59946 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@1c898f85{/executors/json,null,AVAILABLE}
59946 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@b2d7366{/executors/threadDump,null,AVAILABLE}
59946 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@54f33345{/executors/threadDump/json,null,AVAILABLE}
59946 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@2aab2295{/static,null,AVAILABLE}
59946 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@496b7a15{/,null,AVAILABLE}
59947 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@2f1c1e5b{/api,null,AVAILABLE}
59947 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5f0d0086{/stages/stage/kill,null,AVAILABLE}
59952 [ScalaTest-4] INFO  org.spark_project.jetty.server.ServerConnector  - Started ServerConnector@27139ddf{HTTP/1.1}{0.0.0.0:4041}
59952 [ScalaTest-4] INFO  org.spark_project.jetty.server.Server  - Started @62796ms
60343 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5c44263f{/metrics/json,null,AVAILABLE}
61144 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=45871] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 48384 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
61157 [RpcServer.reader=3,bindAddress=asf927.gq1.ygridcore.net,port=40181] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 35614 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
63454 [ScalaTest-4-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@20fb8949{/SQL,null,AVAILABLE}
63455 [ScalaTest-4-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@61436c9b{/SQL/json,null,AVAILABLE}
63456 [ScalaTest-4-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@405c8ca9{/SQL/execution,null,AVAILABLE}
63456 [ScalaTest-4-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@3cbbff0c{/SQL/execution/json,null,AVAILABLE}
63458 [ScalaTest-4-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4c2f4d33{/static/sql,null,AVAILABLE}
64595 [RpcServer.reader=6,bindAddress=asf927.gq1.ygridcore.net,port=45871] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 48394 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
64603 [RpcServer.reader=4,bindAddress=asf927.gq1.ygridcore.net,port=40181] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 35624 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can read from tenant-specific table as DataFrame
65297 [RpcServer.reader=7,bindAddress=asf927.gq1.ygridcore.net,port=45871] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 48400 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
65309 [RpcServer.reader=5,bindAddress=asf927.gq1.ygridcore.net,port=40181] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.163 port: 35630 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can read from tenant-specific table as RDD
- Can write a DataFrame using 'DataFrame.saveToPhoenix' to tenant-specific view
- Can write a DataFrame using 'DataFrame.write' to tenant-specific view
- Can write an RDD to Phoenix tenant-specific view
67161 [ScalaTest-4] INFO  org.spark_project.jetty.server.ServerConnector  - Stopped ServerConnector@27139ddf{HTTP/1.1}{0.0.0.0:4041}
67162 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5f0d0086{/stages/stage/kill,null,UNAVAILABLE}
67162 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2f1c1e5b{/api,null,UNAVAILABLE}
67162 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@496b7a15{/,null,UNAVAILABLE}
67162 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2aab2295{/static,null,UNAVAILABLE}
67162 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@54f33345{/executors/threadDump/json,null,UNAVAILABLE}
67162 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@b2d7366{/executors/threadDump,null,UNAVAILABLE}
67163 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@1c898f85{/executors/json,null,UNAVAILABLE}
67163 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7b516433{/executors,null,UNAVAILABLE}
67163 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@315da64e{/environment/json,null,UNAVAILABLE}
67163 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@11237edc{/environment,null,UNAVAILABLE}
67163 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7839a77e{/storage/rdd/json,null,UNAVAILABLE}
67163 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@484dfd6f{/storage/rdd,null,UNAVAILABLE}
67163 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@6c5904db{/storage/json,null,UNAVAILABLE}
67164 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@46d58621{/storage,null,UNAVAILABLE}
67164 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@6afdf4d7{/stages/pool/json,null,UNAVAILABLE}
67164 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@1797bd12{/stages/pool,null,UNAVAILABLE}
67164 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@35943e91{/stages/stage/json,null,UNAVAILABLE}
67164 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@30842cb9{/stages/stage,null,UNAVAILABLE}
67164 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@76d364e1{/stages/json,null,UNAVAILABLE}
67165 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@564b8049{/stages,null,UNAVAILABLE}
67165 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@9cc9505{/jobs/job/json,null,UNAVAILABLE}
67165 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@78d583fd{/jobs/job,null,UNAVAILABLE}
67165 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@53b9f5{/jobs/json,null,UNAVAILABLE}
67165 [ScalaTest-4] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@1ba548e1{/jobs,null,UNAVAILABLE}
Run completed in 2 minutes, 31 seconds.
Total number of tests run: 5
Suites: completed 3, aborted 1
Tests: succeeded 5, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix ..................................... SUCCESS [  1.770 s]
[INFO] Phoenix Core ....................................... SUCCESS [  01:50 h]
[INFO] Phoenix - Flume .................................... SUCCESS [01:29 min]
[INFO] Phoenix - Kafka .................................... SUCCESS [02:15 min]
[INFO] Phoenix - Pig ...................................... SUCCESS [03:44 min]
[INFO] Phoenix Query Server Client ........................ SUCCESS [ 12.581 s]
[INFO] Phoenix Query Server ............................... SUCCESS [02:10 min]
[INFO] Phoenix - Pherf .................................... SUCCESS [01:50 min]
[INFO] Phoenix - Spark .................................... FAILURE [03:07 min]
[INFO] Phoenix - Hive ..................................... SKIPPED
[INFO] Phoenix Client ..................................... SKIPPED
[INFO] Phoenix Server ..................................... SKIPPED
[INFO] Phoenix Assembly ................................... SKIPPED
[INFO] Phoenix - Tracing Web Application .................. SKIPPED
[INFO] Phoenix Load Balancer .............................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 02:05 h
[INFO] Finished at: 2017-09-02T07:50:49Z
[INFO] Final Memory: 113M/1446M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0:test (integration-test) on project phoenix-spark: There are test failures -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-spark
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
[Fast Archiver] Compressed 1.14 GB of artifacts by 30.4% relative to #550
Recording test results