You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2018/01/26 23:06:42 UTC

Build failed in Jenkins: Phoenix-4.x-HBase-1.1 #652

See <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/652/display/redirect>

------------------------------------------
Started by an SCM change
Started by an SCM change
Started by an SCM change
Started by an SCM change
Started by an SCM change
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on ubuntu-eu2 (ubuntu trusty) in workspace <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/>
java.io.IOException: Failed to mkdirs: <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/>
	at hudson.FilePath.mkdirs(FilePath.java:1170)
	at hudson.model.AbstractProject.checkout(AbstractProject.java:1200)
	at hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
	at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
	at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
	at hudson.model.Run.execute(Run.java:1724)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:421)
Retrying after 10 seconds
java.io.IOException: Failed to mkdirs: <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/>
	at hudson.FilePath.mkdirs(FilePath.java:1170)
	at hudson.model.AbstractProject.checkout(AbstractProject.java:1200)
	at hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
	at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
	at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
	at hudson.model.Run.execute(Run.java:1724)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:421)
Retrying after 10 seconds
java.io.IOException: Failed to mkdirs: <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/>
	at hudson.FilePath.mkdirs(FilePath.java:1170)
	at hudson.model.AbstractProject.checkout(AbstractProject.java:1200)
	at hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
	at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
	at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
	at hudson.model.Run.execute(Run.java:1724)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:421)
Archiving artifacts
ERROR: Build step failed with exception
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to ubuntu-eu2
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1693)
		at hudson.remoting.UserResponse.retrieve(UserRequest.java:310)
		at hudson.remoting.Channel.call(Channel.java:908)
		at hudson.FilePath.act(FilePath.java:986)
		at hudson.FilePath.act(FilePath.java:975)
		at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1749)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:421)
<https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/> does not exist.
	at org.apache.tools.ant.types.AbstractFileSet.getDirectoryScanner(AbstractFileSet.java:483)
	at org.apache.tools.ant.types.AbstractFileSet.getDirectoryScanner(AbstractFileSet.java:460)
	at hudson.tasks.ArtifactArchiver$ListFiles.invoke(ArtifactArchiver.java:298)
	at hudson.tasks.ArtifactArchiver$ListFiles.invoke(ArtifactArchiver.java:278)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:2760)
	at hudson.remoting.UserRequest.perform(UserRequest.java:207)
	at hudson.remoting.UserRequest.perform(UserRequest.java:53)
	at hudson.remoting.Request$2.run(Request.java:358)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)
Build step 'Archive the artifacts' marked build as failure
Recording test results
ERROR: Build step failed with exception
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to ubuntu-eu2
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1693)
		at hudson.remoting.UserResponse.retrieve(UserRequest.java:310)
		at hudson.remoting.Channel.call(Channel.java:908)
		at hudson.FilePath.act(FilePath.java:986)
		at hudson.FilePath.act(FilePath.java:975)
		at hudson.tasks.junit.JUnitParser.parseResult(JUnitParser.java:114)
		at hudson.tasks.junit.JUnitResultArchiver.parse(JUnitResultArchiver.java:136)
		at hudson.tasks.junit.JUnitResultArchiver.parseAndAttach(JUnitResultArchiver.java:166)
		at hudson.tasks.junit.JUnitResultArchiver.perform(JUnitResultArchiver.java:153)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1749)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:421)
<https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/> does not exist.
	at org.apache.tools.ant.types.AbstractFileSet.getDirectoryScanner(AbstractFileSet.java:483)
	at org.apache.tools.ant.types.AbstractFileSet.getDirectoryScanner(AbstractFileSet.java:460)
	at hudson.tasks.junit.JUnitParser$ParseResultCallable.invoke(JUnitParser.java:141)
	at hudson.tasks.junit.JUnitParser$ParseResultCallable.invoke(JUnitParser.java:118)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:2760)
	at hudson.remoting.UserRequest.perform(UserRequest.java:207)
	at hudson.remoting.UserRequest.perform(UserRequest.java:53)
	at hudson.remoting.Request$2.run(Request.java:358)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)
Build step 'Publish JUnit test result report' marked build as failure

Jenkins build is back to normal : Phoenix-4.x-HBase-1.1 #654

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/654/display/redirect?page=changes>


Build failed in Jenkins: Phoenix-4.x-HBase-1.1 #653

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/653/display/redirect?page=changes>

Changes:

[jtaylor] PHOENIX-4560 ORDER BY with GROUP BY doesn't work if there is WHERE on pk

------------------------------------------
[...truncated 325.52 KB...]
125254 [RpcServer.reader=5,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49968 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can create schema RDD and execute query
128245 [RpcServer.reader=8,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49018 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
128260 [RpcServer.reader=6,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49976 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can create schema RDD and execute query on case sensitive table (no config)
128756 [RpcServer.reader=9,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49024 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
128777 [RpcServer.reader=7,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49982 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
128886 [RpcServer.reader=0,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49030 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
128904 [RpcServer.reader=8,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49988 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can create schema RDD and execute constrained query
130262 [RpcServer.reader=1,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49036 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
130274 [RpcServer.reader=9,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49994 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can create schema RDD with predicate that will never match
130645 [RpcServer.reader=2,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49042 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
130660 [RpcServer.reader=0,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50000 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can create schema RDD with complex predicate
131039 [RpcServer.reader=3,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49048 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
131051 [RpcServer.reader=1,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50006 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can query an array table
131485 [RpcServer.reader=4,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49056 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
131502 [RpcServer.reader=2,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50014 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can read a table as an RDD
- Can save to phoenix table
- Can save Java and Joda dates to Phoenix (no config)
- Can infer schema without defining columns
132694 [RpcServer.reader=5,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49062 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
132704 [RpcServer.reader=3,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50020 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Spark SQL can use Phoenix as a data source with no schema specified
133202 [RpcServer.reader=6,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49068 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
133210 [RpcServer.reader=4,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50026 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Spark SQL can use Phoenix as a data source with PrunedFilteredScan
133736 [RpcServer.reader=7,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49074 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
133744 [RpcServer.reader=5,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50032 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can persist a dataframe using 'DataFrame.saveToPhoenix'
134319 [RpcServer.reader=8,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49080 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
134328 [RpcServer.reader=6,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50038 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can persist a dataframe using 'DataFrame.save()'
- Can save arrays back to phoenix
134812 [RpcServer.reader=9,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49086 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
134823 [RpcServer.reader=7,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50044 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
135020 [RpcServer.reader=0,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49092 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
135036 [RpcServer.reader=8,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50050 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can read from table with schema and escaped table name
- Ensure DataFrame field normalization (PHOENIX-2196)
135736 [RpcServer.reader=1,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49098 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
135745 [RpcServer.reader=9,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50056 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
136129 [RpcServer.reader=2,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49104 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
136140 [RpcServer.reader=0,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50062 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
136426 [RpcServer.reader=3,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49110 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
136439 [RpcServer.reader=1,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50068 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
136785 [RpcServer.reader=4,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49118 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
136794 [RpcServer.reader=2,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50076 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
137128 [RpcServer.reader=5,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49124 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
137137 [RpcServer.reader=3,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50082 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
137468 [RpcServer.reader=6,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49130 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
137480 [RpcServer.reader=4,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50088 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
138889 [RpcServer.reader=7,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49136 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
138902 [RpcServer.reader=5,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50094 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
139237 [RpcServer.reader=8,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49142 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
139249 [RpcServer.reader=6,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50100 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
139568 [RpcServer.reader=9,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49148 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
139579 [RpcServer.reader=7,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50106 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
139862 [RpcServer.reader=0,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49154 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
139874 [RpcServer.reader=8,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50112 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
140187 [RpcServer.reader=1,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49160 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
140200 [RpcServer.reader=9,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50118 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Ensure Dataframe supports LIKE and IN filters (PHOENIX-2328)
140587 [RpcServer.reader=2,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49166 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
140599 [RpcServer.reader=0,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50124 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can load decimal types with accurate precision and scale (PHOENIX-2288)
141053 [RpcServer.reader=3,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49172 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
141062 [RpcServer.reader=1,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50130 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
141369 [RpcServer.reader=4,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49186 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
141378 [RpcServer.reader=2,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50144 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can load small and tiny integer types (PHOENIX-2426)
- Can save arrays from custom dataframes back to phoenix
- Can save arrays of AnyVal type back to phoenix
- Can save arrays of Byte type back to phoenix
- Can save binary types back to phoenix
143212 [RpcServer.reader=5,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49198 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
143224 [RpcServer.reader=3,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50156 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can load Phoenix DATE columns through DataFrame API
143668 [RpcServer.reader=6,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49204 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
143678 [RpcServer.reader=4,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50162 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can coerce Phoenix DATE columns to TIMESTAMP through DataFrame API
144069 [RpcServer.reader=7,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49210 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
144080 [RpcServer.reader=5,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50168 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can load Phoenix Time columns through DataFrame API
144658 [RpcServer.reader=8,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49216 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
144671 [RpcServer.reader=6,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50174 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
145387 [RpcServer.reader=9,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49222 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
145401 [RpcServer.reader=7,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50180 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- can read all Phoenix data types
145547 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.ServerConnector  - Stopped ServerConnector@a2c7966{HTTP/1.1}{0.0.0.0:4040}
145550 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7eb22dcf{/stages/stage/kill,null,UNAVAILABLE}
145550 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4058b099{/api,null,UNAVAILABLE}
145550 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@36d2a90d{/,null,UNAVAILABLE}
145550 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@20321066{/static,null,UNAVAILABLE}
145550 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4c86b66a{/executors/threadDump/json,null,UNAVAILABLE}
145551 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2cee3b89{/executors/threadDump,null,UNAVAILABLE}
145551 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7a96792f{/executors/json,null,UNAVAILABLE}
145551 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@630d3ece{/executors,null,UNAVAILABLE}
145551 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@78811700{/environment/json,null,UNAVAILABLE}
145551 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@708a1178{/environment,null,UNAVAILABLE}
145551 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@17a95eba{/storage/rdd/json,null,UNAVAILABLE}
145552 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@6b2a1485{/storage/rdd,null,UNAVAILABLE}
145552 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4a3b0e55{/storage/json,null,UNAVAILABLE}
145552 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5b59660d{/storage,null,UNAVAILABLE}
145552 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@932a2ba{/stages/pool/json,null,UNAVAILABLE}
145552 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4c920f6c{/stages/pool,null,UNAVAILABLE}
145552 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@34979855{/stages/stage/json,null,UNAVAILABLE}
145552 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@72a51738{/stages/stage,null,UNAVAILABLE}
145553 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2973d11a{/stages/json,null,UNAVAILABLE}
145553 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@735270ec{/stages,null,UNAVAILABLE}
145553 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5a04d557{/jobs/job/json,null,UNAVAILABLE}
145553 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@69dfa5e6{/jobs/job,null,UNAVAILABLE}
145553 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@68b78a67{/jobs/json,null,UNAVAILABLE}
145553 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@3bd62771{/jobs,null,UNAVAILABLE}
PhoenixSparkITTenantSpecific:
274333 [RpcServer.reader=0,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49494 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
274448 [RpcServer.reader=8,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50452 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
337028 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.Server  - jetty-9.2.z-SNAPSHOT
337031 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@6f579167{/jobs,null,AVAILABLE}
337031 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@5e0b3a4c{/jobs/json,null,AVAILABLE}
337031 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@332083d0{/jobs/job,null,AVAILABLE}
337031 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@1bade6e9{/jobs/job/json,null,AVAILABLE}
337032 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@3612d931{/stages,null,AVAILABLE}
337032 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@f713bb3{/stages/json,null,AVAILABLE}
337032 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@53066cb0{/stages/stage,null,AVAILABLE}
337032 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4ac9c165{/stages/stage/json,null,AVAILABLE}
337032 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@3b7f43f{/stages/pool,null,AVAILABLE}
337033 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@22045601{/stages/pool/json,null,AVAILABLE}
337033 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4aaa6d19{/storage,null,AVAILABLE}
337033 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@7ae0aa9a{/storage/json,null,AVAILABLE}
337033 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@33e0a37c{/storage/rdd,null,AVAILABLE}
337033 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@610d3c7f{/storage/rdd/json,null,AVAILABLE}
337034 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@2bfee7a0{/environment,null,AVAILABLE}
337034 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@6c18cff0{/environment/json,null,AVAILABLE}
337034 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@50fbcd01{/executors,null,AVAILABLE}
337034 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@43502630{/executors/json,null,AVAILABLE}
337034 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@42fb3bd6{/executors/threadDump,null,AVAILABLE}
337035 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@7c1580f4{/executors/threadDump/json,null,AVAILABLE}
337035 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@67c552d0{/static,null,AVAILABLE}
337036 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@4bc00ae9{/,null,AVAILABLE}
337036 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@2d0c6dd9{/api,null,AVAILABLE}
337036 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@affd8aa{/stages/stage/kill,null,AVAILABLE}
337037 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.ServerConnector  - Started ServerConnector@3dceb5f3{HTTP/1.1}{0.0.0.0:4040}
337037 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.Server  - Started @350030ms
337075 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@51bbd937{/metrics/json,null,AVAILABLE}
337205 [RpcServer.reader=1,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49678 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
337218 [ScalaTest-main-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@6249046d{/SQL,null,AVAILABLE}
337219 [ScalaTest-main-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@21c97c80{/SQL/json,null,AVAILABLE}
337220 [ScalaTest-main-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@16d0e2b6{/SQL/execution,null,AVAILABLE}
337220 [ScalaTest-main-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@3526f569{/SQL/execution/json,null,AVAILABLE}
337221 [ScalaTest-main-running-PhoenixSparkITTenantSpecific] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Started o.s.j.s.ServletContextHandler@71830491{/static/sql,null,AVAILABLE}
337292 [RpcServer.reader=2,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49682 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
337301 [RpcServer.reader=9,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50640 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can read from tenant-specific table as DataFrame
337533 [RpcServer.reader=3,bindAddress=asf934.gq1.ygridcore.net,port=41693] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 49688 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
337543 [RpcServer.reader=0,bindAddress=asf934.gq1.ygridcore.net,port=41617] INFO  SecurityLogger.org.apache.hadoop.hbase.Server  - Connection from 67.195.81.168 port: 50646 with version info: version: "1.1.9" url: "git://diocles.local/Volumes/hbase-1.1.9/hbase" revision: "0d1feabed5295495ed2257d31fab9e6553e8a9d7" user: "ndimiduk" date: "Mon Feb 20 22:35:28 PST 2017" src_checksum: "b68339108ddccd1dfc44a76646588a58"
- Can read from tenant-specific table as RDD
- Can write a DataFrame using 'DataFrame.saveToPhoenix' to tenant-specific view
- Can write a DataFrame using 'DataFrame.write' to tenant-specific view
- Can write an RDD to Phoenix tenant-specific view
338649 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.ServerConnector  - Stopped ServerConnector@3dceb5f3{HTTP/1.1}{0.0.0.0:4040}
338650 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@affd8aa{/stages/stage/kill,null,UNAVAILABLE}
338650 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2d0c6dd9{/api,null,UNAVAILABLE}
338650 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4bc00ae9{/,null,UNAVAILABLE}
338650 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@67c552d0{/static,null,UNAVAILABLE}
338650 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7c1580f4{/executors/threadDump/json,null,UNAVAILABLE}
338651 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@42fb3bd6{/executors/threadDump,null,UNAVAILABLE}
338651 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@43502630{/executors/json,null,UNAVAILABLE}
338651 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@50fbcd01{/executors,null,UNAVAILABLE}
338651 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@6c18cff0{/environment/json,null,UNAVAILABLE}
338651 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@2bfee7a0{/environment,null,UNAVAILABLE}
338651 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@610d3c7f{/storage/rdd/json,null,UNAVAILABLE}
338651 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@33e0a37c{/storage/rdd,null,UNAVAILABLE}
338652 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@7ae0aa9a{/storage/json,null,UNAVAILABLE}
338652 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4aaa6d19{/storage,null,UNAVAILABLE}
338652 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@22045601{/stages/pool/json,null,UNAVAILABLE}
338652 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@3b7f43f{/stages/pool,null,UNAVAILABLE}
338652 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@4ac9c165{/stages/stage/json,null,UNAVAILABLE}
338652 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@53066cb0{/stages/stage,null,UNAVAILABLE}
338653 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@f713bb3{/stages/json,null,UNAVAILABLE}
338653 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@3612d931{/stages,null,UNAVAILABLE}
338653 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@1bade6e9{/jobs/job/json,null,UNAVAILABLE}
338653 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@332083d0{/jobs/job,null,UNAVAILABLE}
338653 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@5e0b3a4c{/jobs/json,null,UNAVAILABLE}
338653 [ScalaTest-main-running-DiscoverySuite] INFO  org.spark_project.jetty.server.handler.ContextHandler  - Stopped o.s.j.s.ServletContextHandler@6f579167{/jobs,null,UNAVAILABLE}
Run completed in 7 minutes, 51 seconds.
Total number of tests run: 35
Suites: completed 4, aborted 0
Tests: succeeded 35, failed 0, canceled 0, ignored 0, pending 0
All tests passed.
[INFO] 
[INFO] --- maven-install-plugin:2.5.2:install (default-install) @ phoenix-spark ---
[INFO] Installing <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/653/artifact/phoenix-spark/target/phoenix-spark-4.14.0-HBase-1.1-SNAPSHOT.jar> to /home/jenkins/.m2/repository/org/apache/phoenix/phoenix-spark/4.14.0-HBase-1.1-SNAPSHOT/phoenix-spark-4.14.0-HBase-1.1-SNAPSHOT.jar
[INFO] Installing <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/ws/phoenix-spark/pom.xml> to /home/jenkins/.m2/repository/org/apache/phoenix/phoenix-spark/4.14.0-HBase-1.1-SNAPSHOT/phoenix-spark-4.14.0-HBase-1.1-SNAPSHOT.pom
[INFO] Installing <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/653/artifact/phoenix-spark/target/phoenix-spark-4.14.0-HBase-1.1-SNAPSHOT-sources.jar> to /home/jenkins/.m2/repository/org/apache/phoenix/phoenix-spark/4.14.0-HBase-1.1-SNAPSHOT/phoenix-spark-4.14.0-HBase-1.1-SNAPSHOT-sources.jar
[INFO] Installing <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/653/artifact/phoenix-spark/target/phoenix-spark-4.14.0-HBase-1.1-SNAPSHOT-tests.jar> to /home/jenkins/.m2/repository/org/apache/phoenix/phoenix-spark/4.14.0-HBase-1.1-SNAPSHOT/phoenix-spark-4.14.0-HBase-1.1-SNAPSHOT-tests.jar
[INFO] Installing <https://builds.apache.org/job/Phoenix-4.x-HBase-1.1/653/artifact/phoenix-spark/target/phoenix-spark-4.14.0-HBase-1.1-SNAPSHOT-javadoc.jar> to /home/jenkins/.m2/repository/org/apache/phoenix/phoenix-spark/4.14.0-HBase-1.1-SNAPSHOT/phoenix-spark-4.14.0-HBase-1.1-SNAPSHOT-javadoc.jar
[INFO] 
[INFO] ------------------------------------------------------------------------
[INFO] Building Phoenix - Hive 4.14.0-HBase-1.1-SNAPSHOT
[INFO] ------------------------------------------------------------------------
Downloading from apache release: https://repository.apache.org/content/repositories/releases/org/apache/geronimo/specs/geronimo-jaspic_1.0_spec/1.0/geronimo-jaspic_1.0_spec-1.0.pom
[WARNING] Failed to create parent directories for tracking file /home/jenkins/.m2/repository/org/apache/geronimo/specs/geronimo-jaspic_1.0_spec/1.0/geronimo-jaspic_1.0_spec-1.0.pom.lastUpdated
Downloading from central: https://repo.maven.apache.org/maven2/org/apache/geronimo/specs/geronimo-jaspic_1.0_spec/1.0/geronimo-jaspic_1.0_spec-1.0.pom
[WARNING] Failed to create parent directories for tracking file /home/jenkins/.m2/repository/org/apache/geronimo/specs/geronimo-jaspic_1.0_spec/1.0/geronimo-jaspic_1.0_spec-1.0.pom.lastUpdated
Downloading from datanucleus: http://www.datanucleus.org/downloads/maven2/org/apache/geronimo/specs/geronimo-jaspic_1.0_spec/1.0/geronimo-jaspic_1.0_spec-1.0.pom
[WARNING] Failed to create parent directories for tracking file /home/jenkins/.m2/repository/org/apache/geronimo/specs/geronimo-jaspic_1.0_spec/1.0/geronimo-jaspic_1.0_spec-1.0.pom.lastUpdated
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix ..................................... SUCCESS [  5.562 s]
[INFO] Phoenix Core ....................................... SUCCESS [  02:23 h]
[INFO] Phoenix - Flume .................................... SUCCESS [02:13 min]
[INFO] Phoenix - Kafka .................................... SUCCESS [02:29 min]
[INFO] Phoenix - Pig ...................................... SUCCESS [05:09 min]
[INFO] Phoenix Query Server Client ........................ SUCCESS [ 16.308 s]
[INFO] Phoenix Query Server ............................... SUCCESS [02:33 min]
[INFO] Phoenix - Pherf .................................... SUCCESS [02:49 min]
[INFO] Phoenix - Spark .................................... SUCCESS [08:34 min]
[INFO] Phoenix - Hive ..................................... FAILURE [  1.090 s]
[INFO] Phoenix Client ..................................... SKIPPED
[INFO] Phoenix Server ..................................... SKIPPED
[INFO] Phoenix Assembly ................................... SKIPPED
[INFO] Phoenix - Tracing Web Application .................. SKIPPED
[INFO] Phoenix Load Balancer .............................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 02:47 h
[INFO] Finished at: 2018-01-27T02:43:20Z
[INFO] Final Memory: 119M/1421M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal on project phoenix-hive: Could not resolve dependencies for project org.apache.phoenix:phoenix-hive:jar:4.14.0-HBase-1.1-SNAPSHOT: Failed to collect dependencies at org.apache.hive:hive-cli:jar:1.2.1 -> org.apache.hive:hive-service:jar:1.2.1 -> org.eclipse.jetty.aggregate:jetty-all:jar:7.6.0.v20120127 -> org.apache.geronimo.specs:geronimo-jaspic_1.0_spec:jar:1.0: Failed to read artifact descriptor for org.apache.geronimo.specs:geronimo-jaspic_1.0_spec:jar:1.0: Could not transfer artifact org.apache.geronimo.specs:geronimo-jaspic_1.0_spec:pom:1.0 from/to apache release (https://repository.apache.org/content/repositories/releases/): /home/jenkins/.m2/repository/org/apache/geronimo/specs/geronimo-jaspic_1.0_spec/1.0/geronimo-jaspic_1.0_spec-1.0.pom.part.lock (No such file or directory) -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/DependencyResolutionException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-hive
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
Recording test results