Posted to user@phoenix.apache.org by Dmitry Goldenberg <dg...@kmwllc.com> on 2014/05/19 00:47:07 UTC

Phoenix client jar vs. minimal jar

Hi,
I'm getting errors whether I try to use Phoenix with the client jar or with
the minimal jar, in the context of an application deployed to Tomcat.
In the client jar, I have hbase.defaults.for.version set to 0.98.1-hadoop2,
and I'm running against Hadoop 2.4 and HBase 0.98.2. I get the error whose
stack trace I'm attaching as Stack 1 below. It's almost as if Phoenix is
still trying to find Hadoop 1, perhaps?

I've tried switching to the minimal jar and working out the set of
dependency jars it needs, and I get the error shown as Stack 2 below. I've
looked at org.apache.hadoop.hbase.client.Scan and I can't tell what
could cause the NoClassDefFoundError there. Could it be the Hadoop
annotations, or the LogFactory.getLog(Scan.class) call?
Any ideas/pointers would be appreciated, thanks.
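
For reference, this is roughly the kind of call that produces both stack
traces below; it's only a minimal sketch, and the ZooKeeper quorum
("zkhost:2181") is a placeholder rather than my actual setup:

import java.sql.Connection;
import java.sql.DriverManager;

public class PhoenixConnectSketch {
    public static void main(String[] args) throws Exception {
        // The Phoenix thick driver registers itself for jdbc:phoenix:<zookeeper quorum> URLs.
        // On the first connection it initializes its ConnectionQueryServices and creates its
        // SYSTEM tables, which is where the failures in Stack 1 and Stack 2 are thrown.
        Connection conn = DriverManager.getConnection("jdbc:phoenix:zkhost:2181");
        conn.close();
    }
}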

Stack 1:
Caused by: java.lang.NoSuchMethodError: org.apache.hadoop.conf.Configuration.addDeprecations([Lorg/apache/hadoop/conf/Configuration$DeprecationDelta;)V
at org.apache.hadoop.hdfs.HdfsConfiguration.addDeprecatedKeys(HdfsConfiguration.java:66) ~[hadoop-hdfs-2.4.0.jar:?]
at org.apache.hadoop.hdfs.HdfsConfiguration.<clinit>(HdfsConfiguration.java:31) ~[hadoop-hdfs-2.4.0.jar:?]
at org.apache.hadoop.hdfs.DistributedFileSystem.<clinit>(DistributedFileSystem.java:110) ~[hadoop-hdfs-2.4.0.jar:?]
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:1.6.0_28]
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) ~[?:1.6.0_28]
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:1.6.0_28]
at java.lang.reflect.Constructor.newInstance(Constructor.java:534) ~[?:1.6.0_28]
at java.lang.Class.newInstance(Class.java:374) ~[?:1.6.0_28]
at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:373) ~[?:1.6.0_28]
at java.util.ServiceLoader$1.next(ServiceLoader.java:445) ~[?:1.6.0_28]
at org.apache.hadoop.fs.FileSystem.loadFileSystems(FileSystem.java:2400) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2411) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:166) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:351) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:287) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.hbase.util.DynamicClassLoader.<init>(DynamicClassLoader.java:104) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.<clinit>(ProtobufUtil.java:201) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.hbase.ClusterId.parseFrom(ClusterId.java:64) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.hbase.zookeeper.ZKClusterId.readClusterIdZNode(ZKClusterId.java:69) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getClusterId(ZooKeeperRegistry.java:83) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.retrieveClusterId(HConnectionManager.java:857) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.<init>(HConnectionManager.java:662) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:1.6.0_28]
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) ~[?:1.6.0_28]
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:1.6.0_28]
at java.lang.reflect.Constructor.newInstance(Constructor.java:534) ~[?:1.6.0_28]
at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:414) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:309) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.phoenix.query.HConnectionFactory$HConnectionFactoryImpl.createConnection(HConnectionFactory.java:47) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]
at org.apache.phoenix.query.ConnectionQueryServicesImpl.openConnection(ConnectionQueryServicesImpl.java:252) ~[phoenix-4.0.0-incubating-client.jar:4.0.0-incubating]

===============================

Stack 2:
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hbase.client.Scan
at org.apache.phoenix.compile.CreateTableCompiler.compile(CreateTableCompiler.java:81) ~[phoenix-4.0.0-incubating-client-minimal.jar:?]
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableCreateTableStatement.compilePlan(PhoenixStatement.java:436) ~[phoenix-4.0.0-incubating-client-minimal.jar:?]
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableCreateTableStatement.compilePlan(PhoenixStatement.java:425) ~[phoenix-4.0.0-incubating-client-minimal.jar:?]
at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:224) ~[phoenix-4.0.0-incubating-client-minimal.jar:?]
at org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:908) ~[phoenix-4.0.0-incubating-client-minimal.jar:?]
at org.apache.phoenix.query.ConnectionQueryServicesImpl.init(ConnectionQueryServicesImpl.java:1452) ~[phoenix-4.0.0-incubating-client-minimal.jar:?]
at org.apache.phoenix.jdbc.PhoenixDriver.getConnectionQueryServices(PhoenixDriver.java:131) ~[phoenix-4.0.0-incubating-client-minimal.jar:?]
at org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.connect(PhoenixEmbeddedDriver.java:112) ~[phoenix-4.0.0-incubating-client-minimal.jar:?]
at java.sql.DriverManager.getConnection(DriverManager.java:571) ~[?:1.7.0_51]
at java.sql.DriverManager.getConnection(DriverManager.java:233) ~[?:1.7.0_51]

Re: Phoenix client jar vs. minimal jar

Posted by Dmitry Goldenberg <dg...@kmwllc.com>.
Juan,

You've brought up a great point. Yes, the "without" jar may indeed be a good
choice, since everything except the Hadoop and HBase classes is wrapped into a
single jar. My worry, however, is about the ordering of these dependency
jars/classes and how they might affect the rest of the application. There's a
good number of jars the app already depends on that overlap with Phoenix's
dependencies, which is why I went with the minimal jar and cherry-picked the
dependencies one by one. With the "without" jar it seems we'd lose control over
these overlapping dependencies, and they could cause odd effects.
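
As a rough sketch, this is the kind of check I've been using to see which jar a
given class is actually loaded from at runtime (the two classes here are just
examples picked from the stack traces):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Scan;

public class ClasspathCheck {
    private static void report(Class<?> clazz) {
        // CodeSource tells us which jar (or directory) the class was loaded from,
        // which helps spot overlapping/conflicting copies on the Tomcat classpath.
        java.security.CodeSource src = clazz.getProtectionDomain().getCodeSource();
        System.out.println(clazz.getName() + " -> "
                + (src != null ? src.getLocation() : "unknown (bootstrap?)"));
    }

    public static void main(String[] args) {
        report(Configuration.class); // expected to come from a Hadoop 2.x jar
        report(Scan.class);          // expected to come from an HBase 0.98.x jar
    }
}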

One thing I noticed was that slf4j had to be added to the app's lib directory
under Tomcat, which explained the NoClassDefFoundError I quoted.

The other curious thing was that I appeared to need jsr305-1.3.9.jar for some
javax.annotation dependencies, and I'm not seeing that in the Phoenix pom
files.
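
A quick way I've been verifying that these stragglers are actually visible on
the webapp classpath, again just as a sketch (the class names are examples of
what slf4j and jsr305 provide):

public class ClassPresenceCheck {
    private static void check(String className) {
        try {
            // Class.forName forces the class to be looked up on the current classpath.
            Class.forName(className);
            System.out.println("OK      " + className);
        } catch (ClassNotFoundException e) {
            System.out.println("MISSING " + className);
        }
    }

    public static void main(String[] args) {
        check("org.slf4j.Logger");          // from slf4j-api
        check("javax.annotation.Nullable"); // from jsr305
    }
}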

- Dmitry



On Tue, May 20, 2014 at 4:09 AM, Juan Rodríguez Hortalá <
juan.rodriguez.hortala@gmail.com> wrote:

> Hi Dmitry, I'm not sure if it's relevant, but with HBase 0.94.6 and
> Phoenix 3 things worked for me with phoenix-*client-without-hbase.jar,
> /usr/lib/hbase/hbase.jar, /usr/lib/hadoop/hadoop-common.jar and
> /usr/lib/hadoop/hadoop-auth.jar copied to tomcat/lib in my installation of
> Saiku, which is also a Tomcat application.
>
> I hope that helps,
>
> Greetings,
>
> Juan Rodríguez
>

Re: Phoenix client jar vs. minimal jar

Posted by Juan Rodríguez Hortalá <ju...@gmail.com>.
Hi Dmitry, I'm not sure if it's relevant, but with HBase 0.94.6 and Phoenix
3 things worked for me with phoenix-*client-without-hbase.jar,
/usr/lib/hbase/hbase.jar, /usr/lib/hadoop/hadoop-common.jar and
/usr/lib/hadoop/hadoop-auth.jar copied to tomcat/lib in my installation of
Saiku, which is also a Tomcat application.

I hope that helps,

Greetings,

Juan Rodríguez

